Repository: DragonKingpin/Hydra
Branch: beta
Commit: 268d95a736a5
Files: 3349
Total size: 7.5 MB
Directory structure:
gitextract_py8snoc4/
├── .gitignore
├── .idea/
│   ├── .gitignore
│   ├── ApifoxUploaderProjectSetting.xml
│   ├── codeStyles/
│   │   └── codeStyleConfig.xml
│   ├── compiler.xml
│   ├── dataSources.xml
│   ├── dictionaries/
│   │   ├── project.xml
│   │   └── undefined.xml
│   ├── encodings.xml
│   ├── jarRepositories.xml
│   ├── misc.xml
│   ├── sqldialects.xml
│   ├── uiDesigner.xml
│   └── vcs.xml
├── Archcraft/
│   ├── ender-system-hydra/
│   │   ├── pom.xml
│   │   └── src/
│   │       └── main/
│   │           └── java/
│   │               └── com/
│   │                   └── walnut/
│   │                       └── archcraft/
│   │                           └── ender/
│   │                               ├── EnderHydra.java
│   │                               └── system/
│   │                                   ├── HydraEmpire.java
│   │                                   ├── Hydroxy.java
│   │                                   └── HydroxyImage.java
│   ├── pom.xml
│   ├── redstone-architecture/
│   │   ├── pom.xml
│   │   └── src/
│   │       └── main/
│   │           └── java/
│   │               └── com/
│   │                   └── walnut/
│   │                       └── archcraft/
│   │                           └── redstone/
│   │                               ├── architect/
│   │                               │   ├── Bedrock.java
│   │                               │   ├── Redstone.java
│   │                               │   └── Stone.java
│   │                               ├── response/
│   │                               │   ├── ArchResponseObjectManager.java
│   │                               │   ├── GenericResultResponse.java
│   │                               │   ├── RedResponse.java
│   │                               │   ├── RedResponseEntity.java
│   │                               │   ├── RedTraceableResponse.java
│   │                               │   └── ResponseObjectManager.java
│   │                               ├── system/
│   │                               │   └── Dummy.java
│   │                               └── util/
│   │                                   └── Dummy.java
│   └── redstone-message-stones/
│       ├── pom.xml
│       └── src/
│           └── main/
│               └── java/
│                   └── com/
│                       └── walnut/
│                           └── archcraft/
│                               └── redstone/
│                                   ├── Dummy.java
│                                   └── messge/
│                                       └── PrimaryMessageWareStone.java
├── CHANGELOG.md
├── File/
│   ├── File.iml
│   ├── pom.xml
│   └── src/
│       ├── main/
│       │   └── java/
│       │       └── com/
│       │           └── genius/
│       │               ├── App.java
│       │               ├── cache/
│       │               │   ├── FileCache.java
│       │               │   ├── FileCacheManager.java
│       │               │   └── FileCacheManagerInstance.java
│       │               ├── constpool/
│       │               │   └── GlobalFileCache.java
│       │               ├── exception/
│       │               │   └── FileCacheException.java
│       │               ├── method/
│       │               │   └── FileCondition.java
│       │               ├── pojo/
│       │               │   ├── CommonConfigFile.java
│       │               │   ├── ConfigFile.java
│       │               │   ├── FileType.java
│       │               │   └── oss/
│       │               │       ├── AliyunOSS.java
│       │               │       └── OssAble.java
│       │               └── util/
│       │                   ├── FileUtil.java
│       │                   ├── JsonFileUtil.java
│       │                   └── OSSUtil.java
│       └── test/
│           └── java/
│               └── com/
│                   └── genius/
│                       └── AppTest.java
├── Hydra/
│   ├── hydra-architecture/
│   │   ├── pom.xml
│   │   └── src/
│   │       └── main/
│   │           └── java/
│   │               └── com/
│   │                   └── pinecone/
│   │                       └── hydra/
│   │                           ├── Hydra.java
│   │                           ├── Hydradom.java
│   │                           ├── deploy/
│   │                           │   ├── Container.java
│   │                           │   ├── Deploy.java
│   │                           │   ├── Integration.java
│   │                           │   ├── Namespace.java
│   │                           │   ├── PhysicalHost.java
│   │                           │   ├── Quick.java
│   │                           │   ├── Server.java
│   │                           │   └── VirtualMachine.java
│   │                           ├── device/
│   │                           │   ├── Deployment.java
│   │                           │   ├── Device.java
│   │                           │   └── Disk.java
│   │                           ├── express/
│   │                           │   ├── Deliver.java
│   │                           │   ├── Express.java
│   │                           │   └── Package.java
│   │                           ├── system/
│   │                           │   ├── ArchModularizedSubsystem.java
│   │                           │   ├── ArchSystemAutoAssembleComponent.java
│   │                           │   ├── ArchSystemCascadeComponent.java
│   │                           │   ├── ArchSystemCascadeComponentManager.java
│   │                           │   ├── BlockSystem.java
│   │                           │   ├── DistributedSystem.java
│   │                           │   ├── FederalSystem.java
│   │                           │   ├── HierarchySystem.java
│   │                           │   ├── HyComponent.java
│   │                           │   ├── HyHierarchy.java
│   │                           │   ├── HySkeleton.java
│   │                           │   ├── Hydrogen.java
│   │                           │   ├── MultiComponentSystem.java
│   │                           │   ├── ScopedSystem.java
│   │                           │   ├── SystemCascadeComponent.java
│   │                           │   ├── SystemCascadeComponentManager.java
│   │                           │   ├── SystemSkeleton.java
│   │                           │   ├── centrum/
│   │                           │   │   ├── CentralControlSubsystem.java
│   │                           │   │   ├── Centrum.java
│   │                           │   │   ├── Metasystem.java
│   │                           │   │   └── UniformCentralSystem.java
│   │                           │   ├── 
component/ │ │ │ │ ├── ComponentInitializationException.java │ │ │ │ ├── GenericResourceDispenserCenter.java │ │ │ │ ├── GenericTracerScope.java │ │ │ │ ├── LogStatuses.java │ │ │ │ ├── LoggingConfigurator.java │ │ │ │ ├── ResourceDispenserCenter.java │ │ │ │ ├── Slf4jTraceable.java │ │ │ │ ├── Slf4jTracerScope.java │ │ │ │ ├── TracerConfigurator.java │ │ │ │ └── TracerScope.java │ │ │ ├── identifier/ │ │ │ │ └── KOPathResolver.java │ │ │ ├── imperium/ │ │ │ │ ├── ImperiumPrivy.java │ │ │ │ ├── KernelObjectRootMountPoint.java │ │ │ │ ├── KernelPrivyFileSystemConstants.java │ │ │ │ └── KernelRootMountPoint.java │ │ │ ├── ko/ │ │ │ │ ├── ArchKernelObjectConfig.java │ │ │ │ ├── CascadeInstrument.java │ │ │ │ ├── CascadeKOTreeInstrument.java │ │ │ │ ├── CascadeKernelObjectInstrument.java │ │ │ │ ├── InstrumentException.java │ │ │ │ ├── KernelObject.java │ │ │ │ ├── KernelObjectConfig.java │ │ │ │ ├── KernelObjectConstants.java │ │ │ │ ├── KernelObjectInstrument.java │ │ │ │ ├── KernelObjectTreeInstrument.java │ │ │ │ ├── MetaPersistenceException.java │ │ │ │ ├── QueryableInstrument.java │ │ │ │ ├── UOIUtils.java │ │ │ │ ├── action/ │ │ │ │ │ ├── ActionObject.java │ │ │ │ │ └── EventObject.java │ │ │ │ ├── control/ │ │ │ │ │ └── ControlObject.java │ │ │ │ ├── dao/ │ │ │ │ │ └── GUIDNameManipulator.java │ │ │ │ ├── driver/ │ │ │ │ │ ├── KOIMappingDriver.java │ │ │ │ │ ├── KOIMappingDriverFactory.java │ │ │ │ │ ├── KOIMasterManipulator.java │ │ │ │ │ └── KOISkeletonMasterManipulator.java │ │ │ │ ├── entity/ │ │ │ │ │ ├── ObjectHandle.java │ │ │ │ │ └── ObjectTable.java │ │ │ │ ├── handle/ │ │ │ │ │ ├── AppliableKHandle.java │ │ │ │ │ ├── ArchKHandle.java │ │ │ │ │ ├── HandleObject.java │ │ │ │ │ ├── HandleType.java │ │ │ │ │ ├── KHandle.java │ │ │ │ │ ├── KOMMountPointHandle.java │ │ │ │ │ ├── ObjectTreeAddressingSectionHandle.java │ │ │ │ │ ├── ObjectTreeGUIDAddressingSectionHandle.java │ │ │ │ │ └── SectionHandle.java │ │ │ │ ├── kom/ │ │ │ │ │ ├── ArchKOMTree.java │ │ │ │ │ ├── ArchReparseKOMTree.java │ │ │ │ │ ├── ExpressInstrument.java │ │ │ │ │ ├── GenericReparseKOMTreeAddition.java │ │ │ │ │ ├── KOMInstrument.java │ │ │ │ │ ├── KOMSelector.java │ │ │ │ │ ├── MultiFolderPathSelector.java │ │ │ │ │ ├── PathSelector.java │ │ │ │ │ ├── ProxiedKOMMountPointHandle.java │ │ │ │ │ ├── ReparseKOMTree.java │ │ │ │ │ ├── ReparseKOMTreeAddition.java │ │ │ │ │ ├── ReparseLinkSelector.java │ │ │ │ │ ├── ReparsePointSelector.java │ │ │ │ │ ├── SimpleMultiFolderPathSelector.java │ │ │ │ │ ├── SimplePathSelector.java │ │ │ │ │ └── StandardPathSelector.java │ │ │ │ ├── meta/ │ │ │ │ │ └── ElementObject.java │ │ │ │ └── runtime/ │ │ │ │ ├── ArchDirectMappingTrieRuntimeKOMTree.java │ │ │ │ ├── ArchRuntimeKOMTree.java │ │ │ │ ├── CentralizedRuntimeInstrument.java │ │ │ │ ├── DirectMappingTrieRuntimeInstrument.java │ │ │ │ ├── GenericRuntimeInstrumentConfig.java │ │ │ │ ├── KernelExpressInstrument.java │ │ │ │ └── RuntimeInstrument.java │ │ │ ├── polity/ │ │ │ │ └── RepublicSystem.java │ │ │ ├── subsystem/ │ │ │ │ ├── ArchMicroSystem.java │ │ │ │ ├── ArchSubsystemDirector.java │ │ │ │ ├── Cabinet.java │ │ │ │ ├── CentralKernelLordFederation.java │ │ │ │ ├── CentralMicroSystemCabinet.java │ │ │ │ ├── Federation.java │ │ │ │ ├── KernelLordFederation.java │ │ │ │ ├── KernelMicroSystemCabinet.java │ │ │ │ ├── MicroSystem.java │ │ │ │ └── SubsystemDirector.java │ │ │ └── types/ │ │ │ └── HydraKingdom.java │ │ ├── unit/ │ │ │ ├── imperium/ │ │ │ │ ├── ArchRegimentObjectModel.java │ │ │ │ ├── 
ArchUniformInstitutionalizedInstrument.java │ │ │ │ ├── GUIDImperialTrieNode.java │ │ │ │ ├── ImperialTree.java │ │ │ │ ├── ImperialTreeConstants.java │ │ │ │ ├── ImperialTreeNode.java │ │ │ │ ├── LinkedType.java │ │ │ │ ├── RegimentedImperialTree.java │ │ │ │ ├── UniImperialTree.java │ │ │ │ ├── entity/ │ │ │ │ │ ├── BranchNode.java │ │ │ │ │ ├── ElementumNode.java │ │ │ │ │ ├── EntityNode.java │ │ │ │ │ ├── MetaEntryNode.java │ │ │ │ │ ├── MetadataNode.java │ │ │ │ │ ├── ReparseLinkNode.java │ │ │ │ │ ├── SkeletonNode.java │ │ │ │ │ ├── TreeNode.java │ │ │ │ │ └── TreeReparseLinkNode.java │ │ │ │ ├── operator/ │ │ │ │ │ ├── OperatorFactory.java │ │ │ │ │ └── TreeNodeOperator.java │ │ │ │ └── source/ │ │ │ │ ├── TireOwnerManipulator.java │ │ │ │ ├── TreeMasterManipulator.java │ │ │ │ ├── TriePathCacheManipulator.java │ │ │ │ └── TrieTreeManipulator.java │ │ │ ├── iqueue/ │ │ │ │ ├── ArchQueueTableMeta.java │ │ │ │ ├── ConfigurableMegaDeflectPriorityQueueMeta.java │ │ │ │ ├── ConfigurableMegaStratumQueueMeta.java │ │ │ │ ├── DPQueueManipulator.java │ │ │ │ ├── DPStratumQueueManipulator.java │ │ │ │ ├── DeflectPriorityQueue.java │ │ │ │ ├── MagnitudeDPQueue.java │ │ │ │ ├── MegaDPStratumQueue.java │ │ │ │ ├── MegaDeflectPriorityQueueMeta.java │ │ │ │ ├── MegaPriorityQueue.java │ │ │ │ ├── MegaStratumQueue.java │ │ │ │ ├── MegaStratumQueueMeta.java │ │ │ │ ├── QueueExistManipulator.java │ │ │ │ ├── QueueMasterManipulator.java │ │ │ │ ├── QueueMeta.java │ │ │ │ ├── SharedSegmentIQueue.java │ │ │ │ └── entity/ │ │ │ │ ├── GenericQueueElement.java │ │ │ │ ├── GenericStratumQueueElement.java │ │ │ │ ├── QueueElement.java │ │ │ │ └── QueueStratumElement.java │ │ │ └── vgraph/ │ │ │ ├── ArchAtlasInstrument.java │ │ │ ├── ArchVectorDAG.java │ │ │ ├── AtlasInstrument.java │ │ │ ├── GraphNodePair.java │ │ │ ├── MagnitudeVectorDAG.java │ │ │ ├── VectorDAG.java │ │ │ ├── VectorGraphConfig.java │ │ │ ├── VectorGraphConstants.java │ │ │ ├── algo/ │ │ │ │ ├── BasicDAGPathResolver.java │ │ │ │ ├── BasicDAGPathSelector.java │ │ │ │ ├── DAGPathResolver.java │ │ │ │ └── DAGPathSelector.java │ │ │ ├── entity/ │ │ │ │ └── GraphNode.java │ │ │ ├── layer/ │ │ │ │ ├── AtlasLayer.java │ │ │ │ ├── AtlasLayerNamespace.java │ │ │ │ ├── Layer.java │ │ │ │ ├── LayerConfig.java │ │ │ │ ├── LayerGraphHandle.java │ │ │ │ ├── LayerInstrument.java │ │ │ │ ├── LayerNamespace.java │ │ │ │ ├── LayerTreeNode.java │ │ │ │ ├── VLayerConfig.java │ │ │ │ ├── VLayerInstrument.java │ │ │ │ ├── operator/ │ │ │ │ │ ├── ArchLayerComponentOperator.java │ │ │ │ │ ├── AtlasLayerComponentOperatorFactory.java │ │ │ │ │ ├── LayerComponentOperator.java │ │ │ │ │ ├── LayerComponentOperatorFactory.java │ │ │ │ │ ├── LayerNamespaceOperator.java │ │ │ │ │ └── LayerOperator.java │ │ │ │ └── source/ │ │ │ │ ├── LayerHandleManipulator.java │ │ │ │ ├── LayerManipulator.java │ │ │ │ ├── LayerMasterManipulator.java │ │ │ │ ├── LayerMasterTreeManipulator.java │ │ │ │ ├── LayerOwnerManipulator.java │ │ │ │ ├── LayerPathCacheManipulator.java │ │ │ │ ├── LayerTreeManipulator.java │ │ │ │ └── NamespaceManipulator.java │ │ │ ├── source/ │ │ │ │ ├── AtlasMappingDriver.java │ │ │ │ ├── AtlasMasterManipulator.java │ │ │ │ ├── VectorGraphManipulator.java │ │ │ │ ├── VectorGraphMasterManipulator.java │ │ │ │ └── VectorGraphPathCacheManipulator.java │ │ │ └── traversal/ │ │ │ ├── AtlasGraphIterator.java │ │ │ └── GraphIterator.java │ │ └── ware/ │ │ ├── DataWare.java │ │ ├── MessageWare.java │ │ ├── Middleware.java │ │ ├── MiddlewareDirector.java │ │ ├── 
MiddlewareManager.java │ │ ├── OLAPWare.java │ │ ├── OLTPWare.java │ │ ├── RDBWare.java │ │ ├── Ware.java │ │ ├── WareDirector.java │ │ ├── WareDomain.java │ │ └── WareManager.java │ ├── hydra-architecture-conduct/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ └── system/ │ │ ├── conduct/ │ │ │ ├── CascadeMarshal.java │ │ │ ├── CascadeUnit.java │ │ │ ├── Marshal.java │ │ │ ├── Unionem.java │ │ │ └── Unit.java │ │ ├── flow/ │ │ │ ├── CascadeFlow.java │ │ │ ├── Flow.java │ │ │ ├── SequentialFlow.java │ │ │ └── Stage.java │ │ └── ups/ │ │ ├── UniformPyramidTask.java │ │ └── UniformPyramidTaskInstrument.java │ ├── hydra-architecture-message/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── message/ │ │ ├── ArchResponse.java │ │ └── StringResponse.java │ ├── hydra-architecture-storage/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ └── storage/ │ │ └── UFile.java │ ├── hydra-framework-config/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ ├── account/ │ │ │ ├── AccountConfig.java │ │ │ ├── AccountManager.java │ │ │ ├── KernelAccountConfig.java │ │ │ ├── UniformAccountManager.java │ │ │ ├── entity/ │ │ │ │ ├── ACNodeAllotment.java │ │ │ │ ├── Account.java │ │ │ │ ├── ArchElementNode.java │ │ │ │ ├── ArchFolderElementNode.java │ │ │ │ ├── Authorization.java │ │ │ │ ├── Credential.java │ │ │ │ ├── Domain.java │ │ │ │ ├── ElementNode.java │ │ │ │ ├── FileElement.java │ │ │ │ ├── FolderElement.java │ │ │ │ ├── GenericACNodeAllotment.java │ │ │ │ ├── GenericAccount.java │ │ │ │ ├── GenericAuthorization.java │ │ │ │ ├── GenericCredential.java │ │ │ │ ├── GenericDomain.java │ │ │ │ ├── GenericGroup.java │ │ │ │ ├── GenericPrivilege.java │ │ │ │ ├── GenericRole.java │ │ │ │ ├── Group.java │ │ │ │ ├── Privilege.java │ │ │ │ └── Role.java │ │ │ ├── operator/ │ │ │ │ ├── AccountServiceOperator.java │ │ │ │ ├── AccountServiceOperatorFactory.java │ │ │ │ ├── ArchAccountServiceOperator.java │ │ │ │ ├── GenericAccountOperator.java │ │ │ │ ├── GenericAccountOperatorFactory.java │ │ │ │ ├── GenericDomainOperator.java │ │ │ │ └── GenericGroupOperator.java │ │ │ └── source/ │ │ │ ├── AuthorizationManipulator.java │ │ │ ├── CredentialManipulator.java │ │ │ ├── DomainNodeManipulator.java │ │ │ ├── GroupNodeManipulator.java │ │ │ ├── PrivilegeManipulator.java │ │ │ ├── RoleManipulator.java │ │ │ ├── UserMasterManipulator.java │ │ │ └── UserNodeManipulator.java │ │ ├── config/ │ │ │ ├── ConfigSource.java │ │ │ ├── LocalConfigSource.java │ │ │ ├── MapConfigReinterpreter.java │ │ │ └── ScopedMapConfigReinterpreter.java │ │ └── registry/ │ │ ├── GenericKOMRegistry.java │ │ ├── GenericRenderKOMRegistry.java │ │ ├── KOMRegistry.java │ │ ├── KernelRegistryConfig.java │ │ ├── Registry.java │ │ ├── RegistryConfig.java │ │ ├── RegistryJPathSelector.java │ │ ├── RegistrySelectorCursorParser.java │ │ ├── RenderDistributeRegistry.java │ │ ├── SelectorParseException.java │ │ ├── entity/ │ │ │ ├── ArchConfigNode.java │ │ │ ├── ArchElementNode.java │ │ │ ├── Attributes.java │ │ │ ├── ConfigNode.java │ │ │ ├── ConfigNodeMeta.java │ │ │ ├── DefaultPropertyConverter.java │ │ │ ├── DefaultTextValueConverter.java │ │ │ ├── ElementNode.java │ │ │ ├── GenericAttributes.java │ │ │ ├── GenericConfigNodeMeta.java │ │ │ ├── GenericNamespace.java │ │ │ ├── GenericNamespaceMeta.java │ │ │ ├── GenericProperties.java │ │ │ 
├── GenericProperty.java │ │ │ ├── GenericTextFile.java │ │ │ ├── GenericTextValue.java │ │ │ ├── Namespace.java │ │ │ ├── NamespaceMeta.java │ │ │ ├── Properties.java │ │ │ ├── Property.java │ │ │ ├── PropertyJSONEncoder.java │ │ │ ├── PropertyTypes.java │ │ │ ├── RegistryTreeNode.java │ │ │ ├── TextFile.java │ │ │ ├── TextValue.java │ │ │ ├── TextValueTypes.java │ │ │ └── TypeConverter.java │ │ ├── marshaling/ │ │ │ ├── AnnotatedRegObjectInjector.java │ │ │ ├── RegistryDOMEncoder.java │ │ │ ├── RegistryDecoder.java │ │ │ ├── RegistryEncoder.java │ │ │ ├── RegistryJQuery.java │ │ │ ├── RegistryJSONDecoder.java │ │ │ ├── RegistryJSONEncoder.java │ │ │ └── RegistryQuery.java │ │ ├── operator/ │ │ │ ├── ArchConfigNodeOperator.java │ │ │ ├── ArchRegistryOperator.java │ │ │ ├── GenericRegistryOperatorFactory.java │ │ │ ├── NamespaceNodeOperator.java │ │ │ ├── PropertiesOperator.java │ │ │ ├── RegistryNodeOperator.java │ │ │ ├── RegistryOperatorFactory.java │ │ │ └── TextValueNodeOperator.java │ │ ├── render/ │ │ │ ├── GenericRenderNamespace.java │ │ │ ├── GenericRenderProperties.java │ │ │ ├── GenericRenderProperty.java │ │ │ ├── GenericRenderTextFile.java │ │ │ ├── GenericRenderTextValue.java │ │ │ ├── RenderConfigNode.java │ │ │ ├── RenderNamespace.java │ │ │ ├── RenderProperties.java │ │ │ ├── RenderProperty.java │ │ │ ├── RenderRegistryTreeNode.java │ │ │ ├── RenderTextFile.java │ │ │ └── RenderTextValue.java │ │ └── source/ │ │ ├── RegistryAttributesManipulator.java │ │ ├── RegistryConfigNodeManipulator.java │ │ ├── RegistryMasterManipulator.java │ │ ├── RegistryNSNodeManipulator.java │ │ ├── RegistryNSNodeMetaManipulator.java │ │ ├── RegistryNodeMetaManipulator.java │ │ ├── RegistryNodeOwnerManipulator.java │ │ ├── RegistryNodePathManipulator.java │ │ ├── RegistryPropertiesManipulator.java │ │ ├── RegistryTextFileManipulator.java │ │ └── RegistryTreeManipulator.java │ ├── hydra-framework-device/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ ├── deploy/ │ │ │ ├── ArchDeployFamilyMeta.java │ │ │ ├── DeployExtraMeta.java │ │ │ ├── DeployFamilyMeta.java │ │ │ ├── entity/ │ │ │ │ ├── GenericContainer.java │ │ │ │ ├── GenericPhysicalHost.java │ │ │ │ ├── GenericQuick.java │ │ │ │ └── GenericVirtualMachine.java │ │ │ └── kom/ │ │ │ ├── DeployConfig.java │ │ │ ├── DeployFamilyNode.java │ │ │ ├── DeployInstrument.java │ │ │ ├── DeployPathSelector.java │ │ │ ├── KernelDeployConfig.java │ │ │ ├── UniformDeployInstrument.java │ │ │ ├── entity/ │ │ │ │ ├── ArchElementNode.java │ │ │ │ ├── ArchServerElement.java │ │ │ │ ├── ClusterElement.java │ │ │ │ ├── CommonMeta.java │ │ │ │ ├── ContainerElement.java │ │ │ │ ├── DeployElement.java │ │ │ │ ├── DeployInsMapping.java │ │ │ │ ├── DeployTreeNode.java │ │ │ │ ├── ElementNode.java │ │ │ │ ├── FolderElement.java │ │ │ │ ├── GenericClusterElement.java │ │ │ │ ├── GenericCommonMeta.java │ │ │ │ ├── GenericContainerElement.java │ │ │ │ ├── GenericDeployInsMapping.java │ │ │ │ ├── GenericNamespace.java │ │ │ │ ├── GenericPhysicalHostElement.java │ │ │ │ ├── GenericQuickElement.java │ │ │ │ ├── GenericVirtualMachineElement.java │ │ │ │ ├── Namespace.java │ │ │ │ ├── PhysicalHostElement.java │ │ │ │ ├── QuickElement.java │ │ │ │ ├── ServerElement.java │ │ │ │ └── VirtualMachineElement.java │ │ │ ├── marshaling/ │ │ │ │ ├── DeployInstrumentDecoder.java │ │ │ │ ├── DeployInstrumentEncoder.java │ │ │ │ ├── DeployJSONDecoder.java │ │ │ │ └── DeployJSONEncoder.java │ │ │ ├── operator/ │ │ │ │ ├── 
ArchElementOperator.java │ │ │ │ ├── ClusterElementOperator.java │ │ │ │ ├── ContainerElementOperator.java │ │ │ │ ├── ElementOperator.java │ │ │ │ ├── ElementOperatorFactory.java │ │ │ │ ├── GenericElementOperatorFactory.java │ │ │ │ ├── NamespaceOperator.java │ │ │ │ ├── PhysicalHostElementOperator.java │ │ │ │ ├── QuickElementOperator.java │ │ │ │ └── VirtualMachineElementOperator.java │ │ │ └── source/ │ │ │ ├── ClusterNodeManipulator.java │ │ │ ├── ContainerElementManipulator.java │ │ │ ├── DeployMasterManipulator.java │ │ │ ├── DeployNamespaceManipulator.java │ │ │ ├── DeployNodeManipulator.java │ │ │ ├── DeployServiceInsMappingManipulator.java │ │ │ ├── NodeMetaManipulator.java │ │ │ ├── PhysicalHostManipulator.java │ │ │ ├── QuickElementManipulator.java │ │ │ └── VirtualMachineManipulator.java │ │ └── server/ │ │ ├── ArchServer.java │ │ ├── ArchServersCenter.java │ │ ├── Server.java │ │ └── ServersCenter.java │ ├── hydra-framework-runtime/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── hydra/ │ │ │ ├── auto/ │ │ │ │ ├── AbortException.java │ │ │ │ ├── ArchAutomatron.java │ │ │ │ ├── ArchInstructation.java │ │ │ │ ├── ArchParallelInstructation.java │ │ │ │ ├── ArchParallelSuggestation.java │ │ │ │ ├── ArchSequentialMarshalling.java │ │ │ │ ├── ArchSuggestation.java │ │ │ │ ├── Automaton.java │ │ │ │ ├── Automatron.java │ │ │ │ ├── AutomatronMationInvoker.java │ │ │ │ ├── Continue.java │ │ │ │ ├── ContinueException.java │ │ │ │ ├── DeathExceptionHandler.java │ │ │ │ ├── Die.java │ │ │ │ ├── ExceptionHandler.java │ │ │ │ ├── GenericMarshalling.java │ │ │ │ ├── GenericMationInvoker.java │ │ │ │ ├── Heartbeat.java │ │ │ │ ├── IgnoredReason.java │ │ │ │ ├── InstantInstructation.java │ │ │ │ ├── InstantKillException.java │ │ │ │ ├── Instructation.java │ │ │ │ ├── KernelInstructation.java │ │ │ │ ├── LifecycleAutomaton.java │ │ │ │ ├── Marshalling.java │ │ │ │ ├── MationInvoker.java │ │ │ │ ├── ParallelInstructation.java │ │ │ │ ├── ParallelSuggestation.java │ │ │ │ ├── PeriodicAutomaton.java │ │ │ │ ├── PeriodicAutomatron.java │ │ │ │ ├── Suggestation.java │ │ │ │ └── Terminate.java │ │ │ ├── orchestration/ │ │ │ │ ├── ArchExertion.java │ │ │ │ ├── ArchGraphNode.java │ │ │ │ ├── ArchIrrevocableController.java │ │ │ │ ├── ArchLoop.java │ │ │ │ ├── ArchParallel.java │ │ │ │ ├── ArchSequential.java │ │ │ │ ├── ArchStratum.java │ │ │ │ ├── ArchTransaction.java │ │ │ │ ├── BooleanCondition.java │ │ │ │ ├── BranchControlException.java │ │ │ │ ├── BranchNoticeException.java │ │ │ │ ├── BreakController.java │ │ │ │ ├── BreakPoint.java │ │ │ │ ├── CausalBranch.java │ │ │ │ ├── Condition.java │ │ │ │ ├── ContinueController.java │ │ │ │ ├── ContinuePoint.java │ │ │ │ ├── Exertion.java │ │ │ │ ├── ExertionEventCallback.java │ │ │ │ ├── ExertionStatus.java │ │ │ │ ├── Exertium.java │ │ │ │ ├── GraphNode.java │ │ │ │ ├── GraphStratum.java │ │ │ │ ├── InstantJumpOutBranchException.java │ │ │ │ ├── IntegrityLevel.java │ │ │ │ ├── JumpController.java │ │ │ │ ├── JumpPoint.java │ │ │ │ ├── Loop.java │ │ │ │ ├── LoopAction.java │ │ │ │ ├── Notifiable.java │ │ │ │ ├── Parallel.java │ │ │ │ ├── ParallelAction.java │ │ │ │ ├── ProcessController.java │ │ │ │ ├── Sequential.java │ │ │ │ ├── SequentialAction.java │ │ │ │ ├── Transaction.java │ │ │ │ ├── UnfulfilledActionException.java │ │ │ │ ├── parallel/ │ │ │ │ │ ├── ArchMasterParallelium.java │ │ │ │ │ ├── ParallelExertion.java │ │ │ │ │ └── WrappedMasterParallelium.java │ │ │ │ ├── recorder/ │ │ │ │ │ 
└── ActionTape.java │ │ │ │ └── regulation/ │ │ │ │ ├── NeglectRegulation.java │ │ │ │ ├── Regulation.java │ │ │ │ └── RuntimeNeglector.java │ │ │ ├── proc/ │ │ │ │ ├── ArchProcessManager.java │ │ │ │ ├── ArchUProcess.java │ │ │ │ ├── ControllableLevel.java │ │ │ │ ├── GenericProcessActionTape.java │ │ │ │ ├── InstitutionalProcess.java │ │ │ │ ├── KernelProcess.java │ │ │ │ ├── LineageProcessEnvironmentSection.java │ │ │ │ ├── LocalHostedProcess.java │ │ │ │ ├── LocalUProcess.java │ │ │ │ ├── ProcessActionTape.java │ │ │ │ ├── ProcessEnvironmentSection.java │ │ │ │ ├── ProcessManager.java │ │ │ │ ├── ProcessManagerConfig.java │ │ │ │ ├── ProcessManagerSystema.java │ │ │ │ ├── RemoteUProcess.java │ │ │ │ ├── UProcess.java │ │ │ │ ├── UniformProcessConfig.java │ │ │ │ ├── UniformProcessManager.java │ │ │ │ ├── entity/ │ │ │ │ │ ├── ElementNode.java │ │ │ │ │ └── ProcessElement.java │ │ │ │ ├── event/ │ │ │ │ │ ├── ProcessEvent.java │ │ │ │ │ ├── ProcessEventHandler.java │ │ │ │ │ └── ProcessLifecycleHandler.java │ │ │ │ ├── image/ │ │ │ │ │ ├── ArchEntryPointRunnable.java │ │ │ │ │ ├── ArchExecutionImage.java │ │ │ │ │ ├── ArchImageLoader.java │ │ │ │ │ ├── EntryPointRunnable.java │ │ │ │ │ ├── ExecutionImage.java │ │ │ │ │ ├── FileSystemMappingImageLoader.java │ │ │ │ │ ├── GenericClassImage.java │ │ │ │ │ ├── ImageLoadException.java │ │ │ │ │ ├── ImageLoadProcedureException.java │ │ │ │ │ ├── ImageLoader.java │ │ │ │ │ ├── ImageModifier.java │ │ │ │ │ ├── JVMClassExecutionImage.java │ │ │ │ │ ├── LocalHostedClassImage.java │ │ │ │ │ ├── SafeImageModifier.java │ │ │ │ │ ├── URLImageLoader.java │ │ │ │ │ ├── UniformImageLoader.java │ │ │ │ │ ├── UniformMultiScopeImageLoader.java │ │ │ │ │ └── kom/ │ │ │ │ │ ├── ElementNode.java │ │ │ │ │ ├── GenericImageElement.java │ │ │ │ │ ├── ImageElement.java │ │ │ │ │ ├── VirtualExeImageInstrument.java │ │ │ │ │ └── VirtualMappingExeImageInstrument.java │ │ │ │ ├── ns/ │ │ │ │ │ ├── ControlGroup.java │ │ │ │ │ ├── GenericSegregationSpace.java │ │ │ │ │ └── ProcSpace.java │ │ │ │ ├── signal/ │ │ │ │ │ └── Signal.java │ │ │ │ └── tomb/ │ │ │ │ ├── ResurgentTombstone.java │ │ │ │ ├── RuntimeTombstone.java │ │ │ │ └── TombCheckpoint.java │ │ │ ├── servgram/ │ │ │ │ ├── ActionType.java │ │ │ │ ├── ArchGramFactory.java │ │ │ │ ├── ArchGramLoader.java │ │ │ │ ├── ArchGramScopeSet.java │ │ │ │ ├── ArchServgramOrchestrator.java │ │ │ │ ├── ArchServgramium.java │ │ │ │ ├── AutoOrchestrator.java │ │ │ │ ├── Gram.java │ │ │ │ ├── GramFactory.java │ │ │ │ ├── GramLoader.java │ │ │ │ ├── GramScope.java │ │ │ │ ├── GramTransaction.java │ │ │ │ ├── LocalGramFactory.java │ │ │ │ ├── LocalGramLoader.java │ │ │ │ ├── LocalGramScopeSet.java │ │ │ │ ├── LocalGramTransaction.java │ │ │ │ ├── LocalParallelGramExertium.java │ │ │ │ ├── LocalSequentialGramExertium.java │ │ │ │ ├── LocalServgramOrchestrator.java │ │ │ │ ├── MultiGramsLoader.java │ │ │ │ ├── OrchestrateInterruptException.java │ │ │ │ ├── Servgram.java │ │ │ │ ├── ServgramOrchestrator.java │ │ │ │ ├── Servgramium.java │ │ │ │ ├── Servgramlet.java │ │ │ │ └── filters/ │ │ │ │ ├── AnnotationValueFilter.java │ │ │ │ ├── ExcludeGramFilters.java │ │ │ │ └── GramAnnotationValueFilter.java │ │ │ └── task/ │ │ │ ├── App.java │ │ │ ├── ArchInstanceMeta.java │ │ │ ├── ArchTask.java │ │ │ ├── ArchTaskFamilyMeta.java │ │ │ ├── ArchTaskInstance.java │ │ │ ├── InstanceEventType.java │ │ │ ├── ProcApp.java │ │ │ ├── Task.java │ │ │ ├── TaskApp.java │ │ │ ├── TaskExtraMeta.java │ │ │ ├── TaskFamilyMeta.java │ │ │ ├── 
TaskInstance.java │ │ │ ├── TaskInstanceExecState.java │ │ │ ├── TaskInstanceMeta.java │ │ │ ├── TaskInstanceStatus.java │ │ │ ├── Taskiom.java │ │ │ ├── Taskium.java │ │ │ ├── kom/ │ │ │ │ ├── KernelTaskConfig.java │ │ │ │ ├── TaskConfig.java │ │ │ │ ├── TaskFamilyNode.java │ │ │ │ ├── TaskInstrument.java │ │ │ │ ├── TaskMetaConstants.java │ │ │ │ ├── TaskPathSelector.java │ │ │ │ ├── UniformTaskInstrument.java │ │ │ │ ├── entity/ │ │ │ │ │ ├── AppElement.java │ │ │ │ │ ├── ArchElementNode.java │ │ │ │ │ ├── ElementNode.java │ │ │ │ │ ├── EntryNode.java │ │ │ │ │ ├── FolderElement.java │ │ │ │ │ ├── GenericAppElement.java │ │ │ │ │ ├── GenericNamespace.java │ │ │ │ │ ├── GenericTaskElement.java │ │ │ │ │ ├── Namespace.java │ │ │ │ │ ├── TaskElement.java │ │ │ │ │ └── TaskTreeNode.java │ │ │ │ ├── instance/ │ │ │ │ │ ├── GenericInstanceEntry.java │ │ │ │ │ ├── InstanceEntry.java │ │ │ │ │ ├── InstanceInstrument.java │ │ │ │ │ ├── KernelInstanceInstrument.java │ │ │ │ │ └── source/ │ │ │ │ │ └── InstanceNodeManipulator.java │ │ │ │ ├── marshaling/ │ │ │ │ │ ├── TaskInstrumentDecoder.java │ │ │ │ │ ├── TaskInstrumentEncoder.java │ │ │ │ │ ├── TaskJSONDecoder.java │ │ │ │ │ └── TaskJSONEncoder.java │ │ │ │ ├── operator/ │ │ │ │ │ ├── AppElementOperator.java │ │ │ │ │ ├── ArchElementOperator.java │ │ │ │ │ ├── ElementOperator.java │ │ │ │ │ ├── ElementOperatorFactory.java │ │ │ │ │ ├── GenericElementOperatorFactory.java │ │ │ │ │ ├── NamespaceOperator.java │ │ │ │ │ └── TaskElementOperator.java │ │ │ │ └── source/ │ │ │ │ ├── AppNodeManipulator.java │ │ │ │ ├── TaskMasterManipulator.java │ │ │ │ ├── TaskNamespaceManipulator.java │ │ │ │ └── TaskNodeManipulator.java │ │ │ └── marshal/ │ │ │ ├── TaskPriority.java │ │ │ ├── TaskScheduleCycle.java │ │ │ └── TaskScheduleType.java │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ ├── ioc/ │ │ │ ├── SystemTestIoC.java │ │ │ └── TestIoC.java │ │ ├── orchestration/ │ │ │ ├── SimpleExertium.java │ │ │ ├── SimpleParallelium.java │ │ │ ├── TestBasicTransaction.java │ │ │ └── TestInstructation.java │ │ └── servgram/ │ │ └── TestServgram.java │ ├── hydra-framework-service/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ ├── scenario/ │ │ │ ├── entity/ │ │ │ │ ├── GenericNamespaceNode.java │ │ │ │ ├── GenericNamespaceNodeMeta.java │ │ │ │ ├── GenericScenarioCommonData.java │ │ │ │ ├── NamespaceNode.java │ │ │ │ ├── NamespaceNodeMeta.java │ │ │ │ └── ScenarioCommonData.java │ │ │ ├── source/ │ │ │ │ ├── NamespaceNodeManipulator.java │ │ │ │ ├── NamespaceNodeMetaManipulator.java │ │ │ │ ├── ScenarioCommonDataManipulator.java │ │ │ │ └── ScenarioMasterManipulator.java │ │ │ └── tree/ │ │ │ ├── DistributedScenarioMetaTree.java │ │ │ └── GenericDistributedScenarioMetaTree.java │ │ └── service/ │ │ ├── Application.java │ │ ├── ArchService.java │ │ ├── ArchServiceFamilyMeta.java │ │ ├── ProcApplication.java │ │ ├── ScheduleType.java │ │ ├── Service.java │ │ ├── ServiceApplication.java │ │ ├── ServiceFamilyMeta.java │ │ ├── ServiceInstance.java │ │ ├── ServiceMeta.java │ │ ├── Serviciom.java │ │ ├── Servicium.java │ │ ├── Status.java │ │ ├── entity/ │ │ │ ├── BindUSII.java │ │ │ └── USII.java │ │ └── kom/ │ │ ├── GenericNamespaceRules.java │ │ ├── KernelServiceConfig.java │ │ ├── NamespaceRules.java │ │ ├── ServiceConfig.java │ │ ├── ServiceFamilyNode.java │ │ ├── ServiceInstrument.java │ │ ├── ServicePathSelector.java │ │ ├── UniformServiceInstrument.java │ │ ├── entity/ │ │ │ ├── ApplicationElement.java │ │ │ 
├── ArchElementNode.java │ │ │ ├── ArchServoElement.java │ │ │ ├── CommonMeta.java │ │ │ ├── ElementNode.java │ │ │ ├── FolderElement.java │ │ │ ├── GenericApplicationElement.java │ │ │ ├── GenericCommonMeta.java │ │ │ ├── GenericNamespace.java │ │ │ ├── GenericServiceElement.java │ │ │ ├── GenericServiceInstanceEntity.java │ │ │ ├── Namespace.java │ │ │ ├── ServiceElement.java │ │ │ ├── ServiceInstanceEntry.java │ │ │ ├── ServiceTreeNode.java │ │ │ └── ServoElement.java │ │ ├── marshaling/ │ │ │ ├── ServiceInstrumentDecoder.java │ │ │ ├── ServiceInstrumentEncoder.java │ │ │ ├── ServiceJSONDecoder.java │ │ │ └── ServiceJSONEncoder.java │ │ ├── operator/ │ │ │ ├── ApplicationElementOperator.java │ │ │ ├── ArchElementOperator.java │ │ │ ├── ElementOperator.java │ │ │ ├── ElementOperatorFactory.java │ │ │ ├── GenericElementOperatorFactory.java │ │ │ ├── NamespaceOperator.java │ │ │ └── ServiceElementOperator.java │ │ └── source/ │ │ ├── ApplicationMetaManipulator.java │ │ ├── ApplicationNodeManipulator.java │ │ ├── NamespaceRulesManipulator.java │ │ ├── NodeMetaManipulator.java │ │ ├── ServiceInstanceManipulator.java │ │ ├── ServiceMasterManipulator.java │ │ ├── ServiceMetaManipulator.java │ │ ├── ServiceNamespaceManipulator.java │ │ └── ServiceNodeManipulator.java │ ├── hydra-framework-storage/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ └── storage/ │ │ ├── ArchFileObjectMeta.java │ │ ├── ArchStorageConfig.java │ │ ├── CheckedFile.java │ │ ├── RandomAccessChanface.java │ │ ├── ReadChannelRecalled.java │ │ ├── StorageConfig.java │ │ ├── StorageConstants.java │ │ ├── StorageExportIORequest.java │ │ ├── StorageIOResponse.java │ │ ├── StorageInstructRequest.java │ │ ├── StorageInstructResponse.java │ │ ├── StorageNaming.java │ │ ├── StorageReceiveIORequest.java │ │ ├── TitanStorageExportIORequest.java │ │ ├── TitanStorageIOResponse.java │ │ ├── TitanStorageNaming.java │ │ ├── TitanStorageReceiveIORequest.java │ │ ├── bucket/ │ │ │ ├── BucketInstrument.java │ │ │ ├── TitanBucketInstrument.java │ │ │ ├── entity/ │ │ │ │ ├── Bucket.java │ │ │ │ ├── GenericBucket.java │ │ │ │ ├── GenericSite.java │ │ │ │ ├── GenericSiteNode.java │ │ │ │ ├── Site.java │ │ │ │ └── SiteNode.java │ │ │ └── source/ │ │ │ ├── BucketManipulator.java │ │ │ ├── BucketMasterManipulator.java │ │ │ ├── SiteManipulator.java │ │ │ └── SiteNodeManipulator.java │ │ ├── file/ │ │ │ ├── ClusterSegmentNaming.java │ │ │ ├── FileConstants.java │ │ │ ├── FileSystemConfig.java │ │ │ ├── KOFSClusterSegmentNaming.java │ │ │ ├── KOMFileSystem.java │ │ │ ├── KernelFileSystemConfig.java │ │ │ ├── UniformObjectFileSystem.java │ │ │ ├── Verification.java │ │ │ ├── builder/ │ │ │ │ ├── ComponentUOFSBuilder.java │ │ │ │ ├── Feature.java │ │ │ │ ├── UOFSBuilder.java │ │ │ │ └── UOFSComponentor.java │ │ │ ├── cache/ │ │ │ │ ├── DefaultCacheConstants.java │ │ │ │ ├── FileSystemCacheConfig.java │ │ │ │ └── MappedFileSystemCacheConfig.java │ │ │ ├── entity/ │ │ │ │ ├── ArcReparseSemanticNode.java │ │ │ │ ├── ArchCluster.java │ │ │ │ ├── ArchElementNode.java │ │ │ │ ├── Cluster.java │ │ │ │ ├── ClusterPage.java │ │ │ │ ├── ClusterPage64.java │ │ │ │ ├── DirectlyExternalSymbolic.java │ │ │ │ ├── ElementNode.java │ │ │ │ ├── ExternalSymbolic.java │ │ │ │ ├── FSNodeAllotment.java │ │ │ │ ├── FileMeta.java │ │ │ │ ├── FileNode.java │ │ │ │ ├── FileSystemAttributes.java │ │ │ │ ├── FileTreeNode.java │ │ │ │ ├── Folder.java │ │ │ │ ├── FolderMeta.java │ │ │ │ ├── GenericExternalSymbolic.java │ │ 
│ │ ├── GenericFSNodeAllotment.java │ │ │ │ ├── GenericFileMeta.java │ │ │ │ ├── GenericFileNode.java │ │ │ │ ├── GenericFolder.java │ │ │ │ ├── GenericFolderMeta.java │ │ │ │ ├── GenericLocalCluster.java │ │ │ │ ├── GenericLocalClusterMeta.java │ │ │ │ ├── GenericRemoteCluster.java │ │ │ │ ├── GenericSymbolic.java │ │ │ │ ├── GenericSymbolicMeta.java │ │ │ │ ├── LocalCluster.java │ │ │ │ ├── LocalClusterMeta.java │ │ │ │ ├── RemoteCluster.java │ │ │ │ ├── ReparseSemanticNode.java │ │ │ │ ├── Symbolic.java │ │ │ │ └── SymbolicMeta.java │ │ │ ├── external/ │ │ │ │ ├── ArchNativeExternalFileObject.java │ │ │ │ ├── ExternalFile.java │ │ │ │ ├── ExternalFileObject.java │ │ │ │ ├── ExternalFileSystemInstrument.java │ │ │ │ ├── ExternalFolder.java │ │ │ │ ├── ExternalSymbolicSelector.java │ │ │ │ ├── GenericNativeExternalFile.java │ │ │ │ ├── GenericNativeExternalFolder.java │ │ │ │ ├── KenExternalFileSystemInstrument.java │ │ │ │ └── KenExternalSymbolicSelector.java │ │ │ ├── marshaling/ │ │ │ │ ├── ClusterGroup.java │ │ │ │ └── StripedClusterGroup.java │ │ │ ├── operator/ │ │ │ │ ├── ArchFileSystemOperator.java │ │ │ │ ├── FileSystemOperator.java │ │ │ │ ├── FileSystemOperatorFactory.java │ │ │ │ ├── GenericExternalSymbolicOperator.java │ │ │ │ ├── GenericFileOperator.java │ │ │ │ ├── GenericFileSystemOperatorFactory.java │ │ │ │ └── GenericFolderOperator.java │ │ │ ├── source/ │ │ │ │ ├── ExternalSymbolicManipulator.java │ │ │ │ ├── FileManipulator.java │ │ │ │ ├── FileMasterManipulator.java │ │ │ │ ├── FileMetaManipulator.java │ │ │ │ ├── FileSystemAttributeManipulator.java │ │ │ │ ├── FolderManipulator.java │ │ │ │ ├── FolderMetaManipulator.java │ │ │ │ ├── FolderVolumeMappingManipulator.java │ │ │ │ ├── LocalClusterManipulator.java │ │ │ │ ├── RemoteClusterManipulator.java │ │ │ │ ├── SymbolicManipulator.java │ │ │ │ └── SymbolicMetaManipulator.java │ │ │ └── transmit/ │ │ │ ├── UniformSourceLocator.java │ │ │ ├── exporter/ │ │ │ │ ├── ArchFileExporterEntity.java │ │ │ │ ├── FileExport.java │ │ │ │ ├── FileExport64.java │ │ │ │ ├── FileExportEntity.java │ │ │ │ ├── FileExportEntity64.java │ │ │ │ ├── TitanFileExport64.java │ │ │ │ └── TitanFileExportEntity64.java │ │ │ └── receiver/ │ │ │ ├── ArchFileReceiveEntity.java │ │ │ ├── FileReceive.java │ │ │ ├── FileReceive64.java │ │ │ ├── FileReceiveEntity.java │ │ │ ├── FileReceiveEntity64.java │ │ │ ├── TitanFileReceive64.java │ │ │ └── TitanFileReceiveEntity64.java │ │ ├── io/ │ │ │ ├── Chanface.java │ │ │ ├── ChanfaceReader.java │ │ │ ├── ChannelRecalled.java │ │ │ ├── TitanFileChannelChanface.java │ │ │ ├── TitanInputStreamChanface.java │ │ │ ├── TitanOutputStreamChanface.java │ │ │ ├── TitanRandomInputStreamAccessChanface.java │ │ │ └── UIOException.java │ │ ├── mfs/ │ │ │ ├── GenericNativeMFile.java │ │ │ ├── MFile.java │ │ │ ├── MappingFileSystem.java │ │ │ ├── NativeMFile.java │ │ │ └── NativeMappingFileSystem.java │ │ ├── natives/ │ │ │ └── NativeExternalFileSystems.java │ │ ├── policy/ │ │ │ ├── PolicyManage.java │ │ │ ├── TitanPolicyManage.java │ │ │ ├── chain/ │ │ │ │ ├── PolicyChain.java │ │ │ │ ├── VersionPolicyChain.java │ │ │ │ └── factory/ │ │ │ │ └── PolicyChainFactory.java │ │ │ ├── entity/ │ │ │ │ ├── GenericPolicy.java │ │ │ │ └── Policy.java │ │ │ └── source/ │ │ │ ├── PolicyFileMappingManipulator.java │ │ │ ├── PolicyManipulator.java │ │ │ └── PolicyMasterManipulator.java │ │ ├── remote/ │ │ │ ├── RemoteFSInstrument.java │ │ │ └── RemoteUOFSInstrument.java │ │ ├── version/ │ │ │ ├── TitanVersionManage.java │ │ │ ├── 
VersionManage.java │ │ │ ├── entity/ │ │ │ │ ├── TitanVersion.java │ │ │ │ ├── TitanVersionMapping.java │ │ │ │ ├── Version.java │ │ │ │ └── VersionMapping.java │ │ │ └── source/ │ │ │ ├── VersionManipulator.java │ │ │ ├── VersionMappingManipulator.java │ │ │ └── VersionMasterManipulator.java │ │ └── volume/ │ │ ├── IUnifiedTransmitConstructor.java │ │ ├── KernelVolumeConfig.java │ │ ├── TitanVolumeFile.java │ │ ├── UnifiedTransmitConstructor.java │ │ ├── UniformVolumeManager.java │ │ ├── VolumeConfig.java │ │ ├── VolumeConstants.java │ │ ├── VolumeFile.java │ │ ├── VolumeManager.java │ │ ├── VolumePoliceDog.java │ │ ├── entity/ │ │ │ ├── ArchExportEntity.java │ │ │ ├── ArchLogicVolume.java │ │ │ ├── ArchReceiveEntity.java │ │ │ ├── ArchVolume.java │ │ │ ├── DirectReceiver.java │ │ │ ├── Exporter.java │ │ │ ├── ExporterEntity.java │ │ │ ├── LogicVolume.java │ │ │ ├── MirroredVolume.java │ │ │ ├── MountPoint.java │ │ │ ├── PhysicalVolume.java │ │ │ ├── ReceiveEntity.java │ │ │ ├── Receiver.java │ │ │ ├── SimpleVolume.java │ │ │ ├── SpannedVolume.java │ │ │ ├── StripedReceiver.java │ │ │ ├── StripedVolume.java │ │ │ ├── TitanMountPoint.java │ │ │ ├── TitanVolumeAllotment.java │ │ │ ├── TitanVolumeCapacity64.java │ │ │ ├── Volume.java │ │ │ ├── VolumeAllotment.java │ │ │ ├── VolumeCapacity64.java │ │ │ └── local/ │ │ │ ├── LocalMirroredVolume.java │ │ │ ├── LocalPhysicalVolume.java │ │ │ ├── LocalSimpleVolume.java │ │ │ ├── LocalSpannedVolume.java │ │ │ ├── LocalStripedVolume.java │ │ │ ├── VolumeCapacity.java │ │ │ ├── mirrored/ │ │ │ │ └── TitanLocalMirroredVolume.java │ │ │ ├── physical/ │ │ │ │ ├── TitanLocalPhysicalVolume.java │ │ │ │ ├── export/ │ │ │ │ │ ├── DirectExport.java │ │ │ │ │ ├── DirectExport64.java │ │ │ │ │ ├── DirectExportEntity.java │ │ │ │ │ ├── DirectExportEntity64.java │ │ │ │ │ ├── TitanDirectExport64.java │ │ │ │ │ └── TitanDirectExportEntity64.java │ │ │ │ └── receive/ │ │ │ │ ├── DirectReceive.java │ │ │ │ ├── DirectReceive64.java │ │ │ │ ├── DirectReceiveEntity.java │ │ │ │ ├── DirectReceiveEntity64.java │ │ │ │ ├── TitanDirectReceive64.java │ │ │ │ └── TitanDirectReceiveEntity64.java │ │ │ ├── simple/ │ │ │ │ ├── TitanLocalSimpleVolume.java │ │ │ │ ├── export/ │ │ │ │ │ ├── SimpleExport.java │ │ │ │ │ ├── SimpleExport64.java │ │ │ │ │ ├── SimpleExportEntity.java │ │ │ │ │ ├── SimpleExportEntity64.java │ │ │ │ │ ├── TitanSimpleExport64.java │ │ │ │ │ └── TitanSimpleExportEntity64.java │ │ │ │ └── recevice/ │ │ │ │ ├── SimpleReceive.java │ │ │ │ ├── SimpleReceive64.java │ │ │ │ ├── SimpleReceiveEntity.java │ │ │ │ ├── SimpleReceiveEntity64.java │ │ │ │ ├── SimpleReceiver.java │ │ │ │ ├── SimpleReceiverEntity.java │ │ │ │ ├── TitanSimpleReceive64.java │ │ │ │ ├── TitanSimpleReceiveEntity64.java │ │ │ │ ├── channel/ │ │ │ │ │ ├── SimpleChannelReceiver.java │ │ │ │ │ ├── SimpleChannelReceiver64.java │ │ │ │ │ ├── SimpleChannelReceiverEntity.java │ │ │ │ │ ├── SimpleChannelReceiverEntity64.java │ │ │ │ │ ├── TitanSimpleChannelReceiver64.java │ │ │ │ │ └── TitanSimpleChannelReceiverEntity64.java │ │ │ │ └── stream/ │ │ │ │ ├── SimpleStreamReceiveEntity.java │ │ │ │ ├── SimpleStreamReceiveEntity64.java │ │ │ │ ├── SimpleStreamReceiver.java │ │ │ │ ├── SimpleStreamReceiver64.java │ │ │ │ ├── TitanSimpleStreamReceive64.java │ │ │ │ └── TitanSimpleStreamReceiveEntity64.java │ │ │ ├── spanned/ │ │ │ │ ├── TitanLocalSpannedVolume.java │ │ │ │ ├── export/ │ │ │ │ │ ├── SpannedExport.java │ │ │ │ │ ├── SpannedExport64.java │ │ │ │ │ ├── SpannedExportEntity.java │ │ │ │ │ ├── 
SpannedExportEntity64.java │ │ │ │ │ ├── TitanSpannedExport64.java │ │ │ │ │ └── TitanSpannedExportEntity64.java │ │ │ │ └── receive/ │ │ │ │ ├── SpannedReceive.java │ │ │ │ ├── SpannedReceive64.java │ │ │ │ ├── SpannedReceiveEntity.java │ │ │ │ ├── SpannedReceiveEntity64.java │ │ │ │ ├── TitanSpannedReceive64.java │ │ │ │ └── TitanSpannedReceiveEntity64.java │ │ │ └── striped/ │ │ │ ├── BufferOutMate.java │ │ │ ├── BufferOutStatus.java │ │ │ ├── BufferWriteStatus.java │ │ │ ├── CacheBlock.java │ │ │ ├── CacheBlockStatus.java │ │ │ ├── LocalStripedTaskThread.java │ │ │ ├── ReceiveBufferInStatus.java │ │ │ ├── ReceiveBufferOutStatus.java │ │ │ ├── StripBufferInJob.java │ │ │ ├── StripBufferOutJob.java │ │ │ ├── StripBufferStatus.java │ │ │ ├── StripCacheBlock.java │ │ │ ├── StripChannelReceiverJob.java │ │ │ ├── StripExportJob.java │ │ │ ├── StripLockEntity.java │ │ │ ├── StripReceiveBufferInJob.java │ │ │ ├── StripReceiveBufferOutJob.java │ │ │ ├── StripReceiverJob.java │ │ │ ├── StripTerminalStateRecord.java │ │ │ ├── TerminalStateRecord.java │ │ │ ├── TitanLocalStripedVolume.java │ │ │ ├── TitanStripBufferInJob.java │ │ │ ├── TitanStripBufferOutJob.java │ │ │ ├── TitanStripLockEntity.java │ │ │ ├── TitanStripReceiveBufferInJob.java │ │ │ ├── TitanStripReceiveBufferOutJob.java │ │ │ ├── TitanStripReceiverJob.java │ │ │ ├── export/ │ │ │ │ ├── StripedExport.java │ │ │ │ ├── StripedExport64.java │ │ │ │ ├── StripedExportEntity.java │ │ │ │ ├── StripedExportEntity64.java │ │ │ │ ├── TitanStripedExport64.java │ │ │ │ ├── TitanStripedExportEntity64.java │ │ │ │ └── channel/ │ │ │ │ ├── StripedChannelExport.java │ │ │ │ ├── StripedChannelExport64.java │ │ │ │ ├── StripedChannelExportEntity.java │ │ │ │ ├── StripedChannelExportEntity64.java │ │ │ │ ├── TitanStripedChannelExport64.java │ │ │ │ └── TitanStripedChannelExportEntity64.java │ │ │ └── receive/ │ │ │ ├── StripedReceive.java │ │ │ ├── StripedReceive64.java │ │ │ ├── StripedReceiveEntity.java │ │ │ ├── StripedReceiveEntity64.java │ │ │ ├── StripedReceiver.java │ │ │ ├── StripedReceiverEntity.java │ │ │ ├── TitanStripedReceive64.java │ │ │ ├── TitanStripedReceiveEntity64.java │ │ │ ├── channnel/ │ │ │ │ ├── StripedChannelReceiver.java │ │ │ │ ├── StripedChannelReceiver64.java │ │ │ │ ├── StripedChannelReceiverEntity.java │ │ │ │ ├── StripedChannelReceiverEntity64.java │ │ │ │ ├── TitanStripedChannelReceiver64.java │ │ │ │ └── TitanStripedChannelReceiverEntity64.java │ │ │ └── stream/ │ │ │ ├── StripedStreamReceive.java │ │ │ ├── StripedStreamReceive64.java │ │ │ ├── StripedStreamReceiveEntity.java │ │ │ ├── StripedStreamReceiveEntity64.java │ │ │ ├── TitanStripedStreamReceive64.java │ │ │ └── TitanStripedStreamReceiveEntity64.java │ │ ├── kvfs/ │ │ │ ├── ExecutorPool.java │ │ │ ├── KenVolumeFileSystem.java │ │ │ ├── KenusPool.java │ │ │ └── OnVolumeFileSystem.java │ │ ├── operator/ │ │ │ ├── ArchVolumeOperator.java │ │ │ ├── SimpleVolumeOperator.java │ │ │ ├── SpannedVolumeOperator.java │ │ │ ├── StripedVolumeOperator.java │ │ │ ├── TitanVolumeOperatorFactory.java │ │ │ ├── VolumeOperator.java │ │ │ └── VolumeOperatorFactory.java │ │ ├── policy/ │ │ │ ├── Dummy.java │ │ │ └── strip/ │ │ │ ├── ArchSizingMatcher.java │ │ │ ├── DynamicStripSizingPolicy.java │ │ │ ├── GenericDynamicStripSizingPolicy.java │ │ │ ├── MegaFileSizingMatcher64.java │ │ │ ├── SizingMatcher.java │ │ │ ├── SmallFileSizingMatcher64.java │ │ │ └── TinyFileSizingMatcher64.java │ │ ├── runtime/ │ │ │ ├── ArchStripedTaskThread.java │ │ │ ├── ArchTaskThread.java │ │ │ ├── 
MasterVolumeGram.java │ │ │ ├── TaskThread.java │ │ │ ├── VolumeGram.java │ │ │ ├── VolumeJob.java │ │ │ └── VolumeJobCompromiseException.java │ │ └── source/ │ │ ├── LineSegmentManipulator.java │ │ ├── LogicVolumeManipulator.java │ │ ├── MirroredVolumeManipulator.java │ │ ├── MountPointManipulator.java │ │ ├── PhysicalVolumeManipulator.java │ │ ├── SQLiteVolumeManipulator.java │ │ ├── SimpleVolumeManipulator.java │ │ ├── SpannedVolumeManipulator.java │ │ ├── StripedVolumeManipulator.java │ │ ├── VolumeAllocateManipulator.java │ │ ├── VolumeCapacityManipulator.java │ │ ├── VolumeMasterManipulator.java │ │ └── VolumeTreeManipulator.java │ ├── hydra-kom-default-driver/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── hydra/ │ │ │ ├── FileOwnerMapper.java │ │ │ ├── account/ │ │ │ │ └── ibatis/ │ │ │ │ ├── AuthorizationMapper.java │ │ │ │ ├── CredentialMapper.java │ │ │ │ ├── DomainNodeMapper.java │ │ │ │ ├── GroupNodeMapper.java │ │ │ │ ├── PrivilegeMapper.java │ │ │ │ ├── RoleMapper.java │ │ │ │ ├── UserNodeMapper.java │ │ │ │ ├── UserOwnerMapper.java │ │ │ │ ├── UserPathCacheMapper.java │ │ │ │ ├── UserTreeMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── UserMappingDriver.java │ │ │ │ ├── UserMasterManipulatorImpl.java │ │ │ │ └── UserMasterTreeManipulatorImpl.java │ │ │ ├── bucket/ │ │ │ │ └── ibatis/ │ │ │ │ ├── BucketMapping.java │ │ │ │ ├── SiteMapping.java │ │ │ │ ├── SiteNodeMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── BucketMappingDriver.java │ │ │ │ └── BucketMasterManipulatorImpl.java │ │ │ ├── deploy/ │ │ │ │ └── ibatis/ │ │ │ │ ├── ClusterNodeMapper.java │ │ │ │ ├── ContainerElementMapper.java │ │ │ │ ├── DeployNamespaceMapper.java │ │ │ │ ├── DeployNodeMapper.java │ │ │ │ ├── DeployNodeMetaMapper.java │ │ │ │ ├── DeployNodeOwnerMapper.java │ │ │ │ ├── DeployNodePathCacheMapper.java │ │ │ │ ├── DeployServiceInsMappingMapper.java │ │ │ │ ├── DeployTreeMapper.java │ │ │ │ ├── PhysicalHostMapper.java │ │ │ │ ├── QuickElementMapper.java │ │ │ │ ├── VirtualMachineMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── DeployMappingDriver.java │ │ │ │ ├── DeployMasterManipulatorImpl.java │ │ │ │ └── DeployMasterTreeManipulatorImpl.java │ │ │ ├── entity/ │ │ │ │ └── ibatis/ │ │ │ │ ├── GUID128TypeHandler.java │ │ │ │ ├── GUID72TypeHandler.java │ │ │ │ ├── GUIDTypeHandler.java │ │ │ │ ├── UOITypeHandler.java │ │ │ │ ├── URITypeHandler.java │ │ │ │ └── hydranium/ │ │ │ │ └── ArchMappingDriver.java │ │ │ ├── file/ │ │ │ │ └── ibatis/ │ │ │ │ ├── ExternalSymbolicMapper.java │ │ │ │ ├── FileMapper.java │ │ │ │ ├── FileMetaMapper.java │ │ │ │ ├── FileOwnerMapper.java │ │ │ │ ├── FilePathCacheMapper.java │ │ │ │ ├── FileSystemAttributeMapper.java │ │ │ │ ├── FileTreeMapper.java │ │ │ │ ├── FolderMapper.java │ │ │ │ ├── FolderMetaMapper.java │ │ │ │ ├── FolderVolumeMappingMapper.java │ │ │ │ ├── LocalClusterMapper.java │ │ │ │ ├── RemoteClusterMapper.java │ │ │ │ ├── SymbolicMapper.java │ │ │ │ ├── SymbolicMetaMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── FileMappingDriver.java │ │ │ │ ├── FileMasterManipulatorImpl.java │ │ │ │ └── FileMasterTreeManipulatorImpl.java │ │ │ ├── layer/ │ │ │ │ └── ibatis/ │ │ │ │ ├── LayerCachePathMapper.java │ │ │ │ ├── LayerHandleMapper.java │ │ │ │ ├── LayerMapper.java │ │ │ │ ├── LayerOwnerMapper.java │ │ │ │ ├── LayerTreeMapper.java │ │ │ │ ├── NamespaceMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── LayerMappingDriver.java │ │ │ │ ├── LayerMasterManipulatorImpl.java │ │ │ │ └── LayerMasterTreeManipulatorImpl.java │ 
│ │ ├── policy/ │ │ │ │ └── ibatis/ │ │ │ │ ├── PolicyFileMappingMapper.java │ │ │ │ ├── PolicyMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── PolicyMappingDriver.java │ │ │ │ └── PolicyMasterManipulatorImpl.java │ │ │ ├── queue/ │ │ │ │ └── ibatis/ │ │ │ │ ├── AtlasExecuteQueueMapper.java │ │ │ │ ├── AtlasStratumQueueMapper.java │ │ │ │ ├── QueueExistMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── QueueMappingDriver.java │ │ │ │ └── QueueMasterManipulatorImpl.java │ │ │ ├── registry/ │ │ │ │ └── ibatis/ │ │ │ │ ├── RegistryAttributesMapper.java │ │ │ │ ├── RegistryConfigNodeMapper.java │ │ │ │ ├── RegistryNSNodeMapper.java │ │ │ │ ├── RegistryNSNodeMetaMapper.java │ │ │ │ ├── RegistryNodeMetaMapper.java │ │ │ │ ├── RegistryNodeOwnerMapper.java │ │ │ │ ├── RegistryNodePathCacheMapper.java │ │ │ │ ├── RegistryPropertiesMapper.java │ │ │ │ ├── RegistryTextFileMapper.java │ │ │ │ ├── RegistryTreeMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── RegistryMappingDriver.java │ │ │ │ ├── RegistryMasterManipulatorImpl.java │ │ │ │ └── RegistryMasterTreeManipulatorImpl.java │ │ │ ├── scenario/ │ │ │ │ └── ibatis/ │ │ │ │ ├── ScenarioCommonDataMapper.java │ │ │ │ ├── ScenarioNamespaceNodeMapper.java │ │ │ │ ├── ScenarioNamespaceNodeMetaMapper.java │ │ │ │ ├── ScenarioNodeOwnerMapper.java │ │ │ │ ├── ScenarioNodePathCacheMapper.java │ │ │ │ ├── ScenarioTreeMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── ScenarioMappingDriver.java │ │ │ │ ├── ScenarioMasterManipulatorImpl.java │ │ │ │ └── ScenarioMasterTreeManipulatorImpl.java │ │ │ ├── service/ │ │ │ │ └── ibatis/ │ │ │ │ ├── AppNodeMetaMapper.java │ │ │ │ ├── ApplicationNodeMapper.java │ │ │ │ ├── NamespaceRulesMapper.java │ │ │ │ ├── ServiceInstanceMapper.java │ │ │ │ ├── ServiceMetaMapper.java │ │ │ │ ├── ServiceNamespaceMapper.java │ │ │ │ ├── ServiceNodeMapper.java │ │ │ │ ├── ServiceNodeMetaMapper.java │ │ │ │ ├── ServiceNodeOwnerMapper.java │ │ │ │ ├── ServicePathCacheMapper.java │ │ │ │ ├── ServiceTreeMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── ServiceMappingDriver.java │ │ │ │ ├── ServiceMasterManipulatorImpl.java │ │ │ │ └── ServiceMasterTreeManipulatorImpl.java │ │ │ ├── task/ │ │ │ │ └── ibatis/ │ │ │ │ ├── AppNodeMapper.java │ │ │ │ ├── InstanceNodeMapper.java │ │ │ │ ├── TaskNamespaceMapper.java │ │ │ │ ├── TaskNodeMapper.java │ │ │ │ ├── TaskNodeOwnerMapper.java │ │ │ │ ├── TaskPathCacheMapper.java │ │ │ │ ├── TaskTreeMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── TaskMappingDriver.java │ │ │ │ ├── TaskMasterManipulatorImpl.java │ │ │ │ └── TaskMasterTreeManipulatorImpl.java │ │ │ ├── version/ │ │ │ │ └── ibatis/ │ │ │ │ ├── VersionMapper.java │ │ │ │ ├── VersionMappingMapper.java │ │ │ │ └── hydranium/ │ │ │ │ ├── VersionMappingDriver.java │ │ │ │ └── VersionMasterManipulatorImpl.java │ │ │ └── volume/ │ │ │ └── ibatis/ │ │ │ ├── LineSegmentMapper.java │ │ │ ├── MirroredVolumeMapper.java │ │ │ ├── MountPointMapper.java │ │ │ ├── PhysicalVolumeMapper.java │ │ │ ├── PrimeLogicVolumeMapper.java │ │ │ ├── SQLiteVolumeMapper.java │ │ │ ├── SimpleVolumeMapper.java │ │ │ ├── SpannedVolumeMapper.java │ │ │ ├── StripedVolumeMapper.java │ │ │ ├── VolumeAllocateMapper.java │ │ │ ├── VolumeCachePathMapper.java │ │ │ ├── VolumeCapacityMapper.java │ │ │ ├── VolumeOwnerMapper.java │ │ │ ├── VolumeTreeMapper.java │ │ │ └── hydranium/ │ │ │ ├── VolumeMappingDriver.java │ │ │ ├── VolumeMasterManipulatorImpl.java │ │ │ └── VolumeMasterTreeManipulatorImpl.java │ │ └── resources/ │ │ └── mapper/ │ │ └── kernel/ │ │ └── task/ │ │ ├── InstanceNodeMapper.xml │ │ 
└── TaskNodeMapper.xml │ ├── hydra-lib-grpc-service-sdk/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── hydra/ │ │ │ ├── grpc/ │ │ │ │ ├── client/ │ │ │ │ │ ├── GrpcAppointClient.java │ │ │ │ │ └── GrpcClientConfig.java │ │ │ │ └── server/ │ │ │ │ ├── GrpcAppointServer.java │ │ │ │ ├── GrpcProcess.java │ │ │ │ └── GrpcServerConfig.java │ │ │ └── service/ │ │ │ └── registry/ │ │ │ └── grpc/ │ │ │ ├── client/ │ │ │ │ └── GrpcServiceClient.java │ │ │ └── server/ │ │ │ ├── ClientMetaDataInterceptor.java │ │ │ ├── GrpcControlStreamService.java │ │ │ ├── GrpcServiceAppointServer.java │ │ │ ├── GrpcServiceClientile.java │ │ │ ├── GrpcServiceLifecycleService.java │ │ │ ├── GrpcServiceMetaService.java │ │ │ ├── GrpcSession.java │ │ │ ├── cs/ │ │ │ │ ├── ControlMessage.java │ │ │ │ ├── ControlMessageOrBuilder.java │ │ │ │ ├── ControlStreamGrpc.java │ │ │ │ └── ControlStreamOuterClass.java │ │ │ ├── iface/ │ │ │ │ ├── ServiceLifecycleImpl.java │ │ │ │ └── ServiceMetaManipulationIfaceImpl.java │ │ │ ├── lifecycle/ │ │ │ │ ├── BoolReply.java │ │ │ │ ├── BoolReplyOrBuilder.java │ │ │ │ ├── ClientIdRequest.java │ │ │ │ ├── ClientIdRequestOrBuilder.java │ │ │ │ ├── CountReply.java │ │ │ │ ├── CountReplyOrBuilder.java │ │ │ │ ├── CreateInstanceMetaRequest.java │ │ │ │ ├── CreateInstanceMetaRequestOrBuilder.java │ │ │ │ ├── EmptyReply.java │ │ │ │ ├── EmptyReplyOrBuilder.java │ │ │ │ ├── EmptyRequest.java │ │ │ │ ├── EmptyRequestOrBuilder.java │ │ │ │ ├── InstanceIdRequest.java │ │ │ │ ├── InstanceIdRequestOrBuilder.java │ │ │ │ ├── RegisterServiceReply.java │ │ │ │ ├── RegisterServiceReplyOrBuilder.java │ │ │ │ ├── RegisterServiceRequest.java │ │ │ │ ├── RegisterServiceRequestOrBuilder.java │ │ │ │ ├── ServiceIdRequest.java │ │ │ │ ├── ServiceIdRequestOrBuilder.java │ │ │ │ ├── ServiceLifecycleGrpc.java │ │ │ │ └── ServiceLifecycleProto.java │ │ │ └── meta/ │ │ │ ├── ClientIdRequest.java │ │ │ ├── ClientIdRequestOrBuilder.java │ │ │ ├── CreateNewServiceRequest.java │ │ │ ├── CreateNewServiceRequestOrBuilder.java │ │ │ ├── EvalRequest.java │ │ │ ├── EvalRequestOrBuilder.java │ │ │ ├── GuidRequest.java │ │ │ ├── GuidRequestOrBuilder.java │ │ │ ├── PathRequest.java │ │ │ ├── PathRequestOrBuilder.java │ │ │ ├── ServiceIdRequest.java │ │ │ ├── ServiceIdRequestOrBuilder.java │ │ │ ├── ServiceMetaDTO.java │ │ │ ├── ServiceMetaDTOListReply.java │ │ │ ├── ServiceMetaDTOListReplyOrBuilder.java │ │ │ ├── ServiceMetaDTOOrBuilder.java │ │ │ ├── ServiceMetaDTOReply.java │ │ │ ├── ServiceMetaDTOReplyOrBuilder.java │ │ │ ├── ServiceMetaGrpc.java │ │ │ ├── ServiceMetaProto.java │ │ │ ├── StringReply.java │ │ │ └── StringReplyOrBuilder.java │ │ └── proto/ │ │ ├── control_stream.proto │ │ ├── service_lifecycle.proto │ │ └── service_meta.proto │ ├── hydra-lib-thrift-sdk/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ └── pinecone/ │ │ │ │ └── hydra/ │ │ │ │ └── thrift/ │ │ │ │ ├── GenericThriftServiceRegistry.java │ │ │ │ ├── MCConnectionArguments.java │ │ │ │ ├── SharedConnectionArguments.java │ │ │ │ ├── ThriftServiceRegistry.java │ │ │ │ ├── client/ │ │ │ │ │ ├── GenericMultiplexedThriftClient.java │ │ │ │ │ ├── GenericThriftClient.java │ │ │ │ │ ├── MultiplexedThriftClient.java │ │ │ │ │ └── ThriftClient.java │ │ │ │ ├── server/ │ │ │ │ │ ├── GenericThriftServer.java │ │ │ │ │ ├── MultiplexedServer.java │ │ │ │ │ ├── ServerConnectArguments.java │ │ │ │ │ ├── ServerConnectionArguments.java │ │ │ │ │ └── ThriftServer.java │ │ │ │ └── 
service/ │ │ │ │ ├── HelloWorldService.java │ │ │ │ └── impl/ │ │ │ │ └── HelloWorldServiceImpl.java │ │ │ └── resources/ │ │ │ └── thrift/ │ │ │ └── hellow.thrift │ │ └── test/ │ │ └── java/ │ │ ├── com/ │ │ │ └── thrift/ │ │ │ ├── TestThriftClient.java │ │ │ └── TestThriftService.java │ │ └── org/ │ │ └── example/ │ │ └── AppTest.java │ ├── hydra-lib-uofs-cache/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── hydra/ │ │ │ └── storage/ │ │ │ └── file/ │ │ │ └── UOFSCacheComponentor.java │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── example/ │ │ └── AppTest.java │ ├── hydra-message-broadcast/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ └── umb/ │ │ ├── UMBBytesDecoder.java │ │ ├── UMBClientException.java │ │ ├── UMBHead.java │ │ ├── UMBPHeadV1.java │ │ ├── UMBServiceException.java │ │ ├── UMCPackageMessageEncoder.java │ │ ├── UlfMBInformMessage.java │ │ ├── UlfPackageMessageEncoder.java │ │ ├── UlfPackageMessageHandler.java │ │ ├── broadcast/ │ │ │ ├── ArchUnidirectionalMCProtocol.java │ │ │ ├── BroadcastConsumer.java │ │ │ ├── BroadcastControlAgent.java │ │ │ ├── BroadcastControlConsumer.java │ │ │ ├── BroadcastControlNode.java │ │ │ ├── BroadcastControlProducer.java │ │ │ ├── BroadcastNode.java │ │ │ ├── BroadcastPollConsumer.java │ │ │ ├── BroadcastProducer.java │ │ │ ├── DistributedConsumer.java │ │ │ ├── GenericUNT.java │ │ │ ├── PollResult.java │ │ │ ├── PushConsumer.java │ │ │ ├── UMCBroadcastConsumer.java │ │ │ ├── UMCBroadcastNode.java │ │ │ ├── UMCBroadcastProducer.java │ │ │ ├── UNT.java │ │ │ ├── converter/ │ │ │ │ ├── GenericResultBytesConverter.java │ │ │ │ └── ResultBytesConverter.java │ │ │ └── proxy/ │ │ │ ├── GenericIfaceProxyFactory.java │ │ │ └── IfaceProxyFactory.java │ │ ├── kafka/ │ │ │ ├── KBroadcastPollConsumer.java │ │ │ ├── KBroadcastProducer.java │ │ │ ├── KClient.java │ │ │ ├── KConfig.java │ │ │ ├── KafkaClient.java │ │ │ ├── KafkaConfig.java │ │ │ ├── KafkaConstants.java │ │ │ ├── KafkaMedium.java │ │ │ ├── KafkaPollResult.java │ │ │ ├── KafkaReceiver.java │ │ │ ├── KafkaTransmit.java │ │ │ ├── UlfBroadcastPollConsumer.java │ │ │ ├── UlfBroadcastProducer.java │ │ │ ├── UlfKafkaClient.java │ │ │ ├── WolfKafkaConsumer.java │ │ │ ├── WolfKafkaProducer.java │ │ │ └── WolfMCKafkaClient.java │ │ ├── rabbit/ │ │ │ ├── RabbitMQClient.java │ │ │ └── RabbitMedium.java │ │ ├── rocket/ │ │ │ ├── ArchMQConsumer.java │ │ │ ├── RocketClient.java │ │ │ ├── RocketConfig.java │ │ │ ├── RocketConstants.java │ │ │ ├── RocketMQClient.java │ │ │ ├── RocketMQConfig.java │ │ │ ├── RocketMedium.java │ │ │ ├── RocketReceiver.java │ │ │ ├── RocketTransmit.java │ │ │ ├── UlfBroadcastProducer.java │ │ │ ├── UlfPushConsumer.java │ │ │ ├── UlfRocketClient.java │ │ │ ├── WolfBroadcastProducer.java │ │ │ ├── WolfMCRocketClient.java │ │ │ └── WolfPushConsumer.java │ │ └── wolf/ │ │ ├── ArchBroadcastControlAgent.java │ │ ├── ArchUlfBroadcastControlAgent.java │ │ ├── UlfBroadcastControlAgent.java │ │ ├── UlfBroadcastControlNode.java │ │ ├── UlfBroadcastControlProducer.java │ │ ├── WolfMCBClient.java │ │ ├── WolfMCBConsumer.java │ │ └── WolfMCBProducer.java │ ├── hydra-message-control/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── hydra/ │ │ │ ├── appoints/ │ │ │ │ └── AppointNodus.java │ │ │ ├── uma/ │ │ │ │ ├── ActingDuplexExpress.java │ │ │ │ ├── AppointClient.java │ │ │ │ ├── AppointNode.java │ │ │ │ ├── 
AppointServer.java │ │ │ │ ├── ArchAppointNode.java │ │ │ │ ├── ArchDuplexExpress.java │ │ │ │ ├── ArchUlfAppointNode.java │ │ │ │ ├── AsynMsgHandler.java │ │ │ │ ├── AsynReturnHandler.java │ │ │ │ ├── DuplexAppointClient.java │ │ │ │ ├── DuplexAppointNode.java │ │ │ │ ├── DuplexAppointServer.java │ │ │ │ ├── HuskyDuplexExpress.java │ │ │ │ ├── UlfAppointClient.java │ │ │ │ ├── UlfAppointNode.java │ │ │ │ ├── UlfAppointServer.java │ │ │ │ ├── UlfDuplexAppointClient.java │ │ │ │ ├── UlfDuplexAppointServer.java │ │ │ │ ├── pool/ │ │ │ │ │ └── GenericMultiClientChannelRegistry.java │ │ │ │ ├── proxy/ │ │ │ │ │ ├── GenericIfaceProxyFactory.java │ │ │ │ │ ├── GenericPassiveClientIfaceProxyFactory.java │ │ │ │ │ ├── IfaceProxyFactory.java │ │ │ │ │ └── PassiveClientIfaceProxyFactory.java │ │ │ │ └── wolf/ │ │ │ │ ├── WolfAppointClient.java │ │ │ │ ├── WolfAppointHelper.java │ │ │ │ ├── WolfAppointServer.java │ │ │ │ ├── WolvesAppointClient.java │ │ │ │ └── WolvesAppointServer.java │ │ │ ├── umc/ │ │ │ │ ├── io/ │ │ │ │ │ ├── ChannelInputStream.java │ │ │ │ │ ├── ChannelOutputStream.java │ │ │ │ │ ├── IOCounter.java │ │ │ │ │ └── IOLoadBalanceStrategy.java │ │ │ │ ├── msg/ │ │ │ │ │ ├── AbstractUMCHead.java │ │ │ │ │ ├── ArchBytesTransferMessage.java │ │ │ │ │ ├── ArchInformCMessage.java │ │ │ │ │ ├── ArchInformMessage.java │ │ │ │ │ ├── ArchStreamTransferMessage.java │ │ │ │ │ ├── ArchUMCMessage.java │ │ │ │ │ ├── ArchUMCProtocol.java │ │ │ │ │ ├── ArchUMCReceiver.java │ │ │ │ │ ├── ArchUMCTransmit.java │ │ │ │ │ ├── AsynChannelAllocator.java │ │ │ │ │ ├── AsyncMessenger.java │ │ │ │ │ ├── AsyncMessengerChannelControlBlock.java │ │ │ │ │ ├── AsyncMsgHandleAdapter.java │ │ │ │ │ ├── CascadeMessageNode.java │ │ │ │ │ ├── ChannelAllocateException.java │ │ │ │ │ ├── ChannelControlBlock.java │ │ │ │ │ ├── ChannelHandleException.java │ │ │ │ │ ├── ChannelPool.java │ │ │ │ │ ├── ChannelStatus.java │ │ │ │ │ ├── EMCBytesDecoder.java │ │ │ │ │ ├── EMCHead.java │ │ │ │ │ ├── ExtraEncode.java │ │ │ │ │ ├── FairChannelPool.java │ │ │ │ │ ├── GenericEMCBytesDecoder.java │ │ │ │ │ ├── IdleFirstBalanceStrategy.java │ │ │ │ │ ├── InformMessage.java │ │ │ │ │ ├── MappedChannelPool.java │ │ │ │ │ ├── Medium.java │ │ │ │ │ ├── MediumTerminationException.java │ │ │ │ │ ├── Message.java │ │ │ │ │ ├── MessageNode.java │ │ │ │ │ ├── MessageNodus.java │ │ │ │ │ ├── MessageStereotypes.java │ │ │ │ │ ├── Messagus.java │ │ │ │ │ ├── Messenger.java │ │ │ │ │ ├── MessengerChannelControlBlock.java │ │ │ │ │ ├── MsgNodeConfig.java │ │ │ │ │ ├── MsgProtocol.java │ │ │ │ │ ├── MultiClientChannelRegistry.java │ │ │ │ │ ├── Recipient.java │ │ │ │ │ ├── RecipientChannelControlBlock.java │ │ │ │ │ ├── RegisterChannelPool.java │ │ │ │ │ ├── Status.java │ │ │ │ │ ├── StreamTerminateException.java │ │ │ │ │ ├── SyncFairChannelPool.java │ │ │ │ │ ├── TransferMessage.java │ │ │ │ │ ├── UMCCHead.java │ │ │ │ │ ├── UMCCHeadV1.java │ │ │ │ │ ├── UMCChannel.java │ │ │ │ │ ├── UMCConstants.java │ │ │ │ │ ├── UMCException.java │ │ │ │ │ ├── UMCHead.java │ │ │ │ │ ├── UMCHeadV1.java │ │ │ │ │ ├── UMCMessage.java │ │ │ │ │ ├── UMCMethod.java │ │ │ │ │ ├── UMCProtocol.java │ │ │ │ │ ├── UMCReceiver.java │ │ │ │ │ ├── UMCServiceException.java │ │ │ │ │ ├── UMCTransmit.java │ │ │ │ │ ├── event/ │ │ │ │ │ │ ├── ChannelDataInterceptor.java │ │ │ │ │ │ ├── ChannelEventHandler.java │ │ │ │ │ │ └── ChannelInactiveHandler.java │ │ │ │ │ ├── extra/ │ │ │ │ │ │ ├── ExtraHeadCoder.java │ │ │ │ │ │ ├── ExtraHeadDecoder.java │ │ │ │ │ │ ├── 
ExtraHeadEncoder.java │ │ │ │ │ │ ├── ExtraHeadMarshalingException.java │ │ │ │ │ │ ├── GenericExtraHeadCoder.java │ │ │ │ │ │ ├── GenericExtraHeadDecoder.java │ │ │ │ │ │ └── GenericExtraHeadEncoder.java │ │ │ │ │ └── handler/ │ │ │ │ │ ├── ErrorMessageAudit.java │ │ │ │ │ ├── GenericErrorMessageAudit.java │ │ │ │ │ └── WrappedErrorMessageException.java │ │ │ │ ├── vita/ │ │ │ │ │ ├── HeartbeatControl.java │ │ │ │ │ └── HeartbeatFeedbackor.java │ │ │ │ └── wolf/ │ │ │ │ ├── ArchChannelControlBlock.java │ │ │ │ ├── ArchChannelPool.java │ │ │ │ ├── ArchUMCChannel.java │ │ │ │ ├── AsyncUlfMedium.java │ │ │ │ ├── ChannelUtils.java │ │ │ │ ├── GenericUMCByteMessageDecoder.java │ │ │ │ ├── InternalErrors.java │ │ │ │ ├── MCConnectionArguments.java │ │ │ │ ├── MCSecurityAuthentication.java │ │ │ │ ├── MCSecurityToken.java │ │ │ │ ├── NettyChannelControlBlock.java │ │ │ │ ├── NettyUMCChannel.java │ │ │ │ ├── SharedConnectionArguments.java │ │ │ │ ├── StandardRemoteUserAuthentication.java │ │ │ │ ├── UlfAsyncMsgHandleAdapter.java │ │ │ │ ├── UlfBytesTransferMessage.java │ │ │ │ ├── UlfChannel.java │ │ │ │ ├── UlfChannelStatus.java │ │ │ │ ├── UlfIOLoadBalanceStrategy.java │ │ │ │ ├── UlfIdleFirstBalanceStrategy.java │ │ │ │ ├── UlfInformMessage.java │ │ │ │ ├── UlfInstructMessage.java │ │ │ │ ├── UlfMCReceiver.java │ │ │ │ ├── UlfMCTransmit.java │ │ │ │ ├── UlfMessageNode.java │ │ │ │ ├── UlfMessageStereotypes.java │ │ │ │ ├── UlfStreamTransferMessage.java │ │ │ │ ├── UnsetUlfAsyncMsgHandleAdapter.java │ │ │ │ ├── WolfMCInitializationException.java │ │ │ │ ├── WolfMCNode.java │ │ │ │ ├── WolfMCServiceException.java │ │ │ │ ├── WolfMCStandardConstants.java │ │ │ │ ├── WolfNettyServgram.java │ │ │ │ ├── client/ │ │ │ │ │ ├── ArchAsyncMessenger.java │ │ │ │ │ ├── ClientConnectArguments.java │ │ │ │ │ ├── ClientConnectionArguments.java │ │ │ │ │ ├── MessengerNettyChannelControlBlock.java │ │ │ │ │ ├── ProactiveParallelFairChannelPool.java │ │ │ │ │ ├── ProactiveParallelFairSyncChannelPool.java │ │ │ │ │ ├── UlfAsyncMessengerChannelControlBlock.java │ │ │ │ │ ├── UlfClient.java │ │ │ │ │ └── WolfMCClient.java │ │ │ │ └── server/ │ │ │ │ ├── AbstractTimerTask.java │ │ │ │ ├── IdleChannelTimerTask.java │ │ │ │ ├── PassiveRegisterChannelPool.java │ │ │ │ ├── RecipientNettyChannelControlBlock.java │ │ │ │ ├── ServerConnectArguments.java │ │ │ │ ├── ServerConnectionArguments.java │ │ │ │ ├── UlfRecipientChannelControlBlock.java │ │ │ │ ├── UlfServer.java │ │ │ │ └── WolfMCServer.java │ │ │ └── umct/ │ │ │ ├── AddressMapping.java │ │ │ ├── ArchMessagelet.java │ │ │ ├── ArchMessagram.java │ │ │ ├── ArchMsgDeliver.java │ │ │ ├── ArchMsgExpress.java │ │ │ ├── ArchUMCConnection.java │ │ │ ├── DenialServiceException.java │ │ │ ├── DuplexExpress.java │ │ │ ├── GenericMessagramScanner.java │ │ │ ├── IlleagalResponseException.java │ │ │ ├── IntegratedMessagram.java │ │ │ ├── InvokeEntity.java │ │ │ ├── JSONLetMsgDeliver.java │ │ │ ├── MessageDeliver.java │ │ │ ├── MessageExpress.java │ │ │ ├── MessageHandler.java │ │ │ ├── MessageJunction.java │ │ │ ├── Messagelet.java │ │ │ ├── Messagram.java │ │ │ ├── MessagramScanner.java │ │ │ ├── ProtoletMsgDeliver.java │ │ │ ├── ServiceException.java │ │ │ ├── ServiceInternalException.java │ │ │ ├── UMCConnection.java │ │ │ ├── UMCTExpress.java │ │ │ ├── UMCTExpressHandler.java │ │ │ ├── UMCTNode.java │ │ │ ├── UlfConnection.java │ │ │ ├── UlfMessageHandler.java │ │ │ ├── WolfMCExpress.java │ │ │ ├── bind/ │ │ │ │ └── ArgParam.java │ │ │ ├── decipher/ │ │ │ │ ├── 
HeaderDecipher.java │ │ │ │ ├── JSONHeaderDecipher.java │ │ │ │ └── PrototypeDecipher.java │ │ │ ├── husky/ │ │ │ │ ├── AddressedEntity.java │ │ │ │ ├── ArchAddressedEntity.java │ │ │ │ ├── ArchRequestPackage.java │ │ │ │ ├── ArchResponsePackage.java │ │ │ │ ├── HuskyCTPConstants.java │ │ │ │ ├── HuskyServiceErrorMessages.java │ │ │ │ ├── Interceptor.java │ │ │ │ ├── MessagePackage.java │ │ │ │ ├── RequestPackage.java │ │ │ │ ├── ResponsePackage.java │ │ │ │ ├── compiler/ │ │ │ │ │ ├── ArchIfaceCompiler.java │ │ │ │ │ ├── ArchIfaceInspector.java │ │ │ │ │ ├── ArchProtoIfaceCompiler.java │ │ │ │ │ ├── BytecodeIfaceCompiler.java │ │ │ │ │ ├── ClassDigest.java │ │ │ │ │ ├── CompileException.java │ │ │ │ │ ├── CompilerEncoder.java │ │ │ │ │ ├── DigestIfaceCompiler.java │ │ │ │ │ ├── DynamicMethodPrototype.java │ │ │ │ │ ├── GenericClassDigest.java │ │ │ │ │ ├── GenericCompilerEncoder.java │ │ │ │ │ ├── GenericIfaceInspector.java │ │ │ │ │ ├── GenericIfaceMappingDigest.java │ │ │ │ │ ├── GenericIfaceParamsDigest.java │ │ │ │ │ ├── GenericMethodDigest.java │ │ │ │ │ ├── IfaceCompiler.java │ │ │ │ │ ├── IfaceInspector.java │ │ │ │ │ ├── IfaceMappingDigest.java │ │ │ │ │ ├── IfaceParamsDigest.java │ │ │ │ │ ├── InterfacialCompiler.java │ │ │ │ │ ├── MethodDigest.java │ │ │ │ │ ├── MethodPrototype.java │ │ │ │ │ ├── ProtoIfaceCompiler.java │ │ │ │ │ └── ProtoInterfacialCompiler.java │ │ │ │ ├── function/ │ │ │ │ │ ├── ArgumentRequest.java │ │ │ │ │ ├── FunctionMold.java │ │ │ │ │ ├── GenericArgumentRequest.java │ │ │ │ │ ├── GenericFunctionMold.java │ │ │ │ │ ├── GenericReturnResponse.java │ │ │ │ │ ├── MethodTemplates.java │ │ │ │ │ └── ReturnResponse.java │ │ │ │ ├── heartbeat/ │ │ │ │ │ ├── HeartbeatConstants.java │ │ │ │ │ ├── HuskyHeartbeatControl.java │ │ │ │ │ └── HuskyHeartbeatFeedbackor.java │ │ │ │ └── machinery/ │ │ │ │ ├── ArchRouteDispatcher.java │ │ │ │ ├── DigestContextMachinery.java │ │ │ │ ├── DigestMappingLoader.java │ │ │ │ ├── DigestTransformer.java │ │ │ │ ├── ExcludeDigestMappingFilters.java │ │ │ │ ├── HuskyContextMachinery.java │ │ │ │ ├── HuskyMappingLoader.java │ │ │ │ ├── HuskyMappingScopeSet.java │ │ │ │ ├── HuskyRouteDispatcher.java │ │ │ │ ├── HuskyRouteDispatcherFabricator.java │ │ │ │ ├── HuskyTransformer.java │ │ │ │ ├── MCTContextMachinery.java │ │ │ │ ├── MCTTransformer.java │ │ │ │ ├── MultiMappingLoader.java │ │ │ │ ├── PMCTContextMachinery.java │ │ │ │ ├── PMCTTransformer.java │ │ │ │ ├── ProtoRouteDispatcher.java │ │ │ │ └── RouteDispatcher.java │ │ │ ├── lets/ │ │ │ │ ├── MessageServiceScanner.java │ │ │ │ ├── MessageletScanner.java │ │ │ │ └── MsgService.java │ │ │ ├── mapping/ │ │ │ │ ├── ArchMappingInspector.java │ │ │ │ ├── BytecodeControllerInspector.java │ │ │ │ ├── ControllerInspector.java │ │ │ │ ├── GenericMappingDigest.java │ │ │ │ ├── GenericParamsDigest.java │ │ │ │ ├── InspectException.java │ │ │ │ ├── MappingDigest.java │ │ │ │ ├── MappingInspector.java │ │ │ │ ├── MethodDigestUtils.java │ │ │ │ └── ParamsDigest.java │ │ │ ├── proxy/ │ │ │ │ └── UMCTHub.java │ │ │ └── stereotype/ │ │ │ ├── Controller.java │ │ │ ├── Iface.java │ │ │ └── IfaceUtils.java │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ └── umc/ │ │ └── TestUMCC.java │ ├── hydra-service-control/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ └── service/ │ │ └── registry/ │ │ ├── ClientServiceRegisterException.java │ │ ├── ServiceControlException.java │ │ ├── ServiceControlRPCException.java │ │ ├── 
ServiceInstanceCreationException.java │ │ ├── ServiceValidationException.java │ │ ├── UniformService.java │ │ ├── WolfServiceInstance.java │ │ ├── appoint/ │ │ │ ├── ServiceAppointServer.java │ │ │ └── ServiceClientile.java │ │ ├── client/ │ │ │ ├── ArchServiceClient.java │ │ │ ├── HuskyServiceClient.java │ │ │ └── ServiceClient.java │ │ ├── constant/ │ │ │ ├── ServiceStatus.java │ │ │ └── ServiceVitalizationStatus.java │ │ ├── dto/ │ │ │ ├── ApplicationMetaDTO.java │ │ │ ├── RegisterServiceDTO.java │ │ │ └── ServiceMetaDTO.java │ │ ├── event/ │ │ │ ├── ServiceRegisterEvent.java │ │ │ └── ServiceRegisterEventHandler.java │ │ ├── server/ │ │ │ ├── ServiceEventHooker.java │ │ │ ├── ServiceLifecycleIface.java │ │ │ ├── ServiceLifecycleService.java │ │ │ ├── ServiceManager.java │ │ │ ├── ServiceMetaManipulationIface.java │ │ │ ├── ServiceMetaService.java │ │ │ ├── UniformServiceEventHooker.java │ │ │ └── UniformServiceManager.java │ │ └── ulf/ │ │ ├── HuskyServiceAppointServer.java │ │ ├── HuskyServiceClientile.java │ │ ├── ServiceLifecycleController.java │ │ └── ServiceMetaController.java │ ├── hydra-system-reign/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── hydra/ │ │ └── reign/ │ │ └── UnixInstitutionalizedMetaImperiumPrivy.java │ ├── hydra-system-tritium/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ └── pinecone/ │ │ │ │ └── tritium/ │ │ │ │ ├── ConfigConstants.java │ │ │ │ ├── MasterServgramOrchestrator.java │ │ │ │ ├── Tritium.java │ │ │ │ ├── ally/ │ │ │ │ │ ├── IndexableManager.java │ │ │ │ │ ├── messengers/ │ │ │ │ │ │ └── MessagersManager.java │ │ │ │ │ └── rdb/ │ │ │ │ │ ├── DruidDataSourceFactory.java │ │ │ │ │ ├── GenericIbatisClient.java │ │ │ │ │ ├── RDBManager.java │ │ │ │ │ └── UniformRDBClient.java │ │ │ │ ├── messagron/ │ │ │ │ │ ├── HeistMessage.java │ │ │ │ │ ├── Messageletson.java │ │ │ │ │ └── Messagron.java │ │ │ │ ├── system/ │ │ │ │ │ ├── BasicServer.java │ │ │ │ │ ├── ConfigScope.java │ │ │ │ │ ├── Hierarchy.java │ │ │ │ │ ├── InterWareDirector.java │ │ │ │ │ ├── KnittedMiddlewareDirector.java │ │ │ │ │ ├── MissionTerminateException.java │ │ │ │ │ ├── NomenclatureAllocator.java │ │ │ │ │ ├── ServersScope.java │ │ │ │ │ ├── StorageSystem.java │ │ │ │ │ ├── SystemDaemon.java │ │ │ │ │ ├── TritiumConfigScope.java │ │ │ │ │ └── TritiumSystem.java │ │ │ │ └── util/ │ │ │ │ └── ConfigHelper.java │ │ │ └── resources/ │ │ │ └── logback.xml │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ ├── major/ │ │ │ └── TestTritium.java │ │ ├── mc/ │ │ │ ├── JesusChrist.java │ │ │ ├── TestMCClient.java │ │ │ └── TestMCServer.java │ │ ├── protobuf/ │ │ │ ├── Bear.java │ │ │ ├── Beaver.java │ │ │ ├── Monkey.java │ │ │ ├── Parasite.java │ │ │ ├── Rabbit.java │ │ │ ├── Raccoon.java │ │ │ ├── RaccoonController.java │ │ │ ├── RaccoonKing.java │ │ │ ├── RedBeaver.java │ │ │ ├── RedRaccoon.java │ │ │ ├── Rpc.java │ │ │ ├── RpcRequest.java │ │ │ ├── RpcRequestOrBuilder.java │ │ │ ├── RpcResponse.java │ │ │ ├── RpcResponseOrBuilder.java │ │ │ ├── Slave.java │ │ │ ├── TestKafkaClient.java │ │ │ ├── TestProtobuf.java │ │ │ ├── TestRPCSystem.java │ │ │ ├── TestRocketClient.java │ │ │ ├── rpc.proto │ │ │ └── v3/ │ │ │ ├── Rpc.java │ │ │ ├── RpcRequest.java1 │ │ │ ├── RpcRequestOrBuilder.java │ │ │ ├── RpcResponse.java1 │ │ │ └── RpcResponseOrBuilder.java │ │ ├── springram/ │ │ │ └── TestSpringram.java │ │ └── utils/ │ │ ├── TestSchemeQuerier.java │ │ └── TestVFS.java │ └── pom.xml ├── LICENSE ├── Messenger/ │ ├── 
Messenger.iml │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── com/ │ │ └── genius/ │ │ ├── App.java │ │ ├── common/ │ │ │ └── UlfUMC/ │ │ │ ├── CommonMessageBuilder.java │ │ │ ├── ErrorMessageBuilder.java │ │ │ ├── MessageBuilder.java │ │ │ ├── MessageFactory.java │ │ │ ├── SlaveMessageBuilder.java │ │ │ ├── UlfUMCBody.java │ │ │ ├── UlfUMCMessage.java │ │ │ ├── UlfUMCMessageException.java │ │ │ ├── UlfUMCMessageType.java │ │ │ └── UlfUMCProtocol.java │ │ ├── config/ │ │ │ └── MessageConverterConfig.java │ │ └── pool/ │ │ ├── FunctionNamePool.java │ │ └── MqPool.java │ └── test/ │ └── java/ │ └── com/ │ └── genius/ │ └── AppTest.java ├── Odin/ │ ├── odin-architecture/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── odin/ │ │ ├── atlas/ │ │ │ ├── advance/ │ │ │ │ ├── GraphAdvancer.java │ │ │ │ ├── GraphStratumAdvancer.java │ │ │ │ └── GraphStratumTape.java │ │ │ └── graph/ │ │ │ └── RuntimeAtlasInstrument.java │ │ ├── conduct/ │ │ │ ├── CollectiveTaskLegionary.java │ │ │ ├── CollectiveTaskRegiment.java │ │ │ ├── ProcessorDeployManager.java │ │ │ ├── RegimentException.java │ │ │ ├── entity/ │ │ │ │ ├── InstanceAtlasAdjacent.java │ │ │ │ ├── InstanceAtlasNode.java │ │ │ │ ├── InstanceEvent.java │ │ │ │ ├── InstanceExec.java │ │ │ │ ├── LaunchedContext.java │ │ │ │ ├── RegimentJoinRequest.java │ │ │ │ └── RegimentJoinResponse.java │ │ │ └── schedule/ │ │ │ ├── InstanceScheduleAllocator.java │ │ │ ├── InstanceScheduleImpetus.java │ │ │ ├── TaskSchedulePreparator.java │ │ │ ├── UniformTaskScheduler.java │ │ │ └── entity/ │ │ │ ├── ConcurrentQuota.java │ │ │ ├── DepartureChecklist.java │ │ │ └── ScheduleFittingContext.java │ │ ├── dispatch/ │ │ │ ├── ArchTaskExecutionI32Queue.java │ │ │ ├── ConsumeCompromisedPolice.java │ │ │ ├── DispatchStrategy.java │ │ │ ├── PipelineLaunchReport.java │ │ │ ├── QueueBadAllocatedException.java │ │ │ ├── TaskConsumeException.java │ │ │ ├── TaskDispatchException.java │ │ │ ├── TaskDispatcher.java │ │ │ ├── TaskExecutionProcessor.java │ │ │ ├── TaskExecutionQueue.java │ │ │ ├── TaskInstanceConsumer.java │ │ │ ├── TaskLaunchContext.java │ │ │ ├── TaskQueueMeta.java │ │ │ └── entity/ │ │ │ ├── ArchTaskQueueMeta.java │ │ │ ├── GenericTaskProcessorEntity.java │ │ │ ├── GenericTaskQueueEntity.java │ │ │ └── TaskProcessorEntity.java │ │ ├── proc/ │ │ │ ├── ProcessLifecycleExaminer.java │ │ │ ├── ProcessRemoteEventHandler.java │ │ │ ├── RemoteProcess.java │ │ │ ├── RemoteProcessLifecycleException.java │ │ │ ├── RemoteProcessManagerNode.java │ │ │ ├── RemoteProcessServiceException.java │ │ │ ├── RemoteProcessServiceRPCException.java │ │ │ ├── RemoteTerminationStatus.java │ │ │ ├── RemoteVitalizationStatus.java │ │ │ ├── client/ │ │ │ │ └── RemoteProcessManagerClient.java │ │ │ ├── entity/ │ │ │ │ ├── RemoteTerminationReport.java │ │ │ │ ├── RemoteVitalizationResponse.java │ │ │ │ ├── UProcessMirrorDTO.java │ │ │ │ └── UProcessRuntimeMeta.java │ │ │ └── server/ │ │ │ └── RemoteProcessManagerServer.java │ │ ├── system/ │ │ │ ├── RavenException.java │ │ │ ├── RavenRuntimeException.java │ │ │ └── TaskCentralControl.java │ │ └── task/ │ │ ├── CentralizedTaskInstrument.java │ │ ├── GenericRavenTaskConfig.java │ │ ├── RavenTask.java │ │ ├── RavenTaskConfig.java │ │ ├── RavenTaskConstants.java │ │ ├── RavenTaskInstance.java │ │ ├── dto/ │ │ │ └── CategoryTag.java │ │ ├── entity/ │ │ │ └── pyramid/ │ │ │ ├── Category.java │ │ │ ├── CategoryType.java │ │ │ └── TaskCategory.java │ │ ├── mapper/ │ │ │ ├── 
InstanceAtlasAdjacentMapper.java │ │ │ ├── InstanceAtlasNodeMapper.java │ │ │ ├── InstanceEventMapper.java │ │ │ ├── InstanceExecMapper.java │ │ │ └── OdinTaskMappingDriver.java │ │ ├── service/ │ │ │ └── CategoryService.java │ │ ├── source/ │ │ │ ├── CategoryMappingManipulator.java │ │ │ ├── CategoryTypeManipulator.java │ │ │ ├── RavenTaskMasterManipulator.java │ │ │ ├── ScheduleManipulator.java │ │ │ ├── TaskCategoryManipulator.java │ │ │ └── TaskProcessorManipulator.java │ │ ├── system/ │ │ │ └── TaskPathInvalidException.java │ │ └── troll/ │ │ ├── InstanceLaunchException.java │ │ ├── LaunchException.java │ │ ├── LaunchFeature.java │ │ └── TaskExecutionLauncher.java │ ├── odin-framework-atlas/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── com/ │ │ │ └── walnut/ │ │ │ └── odin/ │ │ │ └── atlas/ │ │ │ ├── advance/ │ │ │ │ ├── GenericGraphStratumTape.java │ │ │ │ ├── GenericQueueEntity.java │ │ │ │ ├── GenericTapedBFSGraphAdvancer.java │ │ │ │ ├── QueueEntity.java │ │ │ │ ├── TapedBFSGraphStratumAdvancer.java │ │ │ │ └── strategy/ │ │ │ │ ├── AtlasPriorityProcessStrategy.java │ │ │ │ ├── GraphPriorityProcessStrategy.java │ │ │ │ ├── MegaInDegreeFirstStrategy.java │ │ │ │ └── PriorityProcessStrategy.java │ │ │ ├── flow/ │ │ │ │ ├── AnalyzeStage.java │ │ │ │ ├── ConductFlow.java │ │ │ │ ├── ConductStage.java │ │ │ │ ├── MarshallingStage.java │ │ │ │ └── OptimizationStage.java │ │ │ ├── graph/ │ │ │ │ ├── UniformRuntimeAtlas.java │ │ │ │ └── entity/ │ │ │ │ ├── TaskAtlasNode.java │ │ │ │ └── TaskGraphNode.java │ │ │ └── mapper/ │ │ │ ├── QueueStratumManipulator.java │ │ │ ├── QueueStratumMapper.java │ │ │ ├── RunAtlasMasterManipulator.java │ │ │ ├── RuntimeVGraphMapper.java │ │ │ ├── RuntimeVectorGraphPathCacheMapper.java │ │ │ └── TaskGraphManipulator.java │ │ └── resources/ │ │ └── mapper/ │ │ └── kernel/ │ │ └── task/ │ │ ├── InstanceAtlasAdjacentMapper.xml │ │ ├── InstanceAtlasNodeMapper.xml │ │ ├── InstanceEventMapper.xml │ │ ├── InstanceManipulator.xml │ │ ├── QueueStratumMapper.xml │ │ └── RuntimeVGraphMapper.xml │ ├── odin-framework-conduct/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── odin/ │ │ ├── conduct/ │ │ │ ├── ProcessorLifecycleController.java │ │ │ ├── ProcessorLifecycleIface.java │ │ │ ├── RavenCollectiveTaskLegionary.java │ │ │ ├── RavenCollectiveTaskRegiment.java │ │ │ ├── RavenProcessorDeployManager.java │ │ │ ├── dag/ │ │ │ │ ├── ConfigurableTaskGraphOrchestratorConfig.java │ │ │ │ ├── ExecuteCallBack.java │ │ │ │ ├── RavenTaskGraphOrchestrator.java │ │ │ │ ├── TaskExecuteCallBack.java │ │ │ │ ├── TaskExertium.java │ │ │ │ ├── TaskGraphOrchestrator.java │ │ │ │ ├── TaskGraphOrchestratorConfig.java │ │ │ │ └── TaskGraphOrchestratorConstants.java │ │ │ ├── entity/ │ │ │ │ ├── GenericInstanceAtlasAdjacent.java │ │ │ │ ├── GenericInstanceAtlasNode.java │ │ │ │ ├── GenericInstanceEvent.java │ │ │ │ └── GenericInstanceExec.java │ │ │ └── schedule/ │ │ │ ├── InstanceAtlasNodeManipular.java │ │ │ ├── RavenInstanceScheduleImpetus.java │ │ │ ├── RavenScheduleAllocator.java │ │ │ ├── RavenTaskSchedulePreparator.java │ │ │ ├── RavenTaskScheduler.java │ │ │ └── ScheduleCronHelper.java │ │ ├── dispatch/ │ │ │ ├── AdaptiveCapacityDispatchStrategy.java │ │ │ ├── DefaultPipelineLaunchReport.java │ │ │ ├── GenericI32TaskQueue.java │ │ │ ├── RavenTaskDispatcher.java │ │ │ └── RavenTaskExecutionProcessor.java │ │ └── task/ │ │ └── mapper/ │ │ └── TaskProcessorMapper.java │ ├── odin-framework-runtime/ │ │ ├── pom.xml │ │ 
└── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── odin/ │ │ ├── proc/ │ │ │ ├── ArchRemoteProcessManagerNode.java │ │ │ ├── MediatedRemoteProcess.java │ │ │ ├── ProcessesUtils.java │ │ │ ├── RemoteProcessLifecycleExaminer.java │ │ │ ├── client/ │ │ │ │ ├── RPCRecallSysProcessEventHandler.java │ │ │ │ ├── RavenRemoteProcessManagerClient.java │ │ │ │ ├── ReactiveMasterProcessLifecycleController.java │ │ │ │ └── SlaveProcessLifecycleIface.java │ │ │ └── server/ │ │ │ ├── MasterProcessLifecycleIface.java │ │ │ ├── RavenRemoteProcessManagerServer.java │ │ │ └── ReactiveSlaveProcessLifecycleController.java │ │ └── task/ │ │ ├── RavenTaskInstrument.java │ │ ├── dto/ │ │ │ └── GenericCategoryTag.java │ │ ├── entity/ │ │ │ └── pyramid/ │ │ │ ├── ArchCategory.java │ │ │ ├── GenericCategoryType.java │ │ │ └── GenericTaskCategory.java │ │ ├── mapper/ │ │ │ ├── CategoryMappingMapper.java │ │ │ ├── CategoryTypeMapper.java │ │ │ └── TaskCategoryMapper.java │ │ ├── service/ │ │ │ └── RavenCategoryService.java │ │ └── troll/ │ │ ├── ArchRavenTask.java │ │ ├── ArchRavenTaskInstance.java │ │ ├── GenericRavenTask.java │ │ ├── GenericRavenTaskInstance.java │ │ ├── LaunchErrorCauses.java │ │ └── TrollTaskExecutionLauncher.java │ ├── odin-mapper-driver/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── odin/ │ │ ├── atlas/ │ │ │ └── mapper/ │ │ │ ├── ArchAtlasMappingDriver.java │ │ │ ├── GUID72TypeHandler.java │ │ │ ├── GUIDTypeHandler.java │ │ │ ├── OdinAtlasMappingDriver.java │ │ │ ├── OdinAtlasMasterGraphManipulatorImpl.java │ │ │ ├── OdinAtlasMasterManipulatorImpl.java │ │ │ └── UOITypeHandler.java │ │ └── task/ │ │ └── mapper/ │ │ ├── OdinUniformTaskMappingDriver.java │ │ ├── RavenTaskMasterManipulatorImpl.java │ │ └── ScheduleManipulatorImpl.java │ ├── odin-system/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── odin/ │ │ └── system/ │ │ └── Odin.java │ └── pom.xml ├── Pinecones/ │ ├── Jelly/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── slime/ │ │ │ └── jelly/ │ │ │ ├── map/ │ │ │ │ └── SS.java │ │ │ └── source/ │ │ │ ├── NamespacedKey.java │ │ │ ├── ds/ │ │ │ │ ├── dao/ │ │ │ │ │ ├── CannotAcquireLockException.java │ │ │ │ │ ├── CannotSerializeTransactionException.java │ │ │ │ │ ├── ConcurrencyFailureException.java │ │ │ │ │ ├── DataAccessException.java │ │ │ │ │ ├── DataAccessResourceFailureException.java │ │ │ │ │ ├── DataIntegrityViolationException.java │ │ │ │ │ ├── DeadlockLoserDataAccessException.java │ │ │ │ │ ├── DuplicateKeyException.java │ │ │ │ │ ├── InvalidDataAccessApiUsageException.java │ │ │ │ │ ├── InvalidDataAccessResourceUsageException.java │ │ │ │ │ ├── NonTransientDataAccessException.java │ │ │ │ │ ├── NonTransientDataAccessResourceException.java │ │ │ │ │ ├── PermissionDeniedDataAccessException.java │ │ │ │ │ ├── PersistenceExceptionTranslator.java │ │ │ │ │ ├── PessimisticLockingFailureException.java │ │ │ │ │ ├── QueryTimeoutException.java │ │ │ │ │ ├── RecoverableDataAccessException.java │ │ │ │ │ ├── TransientDataAccessException.java │ │ │ │ │ └── TransientDataAccessResourceException.java │ │ │ │ ├── jdbc/ │ │ │ │ │ ├── AbstractFallbackSQLExceptionTranslator.java │ │ │ │ │ ├── BadSqlGrammarException.java │ │ │ │ │ ├── CustomSQLErrorCodesTranslation.java │ │ │ │ │ ├── DatabaseMetaDataCallback.java │ │ │ │ │ ├── InvalidResultSetAccessException.java │ │ │ │ │ ├── MetaDataAccessException.java │ │ │ │ │ ├── 
SQLErrorCodeSQLExceptionTranslator.java │ │ │ │ │ ├── SQLErrorCodes.java │ │ │ │ │ ├── SQLErrorCodesFactory.java │ │ │ │ │ ├── SQLExceptionSubclassTranslator.java │ │ │ │ │ ├── SQLExceptionTranslator.java │ │ │ │ │ ├── SQLStateSQLExceptionTranslator.java │ │ │ │ │ ├── TransientDataAccessException.java │ │ │ │ │ ├── TransientDataAccessResourceException.java │ │ │ │ │ ├── UncategorizedDataAccessException.java │ │ │ │ │ └── UncategorizedSQLException.java │ │ │ │ └── transaction/ │ │ │ │ ├── PlatformTransactionManager.java │ │ │ │ ├── ResourceHolder.java │ │ │ │ ├── ResourceHolderSupport.java │ │ │ │ ├── ResourceTransactionManager.java │ │ │ │ ├── SavepointManager.java │ │ │ │ ├── StaticTransactionDefinition.java │ │ │ │ ├── TransactionDefinition.java │ │ │ │ ├── TransactionException.java │ │ │ │ ├── TransactionExecution.java │ │ │ │ ├── TransactionManager.java │ │ │ │ ├── TransactionStatus.java │ │ │ │ ├── TransactionSynchronization.java │ │ │ │ ├── TransactionSynchronizationAdapter.java │ │ │ │ ├── TransactionSynchronizationManager.java │ │ │ │ ├── TransactionSynchronizationUtils.java │ │ │ │ └── TransactionTimedOutException.java │ │ │ ├── ibatis/ │ │ │ │ ├── ArchDynamicQuerierResultHandler.java │ │ │ │ ├── DynamicQuerierEntityResultHandler.java │ │ │ │ ├── DynamicQuerierMappedResultHandler.java │ │ │ │ ├── DynamicQuerierSqlBuilder.java │ │ │ │ ├── GenericMybatisQuerierDataManipulator.java │ │ │ │ ├── IbatisClient.java │ │ │ │ ├── IbatisDAOScanner.java │ │ │ │ ├── IbatisDataAccessObject.java │ │ │ │ ├── IbatisManipulatorProxyMapperFactory.java │ │ │ │ ├── IbatisXMLResourceScanner.java │ │ │ │ ├── ProxySessionMapperPool.java │ │ │ │ ├── SoloSessionMapperPool.java │ │ │ │ └── proxy/ │ │ │ │ ├── MyBatisExceptionTranslator.java │ │ │ │ ├── MyBatisSystemException.java │ │ │ │ ├── SqlSessionHolder.java │ │ │ │ ├── SqlSessionTemplate.java │ │ │ │ └── SqlSessionUtils.java │ │ │ ├── memcached/ │ │ │ │ ├── GenericMemcachedManipulator.java │ │ │ │ └── MemcachedManipulator.java │ │ │ └── redis/ │ │ │ ├── GenericRedisHashManipulator.java │ │ │ ├── GenericRedisMasterManipulator.java │ │ │ ├── IteratorSourceAdapter.java │ │ │ ├── RedisEntryIterator.java │ │ │ ├── RedisIterator.java │ │ │ └── RedisKeysIterator.java │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ └── TestJelly.java │ ├── Pinecone/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ ├── PineTrial.java │ │ │ ├── Pinecone.java │ │ │ └── framework/ │ │ │ ├── lang/ │ │ │ │ ├── NamedInheritableThreadLocal.java │ │ │ │ ├── NamedThreadLocal.java │ │ │ │ └── field/ │ │ │ │ ├── DataStructureEntity.java │ │ │ │ ├── FieldEntity.java │ │ │ │ ├── GenericFieldEntity.java │ │ │ │ ├── GenericStructure.java │ │ │ │ └── SegmentEntity.java │ │ │ ├── system/ │ │ │ │ ├── ApoptosisRejectSignalException.java │ │ │ │ ├── AssertionRuntimeException.java │ │ │ │ ├── AsynSystem.java │ │ │ │ ├── CascadeSystem.java │ │ │ │ ├── ConformitySystem.java │ │ │ │ ├── ErrorStrings.java │ │ │ │ ├── Experimental.java │ │ │ │ ├── Framework.java │ │ │ │ ├── GenericMasterTaskManager.java │ │ │ │ ├── InstantKillError.java │ │ │ │ ├── IntegratedSubsystem.java │ │ │ │ ├── IrrationalProvokedException.java │ │ │ │ ├── IrrationalProvokedType.java │ │ │ │ ├── ModularizedSubsystem.java │ │ │ │ ├── NestedCheckedException.java │ │ │ │ ├── NestedExceptionUtils.java │ │ │ │ ├── NestedRuntimeException.java │ │ │ │ ├── NoSuchProviderException.java │ │ │ │ ├── Noexcept.java │ │ │ │ ├── NonNull.java │ │ │ │ ├── NotImplementedException.java │ │ │ │ ├── 
Nullable.java │ │ │ │ ├── ParseException.java │ │ │ │ ├── PieceworkManager.java │ │ │ │ ├── PineRuntimeException.java │ │ │ │ ├── Pinecore.java │ │ │ │ ├── PrimarySystem.java │ │ │ │ ├── ProvokeHandleException.java │ │ │ │ ├── ProxyProvokeHandleException.java │ │ │ │ ├── RedirectRuntimeException.java │ │ │ │ ├── RestartSignalException.java │ │ │ │ ├── RuntimeConstructionException.java │ │ │ │ ├── RuntimeInstantiationException.java │ │ │ │ ├── RuntimeSystem.java │ │ │ │ ├── Subsystem.java │ │ │ │ ├── SynergicSystem.java │ │ │ │ ├── Unsafe.java │ │ │ │ ├── aop/ │ │ │ │ │ ├── InfrastructureProxy.java │ │ │ │ │ ├── RawTargetAccess.java │ │ │ │ │ └── ScopedObject.java │ │ │ │ ├── architecture/ │ │ │ │ │ ├── ArchCascadeComponent.java │ │ │ │ │ ├── ArchCascadeComponentManager.java │ │ │ │ │ ├── ArchComponent.java │ │ │ │ │ ├── ArchComponentManager.java │ │ │ │ │ ├── CascadeComponent.java │ │ │ │ │ ├── CascadeComponentManager.java │ │ │ │ │ ├── Component.java │ │ │ │ │ ├── ComponentManager.java │ │ │ │ │ ├── SystemComponent.java │ │ │ │ │ └── SystemComponentManager.java │ │ │ │ ├── construction/ │ │ │ │ │ ├── DynamicInstancePool.java │ │ │ │ │ ├── DynamicStructure.java │ │ │ │ │ ├── GenericDynamicInstancePool.java │ │ │ │ │ ├── GenericStructureDefinition.java │ │ │ │ │ ├── InstanceDispenser.java │ │ │ │ │ ├── InstanceManufacturer.java │ │ │ │ │ ├── InstancePool.java │ │ │ │ │ ├── ObjectBasicTraits.java │ │ │ │ │ ├── ObjectTraits.java │ │ │ │ │ ├── Postpone.java │ │ │ │ │ ├── ReuseCycle.java │ │ │ │ │ ├── Structure.java │ │ │ │ │ ├── StructureDefinition.java │ │ │ │ │ ├── StructureInstanceDispenser.java │ │ │ │ │ ├── Structures.java │ │ │ │ │ ├── UnifyCentralInstanceDispenser.java │ │ │ │ │ └── UnifyStructureInjector.java │ │ │ │ ├── executum/ │ │ │ │ │ ├── ArchExecutum.java │ │ │ │ │ ├── ArchProcessum.java │ │ │ │ │ ├── ArchThreadum.java │ │ │ │ │ ├── Chronum.java │ │ │ │ │ ├── EventedTaskManager.java │ │ │ │ │ ├── ExclusiveProcessum.java │ │ │ │ │ ├── ExecutableSummoner.java │ │ │ │ │ ├── Executum.java │ │ │ │ │ ├── JobCompromisedException.java │ │ │ │ │ ├── LifeDaemon.java │ │ │ │ │ ├── Lifecycle.java │ │ │ │ │ ├── Processum.java │ │ │ │ │ ├── StageCompromisedException.java │ │ │ │ │ ├── Systema.java │ │ │ │ │ ├── Systemum.java │ │ │ │ │ ├── TaskCompromisedException.java │ │ │ │ │ ├── TaskManager.java │ │ │ │ │ └── VitalResource.java │ │ │ │ ├── functions/ │ │ │ │ │ ├── ChosenDispatcher.java │ │ │ │ │ ├── Executable.java │ │ │ │ │ ├── Executor.java │ │ │ │ │ ├── Function.java │ │ │ │ │ ├── FunctionTraits.java │ │ │ │ │ ├── Invokable.java │ │ │ │ │ ├── Invoker.java │ │ │ │ │ ├── LinearDispatcher.java │ │ │ │ │ ├── SteerableSegment.java │ │ │ │ │ └── SystemInvoker.java │ │ │ │ ├── homotype/ │ │ │ │ │ ├── Assimilable.java │ │ │ │ │ ├── HomoInjector.java │ │ │ │ │ ├── Homotypic.java │ │ │ │ │ ├── Injector.java │ │ │ │ │ └── StereotypicInjector.java │ │ │ │ ├── prototype/ │ │ │ │ │ ├── Ally.java │ │ │ │ │ ├── Factory.java │ │ │ │ │ ├── FamilyContext.java │ │ │ │ │ ├── MapStructuresEvaluator.java │ │ │ │ │ ├── ObjectiveArray.java │ │ │ │ │ ├── ObjectiveBean.java │ │ │ │ │ ├── ObjectiveClass.java │ │ │ │ │ ├── ObjectiveEvaluator.java │ │ │ │ │ ├── ObjectiveList.java │ │ │ │ │ ├── ObjectiveMap.java │ │ │ │ │ ├── Objectom.java │ │ │ │ │ ├── OverridableFamily.java │ │ │ │ │ ├── PineUnit.java │ │ │ │ │ ├── Pinenut.java │ │ │ │ │ ├── PinenutTraits.java │ │ │ │ │ ├── Prototype.java │ │ │ │ │ ├── Strategy.java │ │ │ │ │ ├── Summoner.java │ │ │ │ │ └── TypeIndex.java │ │ │ │ ├── regime/ │ │ │ │ │ ├── 
Automatus.java │ │ │ │ │ ├── Censorate.java │ │ │ │ │ ├── Examiner.java │ │ │ │ │ ├── Executioner.java │ │ │ │ │ ├── Instrument.java │ │ │ │ │ ├── Orchestrator.java │ │ │ │ │ ├── Regiment.java │ │ │ │ │ ├── Supervisor.java │ │ │ │ │ ├── Tracker.java │ │ │ │ │ ├── Volition.java │ │ │ │ │ └── arch/ │ │ │ │ │ ├── Controllor.java │ │ │ │ │ ├── Director.java │ │ │ │ │ ├── Dominator.java │ │ │ │ │ ├── Dominus.java │ │ │ │ │ ├── Lord.java │ │ │ │ │ └── Manager.java │ │ │ │ ├── regimentation/ │ │ │ │ │ ├── CascadeNodus.java │ │ │ │ │ ├── Nodus.java │ │ │ │ │ ├── UniformCascadeNodus.java │ │ │ │ │ └── UniformNodus.java │ │ │ │ └── stereotype/ │ │ │ │ ├── HungarianNotation.java │ │ │ │ └── JavaBeans.java │ │ │ ├── unit/ │ │ │ │ ├── AbstractMap.java │ │ │ │ ├── AbstractMultiValueMap.java │ │ │ │ ├── BidLinkedEntry.java │ │ │ │ ├── BitSet64.java │ │ │ │ ├── ConcurrentReferenceHashMap.java │ │ │ │ ├── Dictionary.java │ │ │ │ ├── Dictium.java │ │ │ │ ├── DummyMap.java │ │ │ │ ├── KeyValue.java │ │ │ │ ├── LinkedCaseInsensitiveMap.java │ │ │ │ ├── LinkedMultiValueMap.java │ │ │ │ ├── LinkedTreeMap.java │ │ │ │ ├── LinkedTreeMapList.java │ │ │ │ ├── LinkedTreeSet.java │ │ │ │ ├── ListDictium.java │ │ │ │ ├── ListedSortedMap.java │ │ │ │ ├── MapDictium.java │ │ │ │ ├── Mapnut.java │ │ │ │ ├── MultiScopeMap.java │ │ │ │ ├── MultiScopeMaptron.java │ │ │ │ ├── MultiValueMap.java │ │ │ │ ├── MultiValueMapper.java │ │ │ │ ├── MultiValueMaptron.java │ │ │ │ ├── PrecedeMultiMaptron.java │ │ │ │ ├── PrecedeMultiScopeMap.java │ │ │ │ ├── ScopeMap.java │ │ │ │ ├── ScopeTrees.java │ │ │ │ ├── SharedList.java │ │ │ │ ├── SingletonSupplier.java │ │ │ │ ├── TreeMap.java │ │ │ │ ├── UniScopeMap.java │ │ │ │ ├── UniScopeMaptron.java │ │ │ │ ├── Units.java │ │ │ │ ├── affinity/ │ │ │ │ │ ├── DataSharer.java │ │ │ │ │ ├── GenericObjectomSharer.java │ │ │ │ │ ├── ObjectOverrider.java │ │ │ │ │ └── RecursiveUnitOverrider.java │ │ │ │ ├── distinct/ │ │ │ │ │ ├── ArchBloomDistinctAudit.java │ │ │ │ │ ├── DistinctAudit.java │ │ │ │ │ ├── DistinctType.java │ │ │ │ │ ├── GenericDistinctAudit.java │ │ │ │ │ ├── GenericPrototypeDistinctAudit.java │ │ │ │ │ ├── MegaBloomDistinctAudit.java │ │ │ │ │ ├── MegaMergeDistinctAudit.java │ │ │ │ │ └── MegaPrototypeBloomDistinctAudit.java │ │ │ │ ├── multi/ │ │ │ │ │ ├── MultiCollectionMap.java │ │ │ │ │ ├── MultiCollectionMaptron.java │ │ │ │ │ ├── MultiCollectionProxyMap.java │ │ │ │ │ ├── MultiHashSetMaptron.java │ │ │ │ │ ├── MultiListMaptron.java │ │ │ │ │ ├── MultiSetMap.java │ │ │ │ │ └── MultiSetMaptron.java │ │ │ │ ├── tabulate/ │ │ │ │ │ ├── CollectedEntryDecoder.java │ │ │ │ │ ├── CollectedEntryEncoder.java │ │ │ │ │ ├── FamilyEntryNameEncoder.java │ │ │ │ │ ├── FamilyIterator.java │ │ │ │ │ ├── GenericCollectedEntryDecoder.java │ │ │ │ │ ├── GenericCollectedEntryEncoder.java │ │ │ │ │ ├── GenericNamespaceFamilyEntryNameEncoder.java │ │ │ │ │ ├── RecursiveEntryIterator.java │ │ │ │ │ ├── RecursiveFamilyIterator.java │ │ │ │ │ ├── TypedNamespaceFamilyEntryNameEncoder.java │ │ │ │ │ └── UnitFamilyNode.java │ │ │ │ ├── top/ │ │ │ │ │ ├── HeapTopper.java │ │ │ │ │ ├── LinkedMultiTreeToptron.java │ │ │ │ │ ├── LinkedTreeToptron.java │ │ │ │ │ ├── MultiToptronValueAdapter.java │ │ │ │ │ ├── MultiTreeToptron.java │ │ │ │ │ ├── TopmostSelector.java │ │ │ │ │ ├── Topper.java │ │ │ │ │ ├── Toptron.java │ │ │ │ │ ├── ToptronMap.java │ │ │ │ │ ├── ToptronMultiMap.java │ │ │ │ │ └── TreeToptron.java │ │ │ │ └── trie/ │ │ │ │ ├── AbstractTrieMap.java │ │ │ │ ├── ArchTrieNode.java │ │ │ │ ├── 
DirectoryNode.java │ │ │ │ ├── GenericDirectoryNode.java │ │ │ │ ├── GenericReparseNode.java │ │ │ │ ├── GenericValueNode.java │ │ │ │ ├── IllegalOperationException.java │ │ │ │ ├── ReparseNode.java │ │ │ │ ├── SeparatedSegmentor.java │ │ │ │ ├── TrieMap.java │ │ │ │ ├── TrieNode.java │ │ │ │ ├── TrieSegmentor.java │ │ │ │ ├── UniTrieMaptron.java │ │ │ │ └── ValueNode.java │ │ │ └── util/ │ │ │ ├── Assert.java │ │ │ ├── Bits.java │ │ │ ├── Bytes.java │ │ │ ├── CharactersUtils.java │ │ │ ├── ClassUtils.java │ │ │ ├── CollectionUtils.java │ │ │ ├── CursorParser.java │ │ │ ├── Debug.java │ │ │ ├── GeneralStrings.java │ │ │ ├── OSIdentifier.java │ │ │ ├── ObjectUtils.java │ │ │ ├── PatternMatchUtils.java │ │ │ ├── Randomium.java │ │ │ ├── ReflectionUtils.java │ │ │ ├── StringTraits.java │ │ │ ├── StringUtils.java │ │ │ ├── SuperConvert.java │ │ │ ├── SupplierUtils.java │ │ │ ├── UnitHelper.java │ │ │ ├── comparator/ │ │ │ │ ├── CompoundComparator.java │ │ │ │ ├── InvertibleComparator.java │ │ │ │ ├── OrderComparator.java │ │ │ │ ├── Ordered.java │ │ │ │ └── PriorityOrdered.java │ │ │ ├── config/ │ │ │ │ ├── Config.java │ │ │ │ ├── Configson.java │ │ │ │ ├── GenericStartupCommandParser.java │ │ │ │ ├── JSONConfig.java │ │ │ │ ├── JSONSystemConfig.java │ │ │ │ ├── MappedConfig.java │ │ │ │ ├── OverridableConfig.java │ │ │ │ ├── PatriarchalConfig.java │ │ │ │ ├── StartupCommandParser.java │ │ │ │ ├── SysConfigson.java │ │ │ │ └── SystemConfig.java │ │ │ ├── datetime/ │ │ │ │ ├── DatePattern.java │ │ │ │ ├── GenericMultiFormDateTimeAudit.java │ │ │ │ ├── StorageDate.java │ │ │ │ ├── StorageDateTime.java │ │ │ │ ├── StorageTime.java │ │ │ │ ├── UniformDateTimeAudit.java │ │ │ │ └── compact/ │ │ │ │ ├── CompactTimeUnit.java │ │ │ │ ├── CompactTimeUnit32.java │ │ │ │ ├── CompactTimestamp.java │ │ │ │ └── CompactTimestamp32.java │ │ │ ├── id/ │ │ │ │ ├── BytesID.java │ │ │ │ ├── GUID.java │ │ │ │ ├── GuidAllocator.java │ │ │ │ ├── GuidGenerateException.java │ │ │ │ ├── Identification.java │ │ │ │ ├── IllegalIdentificationException.java │ │ │ │ ├── Int32ID.java │ │ │ │ ├── Int64ID.java │ │ │ │ ├── NameStringID.java │ │ │ │ ├── NumericID.java │ │ │ │ └── StringID.java │ │ │ ├── io/ │ │ │ │ ├── FileIterator.java │ │ │ │ ├── FileNamePathIterator.java │ │ │ │ ├── FileUtils.java │ │ │ │ ├── PathItemIterator.java │ │ │ │ ├── PathIterator.java │ │ │ │ ├── Tracer.java │ │ │ │ └── Tracerson.java │ │ │ ├── json/ │ │ │ │ ├── ArchCursorParser.java │ │ │ │ ├── ArchJSONArray.java │ │ │ │ ├── ArchJSONObject.java │ │ │ │ ├── CustomizableJSONCursorParser.java │ │ │ │ ├── Dictson.java │ │ │ │ ├── GenericJSONEncoder.java │ │ │ │ ├── GenericJSONMarshal.java │ │ │ │ ├── JPlus.java │ │ │ │ ├── JPlusContext.java │ │ │ │ ├── JPlusCursorParser.java │ │ │ │ ├── JSON.java │ │ │ │ ├── JSONArray.java │ │ │ │ ├── JSONArrayDecoder.java │ │ │ │ ├── JSONArraytron.java │ │ │ │ ├── JSONCompiler.java │ │ │ │ ├── JSONCompilerException.java │ │ │ │ ├── JSONCursorParser.java │ │ │ │ ├── JSONDecoder.java │ │ │ │ ├── JSONDecompiler.java │ │ │ │ ├── JSONDictium.java │ │ │ │ ├── JSONEncoder.java │ │ │ │ ├── JSONException.java │ │ │ │ ├── JSONMaptron.java │ │ │ │ ├── JSONMarshal.java │ │ │ │ ├── JSONMarshalMode.java │ │ │ │ ├── JSONObject.java │ │ │ │ ├── JSONObjectDecoder.java │ │ │ │ ├── JSONParseException.java │ │ │ │ ├── JSONParserRedirectException.java │ │ │ │ ├── JSONString.java │ │ │ │ ├── JSONUtils.java │ │ │ │ ├── ObjectJSONCursorUnmarshal.java │ │ │ │ ├── TypeContext.java │ │ │ │ ├── TypeReference.java │ │ │ │ ├── binary/ │ │ │ │ │ ├── 
BsonTraits.java │ │ │ │ │ └── Bsonut.java │ │ │ │ ├── handler/ │ │ │ │ │ ├── EncodeHandlerRegistry.java │ │ │ │ │ ├── GenericEncodeHandlerRegistry.java │ │ │ │ │ └── JSONObjectEncodeHandler.java │ │ │ │ └── homotype/ │ │ │ │ ├── AnnotatedJSONInjector.java │ │ │ │ ├── AnnotatedObjectInjector.java │ │ │ │ ├── ArchBeanColonist.java │ │ │ │ ├── BeanColonist.java │ │ │ │ ├── BeanJSONEncoder.java │ │ │ │ ├── BeanMapDecoder.java │ │ │ │ ├── DirectBeanColonist.java │ │ │ │ ├── DirectJSONInjector.java │ │ │ │ ├── DirectObjectInjector.java │ │ │ │ ├── GenericBeanJSONEncoder.java │ │ │ │ ├── GenericBeanMapDecoder.java │ │ │ │ ├── GenericStructJSONDecoder.java │ │ │ │ ├── GenericStructJSONEncoder.java │ │ │ │ ├── JSONGet.java │ │ │ │ ├── JSONInjector.java │ │ │ │ ├── MapStructure.java │ │ │ │ ├── ObjectInjector.java │ │ │ │ ├── StructJSONDecoder.java │ │ │ │ ├── StructJSONEncoder.java │ │ │ │ └── WrappedBeanColonist.java │ │ │ ├── lang/ │ │ │ │ ├── ArchClassScopeLoader.java │ │ │ │ ├── ArchClassScopeSet.java │ │ │ │ ├── ArchDynamicFactory.java │ │ │ │ ├── ArchMultiProtocolNamespaceFetcher.java │ │ │ │ ├── ClassCandidateScanner.java │ │ │ │ ├── ClassFilter.java │ │ │ │ ├── ClassNameFetcher.java │ │ │ │ ├── ClassScanner.java │ │ │ │ ├── ClassScope.java │ │ │ │ ├── ClassScopeLoader.java │ │ │ │ ├── ClassScopeNSProtocolIteratorsFactory.java │ │ │ │ ├── DynamicFactory.java │ │ │ │ ├── FileClassCollectorAdapter.java │ │ │ │ ├── FilePackageCollectorAdapter.java │ │ │ │ ├── GenericClassScopeSet.java │ │ │ │ ├── GenericDynamicFactory.java │ │ │ │ ├── GenericScopeNSProtocolIteratorsFactory.java │ │ │ │ ├── InnerMetadataReader.java │ │ │ │ ├── JarClassCollectorAdapter.java │ │ │ │ ├── JarPackageCollectorAdapter.java │ │ │ │ ├── JarUtils.java │ │ │ │ ├── LazyScopedPackage.java │ │ │ │ ├── MetadataReader.java │ │ │ │ ├── MultiClassScopeLoader.java │ │ │ │ ├── NSProtocolIteratorsFactoryAdapter.java │ │ │ │ ├── NamespaceCollector.java │ │ │ │ ├── NamespaceIteratorPair.java │ │ │ │ ├── ObjectCandidateScanner.java │ │ │ │ ├── ObjectScanner.java │ │ │ │ ├── PackageNameFetcher.java │ │ │ │ ├── PathNamespaceCollectum.java │ │ │ │ ├── ScopedPackage.java │ │ │ │ ├── TypeFilter.java │ │ │ │ └── iterator/ │ │ │ │ ├── ArchJarEntryIterator.java │ │ │ │ ├── DirectoryFileIterator.java │ │ │ │ ├── DirectoryPackageIterator.java │ │ │ │ ├── JarEntryIterator.java │ │ │ │ ├── JarFileIterator.java │ │ │ │ ├── JarPackageIterator.java │ │ │ │ └── NamespaceIterator.java │ │ │ ├── lock/ │ │ │ │ ├── ReentrantReadWriteSpinLock.java │ │ │ │ ├── ReentrantSpinLock.java │ │ │ │ └── SpinLock.java │ │ │ ├── math/ │ │ │ │ ├── BigNumberMath.java │ │ │ │ ├── BigNumberMathAchieve.java │ │ │ │ ├── PrecisionHolder.java │ │ │ │ └── Vectorizer.java │ │ │ ├── name/ │ │ │ │ ├── ArchName.java │ │ │ │ ├── ArchNamespaceNode.java │ │ │ │ ├── FixScopeName.java │ │ │ │ ├── GenericMultiNamespace.java │ │ │ │ ├── GenericNamespaceParser.java │ │ │ │ ├── MultiNamespace.java │ │ │ │ ├── MultiScopeName.java │ │ │ │ ├── Name.java │ │ │ │ ├── Namespace.java │ │ │ │ ├── NamespaceParser.java │ │ │ │ ├── ScopeName.java │ │ │ │ ├── UniNamespace.java │ │ │ │ └── path/ │ │ │ │ ├── BasicPathResolver.java │ │ │ │ └── PathResolver.java │ │ │ ├── rdb/ │ │ │ │ ├── ArchRDBExecutor.java │ │ │ │ ├── DirectResultSession.java │ │ │ │ ├── MappedExecutor.java │ │ │ │ ├── MappedSQLSplicer.java │ │ │ │ ├── RDBHost.java │ │ │ │ ├── ResultSession.java │ │ │ │ ├── SQLSplicer.java │ │ │ │ └── SQLStrings.java │ │ │ ├── template/ │ │ │ │ ├── TemplateCursorParser.java │ │ │ │ ├── 
TemplateParser.java │ │ │ │ ├── UTRAlmondProvider.java │ │ │ │ └── UniformTemplateRenderer.java │ │ │ └── uoi/ │ │ │ ├── GenericUniformObjectLoaderFactory.java │ │ │ ├── LocalUOIJavaClassProvider.java │ │ │ ├── UOI.java │ │ │ ├── UniformObjectLoader.java │ │ │ └── UniformObjectLoaderFactory.java │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ ├── system/ │ │ │ ├── SimpleCascadeComponentManager.java │ │ │ └── TestComponent.java │ │ ├── unit/ │ │ │ ├── JavaGenericTests.java │ │ │ ├── TestFileIteratorAndDistinct.java │ │ │ ├── TestMultiValueMap.java │ │ │ └── TestUnits.java │ │ └── util/ │ │ ├── TestCompactTimestamp.java │ │ ├── TestDateTime.java │ │ ├── TestJSONConfig.java │ │ ├── TestNamespace.java │ │ ├── TestParser.java │ │ ├── TestRRWSLock.java │ │ ├── TestTemplate.java │ │ ├── inc.jplus │ │ └── json/ │ │ ├── Parasite.java │ │ ├── Slave.java │ │ └── TestJSON.java │ ├── Slime/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── slime/ │ │ │ ├── cache/ │ │ │ │ ├── CacheConstants.java │ │ │ │ └── query/ │ │ │ │ ├── ArchConcurrentCountDictCache.java │ │ │ │ ├── ArchCountDictCache.java │ │ │ │ ├── ArchLocalDictCachePage.java │ │ │ │ ├── ConcurrentMergeLRUDictCachePage.java │ │ │ │ ├── CountDictCachePage.java │ │ │ │ ├── DictCachePage.java │ │ │ │ ├── DirectlySourceAccessCacheAdapter.java │ │ │ │ ├── IterableDictCachePage.java │ │ │ │ ├── LocalBufferedDictCachePage.java │ │ │ │ ├── LocalDictCachePage.java │ │ │ │ ├── LocalFixedLRUDictCachePage.java │ │ │ │ ├── RangedDictCachePage.java │ │ │ │ ├── SourceRetriever.java │ │ │ │ ├── UniformCountDictCache.java │ │ │ │ ├── UniformCountSelfLoadingDictCache.java │ │ │ │ ├── UniformDictCache.java │ │ │ │ ├── UniformSelfLoadingDictCache.java │ │ │ │ └── pool/ │ │ │ │ ├── BatchPageSourceRetriever.java │ │ │ │ ├── CountSelfPooledPageDictCache.java │ │ │ │ ├── LocalHotspotPooledDictCache.java │ │ │ │ ├── LocalLRUPooledDictCache.java │ │ │ │ ├── LocalLRUPrimaryPooledDictCache.java │ │ │ │ ├── LocalRangedDictCachePage.java │ │ │ │ ├── PoolCaches.java │ │ │ │ ├── PooledPageDictCache.java │ │ │ │ └── PrimaryPooledDictCache.java │ │ │ ├── chunk/ │ │ │ │ ├── ArchPatriarchalChunk.java │ │ │ │ ├── Chunk.java │ │ │ │ ├── ContiguousPage.java │ │ │ │ ├── Continunk.java │ │ │ │ ├── DiscreteChunk.java │ │ │ │ ├── DivisibleChunk.java │ │ │ │ ├── Frame.java │ │ │ │ ├── Minimunk.java │ │ │ │ ├── Page.java │ │ │ │ ├── PatriarchalChunk.java │ │ │ │ ├── RangedChunk64.java │ │ │ │ ├── RangedPage.java │ │ │ │ ├── RangedPage64.java │ │ │ │ ├── Splitunk.java │ │ │ │ ├── marshaling/ │ │ │ │ │ ├── ArchMasterSplitunkPartitioner64.java │ │ │ │ │ ├── BuddyPrepPartitionDividerStrategy64.java │ │ │ │ │ ├── ChunkPartitioner.java │ │ │ │ │ ├── EvenSeqChunkPartitioner64.java │ │ │ │ │ ├── PageCluster.java │ │ │ │ │ ├── PageDividerPartition64.java │ │ │ │ │ ├── PageGroup.java │ │ │ │ │ ├── PagePartition.java │ │ │ │ │ ├── PagePartitionGroup.java │ │ │ │ │ ├── PagePartitioner.java │ │ │ │ │ ├── PartitionDividerStrategy.java │ │ │ │ │ ├── PartitionableChunkDivider64.java │ │ │ │ │ ├── PartitionablePageDivider64.java │ │ │ │ │ ├── PreparedEvenSeqPagePartitioner64.java │ │ │ │ │ ├── PreparedPageDividerPartition64.java │ │ │ │ │ ├── SequentialPagePartitionGroup.java │ │ │ │ │ └── SequentialPagePartitionGroup64.java │ │ │ │ └── scheduler/ │ │ │ │ ├── ActivePageScheduler.java │ │ │ │ ├── ActivePageScheduler64.java │ │ │ │ ├── ArchMasterSplitunkDivider64.java │ │ │ │ ├── BadAllocateException.java │ │ │ │ ├── BatchActivePageScheduler.java │ │ │ │ ├── 
BatchActivePageScheduler64.java │ │ │ │ ├── ChunkDivider.java │ │ │ │ ├── ChunkRegister.java │ │ │ │ ├── DefaultPageRecycleStrategy.java │ │ │ │ ├── DirectPagePool.java │ │ │ │ ├── FixedChunkDivider64.java │ │ │ │ ├── FixedPageDivider64.java │ │ │ │ ├── LocalBatchActivePageScheduler64.java │ │ │ │ ├── LocalMapChunkRegister.java │ │ │ │ ├── PageDivider.java │ │ │ │ ├── PagePool.java │ │ │ │ ├── PageRecycleStrategy.java │ │ │ │ ├── PageScheduler.java │ │ │ │ ├── RangedPageScheduler.java │ │ │ │ └── RangedPageScheduler64.java │ │ │ ├── cluster/ │ │ │ │ ├── ArchSequentialChunkGroup.java │ │ │ │ ├── ChunkGroup.java │ │ │ │ ├── Cluster.java │ │ │ │ ├── RangedCluster.java │ │ │ │ └── SequentialChunkGroup.java │ │ │ ├── entity/ │ │ │ │ ├── ArchEnumIndexableEntity.java │ │ │ │ ├── EnumIndexableEntity.java │ │ │ │ └── ObjectiveEntity.java │ │ │ ├── map/ │ │ │ │ ├── AlterableCacher.java │ │ │ │ ├── AlterableQuerier.java │ │ │ │ ├── LocalMapQuerier.java │ │ │ │ ├── Mapper.java │ │ │ │ ├── MonoKeyQueryRange.java │ │ │ │ ├── Querier.java │ │ │ │ ├── QueryRange.java │ │ │ │ ├── indexable/ │ │ │ │ │ ├── IndexableCachedMap.java │ │ │ │ │ └── IndexableMapQuerier.java │ │ │ │ └── rdb/ │ │ │ │ ├── RDBMapQuerier.java │ │ │ │ └── RangedRDBCachedMap.java │ │ │ ├── meta/ │ │ │ │ ├── TableIndex64Meta.java │ │ │ │ └── TableIndexMeta.java │ │ │ ├── query/ │ │ │ │ ├── GenericPageQuery.java │ │ │ │ └── PageQuery.java │ │ │ ├── source/ │ │ │ │ ├── ArchQueryScopeMeta.java │ │ │ │ ├── DAOScanner.java │ │ │ │ ├── DataAccessObject.java │ │ │ │ ├── GenericResultConverter.java │ │ │ │ ├── ResultConverter.java │ │ │ │ ├── UniformQueryScopeMeta.java │ │ │ │ ├── XMLResourceScanner.java │ │ │ │ ├── indexable/ │ │ │ │ │ ├── GenericIndexKeySourceRetriever.java │ │ │ │ │ ├── GenericIndexableTargetScopeMeta.java │ │ │ │ │ ├── IndexableDataManipulator.java │ │ │ │ │ ├── IndexableIterableManipulator.java │ │ │ │ │ └── IndexableTargetScopeMeta.java │ │ │ │ └── rdb/ │ │ │ │ ├── ArchRelationalDatabase.java │ │ │ │ ├── ContiguousNumIndexBatchPageSourceRetriever.java │ │ │ │ ├── GenericRDBTargetTableMeta.java │ │ │ │ ├── GenericSingleKeySourceRetriever.java │ │ │ │ ├── RDBClient.java │ │ │ │ ├── RDBQuerierDataManipulator.java │ │ │ │ ├── RDBTargetTableMeta.java │ │ │ │ ├── RangedRDBQuerierDataManipulator.java │ │ │ │ └── RelationalDatabase.java │ │ │ └── unitization/ │ │ │ ├── IntervalRangeComparator.java │ │ │ ├── LinerRange.java │ │ │ ├── MinMaxRange.java │ │ │ ├── MinMaxRange64.java │ │ │ ├── NumPrecision.java │ │ │ ├── PartialOrderRange.java │ │ │ ├── PartialRange.java │ │ │ ├── Precision.java │ │ │ ├── Precision64.java │ │ │ └── Range.java │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ ├── cache/ │ │ │ └── TestCache.java │ │ └── chunk/ │ │ └── TestChunk.java │ ├── Springram/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── summer/ │ │ └── spring/ │ │ ├── SpringKernel.java │ │ ├── Springram.java │ │ ├── Springron.java │ │ └── util/ │ │ └── ConfigUtils.java │ ├── Summer/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── pinecone/ │ │ └── summer/ │ │ ├── ArchConnectDispatcher.java │ │ ├── ArchConnection.java │ │ ├── ArchHostSystem.java │ │ ├── ArchPageson.java │ │ ├── ArchRouterDispatcher.java │ │ ├── ArchWizard.java │ │ ├── ArchWizardSummoner.java │ │ ├── ArchWizardum.java │ │ ├── Connectiom.java │ │ ├── GetConnection.java │ │ ├── NaughtyGenieInvokedException.java │ │ ├── PostConnection.java │ │ ├── RouterType.java │ │ ├── SystemRoutlet.java │ │ ├── 
SystemServlet.java │ │ ├── SystemSpawner.java │ │ ├── TerminateSessionException.java │ │ ├── WizardGeniesInvoker.java │ │ ├── context/ │ │ │ └── ServletContextAware.java │ │ ├── http/ │ │ │ ├── CommonHttpEntityParser.java │ │ │ ├── HttpEntityParser.java │ │ │ ├── HttpHeaders.java │ │ │ ├── HttpMethod.java │ │ │ ├── HttpURLParser.java │ │ │ ├── InvalidMediaTypeException.java │ │ │ └── MediaType.java │ │ ├── io/ │ │ │ ├── AbstractResource.java │ │ │ ├── InputStreamSource.java │ │ │ ├── PathResource.java │ │ │ ├── Resource.java │ │ │ └── WritableResource.java │ │ ├── multiparts/ │ │ │ ├── MaxUploadSizeExceededException.java │ │ │ ├── MultipartException.java │ │ │ ├── MultipartFile.java │ │ │ ├── MultipartHttpServletRequest.java │ │ │ ├── MultipartRequest.java │ │ │ ├── MultipartResolver.java │ │ │ ├── commons/ │ │ │ │ ├── CommonsFileUploadSupport.java │ │ │ │ ├── CommonsMultipartFile.java │ │ │ │ ├── CommonsMultipartFiles.java │ │ │ │ └── CommonsMultipartResolver.java │ │ │ └── support/ │ │ │ ├── AbstractMultipartHttpServletRequest.java │ │ │ └── DefaultMultipartHttpServletRequest.java │ │ ├── prototype/ │ │ │ ├── Citizen.java │ │ │ ├── Component.java │ │ │ ├── ConnectDispatcher.java │ │ │ ├── Connection.java │ │ │ ├── Connectson.java │ │ │ ├── Controller.java │ │ │ ├── GenieBottle.java │ │ │ ├── HostSystem.java │ │ │ ├── JSONBasedControl.java │ │ │ ├── JasperBasedModel.java │ │ │ ├── ModelEnchanter.java │ │ │ ├── Pagesion.java │ │ │ ├── Pageson.java │ │ │ ├── RouterDispatcher.java │ │ │ ├── RouterMapping.java │ │ │ ├── SequentialDispatcher.java │ │ │ ├── Servletson.java │ │ │ ├── Wizard.java │ │ │ ├── WizardSummoner.java │ │ │ └── Wizardum.java │ │ └── util/ │ │ ├── InvalidMimeTypeException.java │ │ ├── MimeType.java │ │ ├── MimeTypeUtils.java │ │ ├── ResourceUtils.java │ │ ├── RouteUtils.java │ │ └── WebUtils.java │ ├── Ulfhedinn/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── ulf/ │ │ │ └── util/ │ │ │ ├── bson/ │ │ │ │ ├── ArchJSONDecompiler.java │ │ │ │ ├── DataTypeCode.java │ │ │ │ ├── UlfJSONCompiler.java │ │ │ │ └── UlfJSONDecompiler.java │ │ │ ├── guid/ │ │ │ │ ├── GUIDs.java │ │ │ │ ├── i128/ │ │ │ │ │ ├── ArchGuidAllocator128.java │ │ │ │ │ ├── GUID128.java │ │ │ │ │ ├── GuidAllocator128.java │ │ │ │ │ ├── GuidAllocator128V1.java │ │ │ │ │ ├── GuidAllocator128V2.java │ │ │ │ │ ├── GuidAllocator128V3.java │ │ │ │ │ ├── GuidAllocator128V4.java │ │ │ │ │ ├── GuidAllocator128V5.java │ │ │ │ │ ├── GuidAllocator128V6.java │ │ │ │ │ ├── GuidAllocator128V7.java │ │ │ │ │ ├── GuidAllocatorHC128V7.java │ │ │ │ │ ├── UUID128.java │ │ │ │ │ ├── UuidCreator.java │ │ │ │ │ ├── codec/ │ │ │ │ │ │ ├── GuidCodec.java │ │ │ │ │ │ ├── StandardBinaryCodec.java │ │ │ │ │ │ ├── StandardStringCodec.java │ │ │ │ │ │ ├── UriCodec.java │ │ │ │ │ │ ├── UrnCodec.java │ │ │ │ │ │ ├── base/ │ │ │ │ │ │ │ ├── Base16Codec.java │ │ │ │ │ │ │ ├── Base32Codec.java │ │ │ │ │ │ │ ├── Base58BtcCodec.java │ │ │ │ │ │ │ ├── Base62Codec.java │ │ │ │ │ │ │ ├── Base64Codec.java │ │ │ │ │ │ │ ├── Base64UrlCodec.java │ │ │ │ │ │ │ ├── BaseN.java │ │ │ │ │ │ │ ├── BaseNCodec.java │ │ │ │ │ │ │ └── function/ │ │ │ │ │ │ │ ├── Base16Decoder.java │ │ │ │ │ │ │ ├── Base16Encoder.java │ │ │ │ │ │ │ ├── Base32Decoder.java │ │ │ │ │ │ │ ├── Base32Encoder.java │ │ │ │ │ │ │ ├── Base64Decoder.java │ │ │ │ │ │ │ ├── Base64Encoder.java │ │ │ │ │ │ │ ├── BaseNDecoder.java │ │ │ │ │ │ │ ├── BaseNEncoder.java │ │ │ │ │ │ │ ├── BaseNRemainderDecoder.java │ │ │ │ │ │ │ └── 
BaseNRemainderEncoder.java │ │ │ │ │ │ └── other/ │ │ │ │ │ │ ├── DotNetGuid1Codec.java │ │ │ │ │ │ ├── DotNetGuid4Codec.java │ │ │ │ │ │ ├── NcnameCodec.java │ │ │ │ │ │ ├── SlugCodec.java │ │ │ │ │ │ └── TimeOrderedCodec.java │ │ │ │ │ ├── enums/ │ │ │ │ │ │ ├── UuidLocalDomain.java │ │ │ │ │ │ ├── UuidNamespace.java │ │ │ │ │ │ ├── UuidVariant.java │ │ │ │ │ │ └── UuidVersion.java │ │ │ │ │ ├── exception/ │ │ │ │ │ │ └── InvalidUuidException.java │ │ │ │ │ ├── factory/ │ │ │ │ │ │ ├── AbstCombFactory.java │ │ │ │ │ │ ├── AbstNameBasedFactory.java │ │ │ │ │ │ ├── AbstRandomBasedFactory.java │ │ │ │ │ │ ├── AbstTimeBasedFactory.java │ │ │ │ │ │ ├── UuidFactory.java │ │ │ │ │ │ ├── function/ │ │ │ │ │ │ │ ├── ClockSeqFunction.java │ │ │ │ │ │ │ ├── NodeIdFunction.java │ │ │ │ │ │ │ ├── RandomFunction.java │ │ │ │ │ │ │ ├── TimeFunction.java │ │ │ │ │ │ │ └── impl/ │ │ │ │ │ │ │ ├── DefaultClockSeqFunction.java │ │ │ │ │ │ │ ├── DefaultNodeIdFunction.java │ │ │ │ │ │ │ ├── DefaultRandomFunction.java │ │ │ │ │ │ │ ├── DefaultTimeFunction.java │ │ │ │ │ │ │ ├── HashNodeIdFunction.java │ │ │ │ │ │ │ ├── MacNodeIdFunction.java │ │ │ │ │ │ │ ├── RandomNodeIdFunction.java │ │ │ │ │ │ │ └── WindowsTimeFunction.java │ │ │ │ │ │ ├── nonstandard/ │ │ │ │ │ │ │ ├── PrefixCombFactory.java │ │ │ │ │ │ │ ├── ShortPrefixCombFactory.java │ │ │ │ │ │ │ ├── ShortSuffixCombFactory.java │ │ │ │ │ │ │ └── SuffixCombFactory.java │ │ │ │ │ │ └── standard/ │ │ │ │ │ │ ├── DceSecurityFactory.java │ │ │ │ │ │ ├── NameBasedMd5Factory.java │ │ │ │ │ │ ├── NameBasedSha1Factory.java │ │ │ │ │ │ ├── RandomBasedFactory.java │ │ │ │ │ │ ├── TimeBasedFactory.java │ │ │ │ │ │ ├── TimeOrderedEpochFactory.java │ │ │ │ │ │ └── TimeOrderedFactory.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── CombUtil.java │ │ │ │ │ ├── MachineId.java │ │ │ │ │ ├── UuidBuilder.java │ │ │ │ │ ├── UuidComparator.java │ │ │ │ │ ├── UuidTime.java │ │ │ │ │ ├── UuidUtil.java │ │ │ │ │ ├── UuidValidator.java │ │ │ │ │ ├── immutable/ │ │ │ │ │ │ ├── ByteArray.java │ │ │ │ │ │ └── CharArray.java │ │ │ │ │ └── internal/ │ │ │ │ │ ├── ByteUtil.java │ │ │ │ │ ├── JavaVersionUtil.java │ │ │ │ │ ├── NetworkUtil.java │ │ │ │ │ ├── RandomUtil.java │ │ │ │ │ └── SettingsUtil.java │ │ │ │ └── i64/ │ │ │ │ ├── BitsAllocator.java │ │ │ │ ├── GUID64.java │ │ │ │ ├── GUID72.java │ │ │ │ ├── GuidAllocator64.java │ │ │ │ ├── GuidAllocator72.java │ │ │ │ ├── GuidAllocator72V2.java │ │ │ │ ├── utils/ │ │ │ │ │ ├── DateUtils.java │ │ │ │ │ ├── DockerUtils.java │ │ │ │ │ ├── EnumUtils.java │ │ │ │ │ ├── NamingThreadFactory.java │ │ │ │ │ ├── NetUtils.java │ │ │ │ │ ├── PaddedAtomicLong.java │ │ │ │ │ └── ValuedEnum.java │ │ │ │ └── worker/ │ │ │ │ ├── GenericDisposableWorkerIdAssigner.java │ │ │ │ ├── WorkerIdAssigner.java │ │ │ │ ├── WorkerNodeType.java │ │ │ │ └── entity/ │ │ │ │ └── WorkerNodeEntity.java │ │ │ ├── lang/ │ │ │ │ ├── ArchMultiScopeFactory.java │ │ │ │ ├── ArchMultiScopeLoader.java │ │ │ │ ├── GenericPreloadClassInspector.java │ │ │ │ ├── HierarchyClassInspector.java │ │ │ │ ├── MultiScopeFactory.java │ │ │ │ ├── MultiTraitClassLoader.java │ │ │ │ ├── PooledClassCandidateScanner.java │ │ │ │ ├── PreloadClassInspector.java │ │ │ │ ├── SimpleAnnotationExcludeFilter.java │ │ │ │ └── TraitClassLoader.java │ │ │ ├── protobuf/ │ │ │ │ ├── BeanProtobufDecoder.java │ │ │ │ ├── BeanProtobufEncoder.java │ │ │ │ ├── DescriptorNameNormalizer.java │ │ │ │ ├── FieldProtobufDecoder.java │ │ │ │ ├── FieldProtobufEncoder.java │ │ │ │ ├── FileDescriptorFormater.java │ │ │ │ ├── 
GenericBeanProtobufDecoder.java │ │ │ │ ├── GenericBeanProtobufEncoder.java │ │ │ │ ├── GenericFieldProtobufDecoder.java │ │ │ │ ├── GenericFieldProtobufEncoder.java │ │ │ │ ├── Options.java │ │ │ │ ├── PrimitiveWrapper.java │ │ │ │ ├── ProtobufEncodeException.java │ │ │ │ ├── ProtobufUtils.java │ │ │ │ ├── RepeatedWrapper.java │ │ │ │ └── WolfProtobufConstants.java │ │ │ └── template/ │ │ │ ├── UTRFreeMarkerProvider.java │ │ │ └── UTRThymeleafProvider.java │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ ├── TestBson.java │ │ └── UTRTests.java │ ├── pom.xml │ ├── ulf-lib-construction/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── pinecone/ │ │ │ └── ulf/ │ │ │ └── beans/ │ │ │ ├── aop/ │ │ │ │ └── UlfurEnableAspectProxy.java │ │ │ └── construction/ │ │ │ ├── StructureAnnotationConfiguration.java │ │ │ ├── StructureAnnotationProcessor.java │ │ │ ├── UlfInstanceManufacturer.java │ │ │ └── UlfurInstanceManufacturer.java │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ └── wolf/ │ │ └── construction/ │ │ ├── CanesService.java │ │ ├── CanisAspect.java │ │ ├── FoxBlade.java │ │ ├── FoxService.java │ │ └── HuskyService.java │ └── ulf-lib-oltp-rdb/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── com/ │ └── pinecone/ │ └── ulf/ │ └── rdb/ │ ├── mysql/ │ │ ├── MySQLExecutor.java │ │ └── MySQLHost.java │ └── sqlite/ │ ├── SQLiteExecutor.java │ ├── SQLiteHost.java │ └── SQLiteMethod.java ├── README.md ├── RedQueen/ │ ├── pom.xml │ ├── redqueen-architecture/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── acorn/ │ │ └── redqueen/ │ │ ├── compute/ │ │ │ └── ComputationNode.java │ │ └── system/ │ │ └── Dummy.java │ ├── redqueen-computation-suit/ │ │ └── pom.xml │ ├── redqueen-framework-service/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── acorn/ │ │ └── redqueen/ │ │ └── service/ │ │ ├── ApplicationManager.java │ │ ├── ArchRedApplication.java │ │ ├── RedApplication.java │ │ ├── RedQueenServiceControllerException.java │ │ ├── RedServiceApplication.java │ │ ├── ServiceApplication.java │ │ └── conduct/ │ │ ├── CollectiveServiceRegiment.java │ │ └── RedCollectiveServiceRegiment.java │ └── redqueen-system/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── com/ │ └── acorn/ │ └── redqueen/ │ ├── RedQueen.java │ └── system/ │ └── ServiceCentralControl.java ├── Saurons/ │ ├── Saurye/ │ │ └── pom.xml │ ├── Shadow/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ ├── META-INF/ │ │ │ └── MANIFEST.MF │ │ └── com/ │ │ └── sauron/ │ │ └── shadow/ │ │ ├── Shadow.java │ │ ├── ShadowBoot.java │ │ ├── chronicle/ │ │ │ ├── AffinitySuggestation.java │ │ │ ├── ArchClerk.java │ │ │ ├── Chronicle.java │ │ │ ├── ChronicleHeist.java │ │ │ ├── ChroniclePeriodicHeistKernel.java │ │ │ ├── ChronicleReaver.java │ │ │ ├── Clerk.java │ │ │ ├── Newstron/ │ │ │ │ ├── BaiduClerk.java │ │ │ │ ├── CNNClerk.java │ │ │ │ ├── GoogleClerk.java │ │ │ │ └── ZhihuClerk.java │ │ │ ├── SimpleAjaxBasedClerk.java │ │ │ └── dao/ │ │ │ └── BasicChronicleManipulator.java │ │ └── heists/ │ │ ├── Apesk/ │ │ │ ├── ApeskHeist.java │ │ │ ├── ApeskReaver.java │ │ │ └── ApeskStalker.java │ │ ├── ArtStation/ │ │ │ ├── ArtStationHeist.java │ │ │ ├── ArtStationReaver.java │ │ │ └── ArtStationStalker.java │ │ ├── DeviantArt/ │ │ │ ├── DeviantArtHeist.java │ │ │ ├── DeviantArtReaver.java │ │ │ └── DeviantArtStalker.java │ │ ├── DownloadCNet/ │ │ │ ├── DownloadCNetHeist.java │ │ │ ├── DownloadCNetReaver.java │ │ │ └── DownloadCNetStalker.java │ │ ├── 
PubChem/ │ │ │ ├── PubChemHeist.java │ │ │ ├── PubChemReaver.java │ │ │ └── PubChemStalker.java │ │ ├── Steam/ │ │ │ ├── SteamHeist.java │ │ │ └── SteamReaver.java │ │ └── Void/ │ │ ├── VoidHeist.java │ │ └── VoidReaver.java │ ├── heist-framework-architecture/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── sauron/ │ │ └── heist/ │ │ ├── heistron/ │ │ │ ├── CascadeHeist.java │ │ │ ├── ConfigNotFoundException.java │ │ │ ├── Crew.java │ │ │ ├── Crewnium.java │ │ │ ├── Embezzler.java │ │ │ ├── HeistConfigConstants.java │ │ │ ├── HeistException.java │ │ │ ├── HeistExecutionException.java │ │ │ ├── HeistOrchestrateException.java │ │ │ ├── HeistScheme.java │ │ │ ├── HeistStatusTerminatedException.java │ │ │ ├── Heistgram.java │ │ │ ├── Heistium.java │ │ │ ├── Heists.java │ │ │ ├── Heistum.java │ │ │ ├── LootAbortException.java │ │ │ ├── LootRecoveredException.java │ │ │ ├── Metier.java │ │ │ ├── Reaver.java │ │ │ ├── Stalker.java │ │ │ ├── chronic/ │ │ │ │ ├── MultiRaiderLoader.java │ │ │ │ ├── PeriodicHeist.java │ │ │ │ ├── PeriodicHeistKernel.java │ │ │ │ ├── PeriodicHeistRehearsal.java │ │ │ │ ├── Raider.java │ │ │ │ ├── RaiderFactory.java │ │ │ │ └── Raiderlet.java │ │ │ ├── event/ │ │ │ │ └── HeistLifecycleEventInterceptor.java │ │ │ ├── mapreduce/ │ │ │ │ ├── SchemeQuerier.java │ │ │ │ └── TaskScheme.java │ │ │ ├── orchestration/ │ │ │ │ ├── ChildHeistInstanceModifier.java │ │ │ │ ├── ChildHeistOrchestrator.java │ │ │ │ ├── Heistlet.java │ │ │ │ ├── HeistletOrchestrator.java │ │ │ │ ├── Hierarchy.java │ │ │ │ ├── Instructations.java │ │ │ │ ├── TaskTransaction.java │ │ │ │ └── Taskium.java │ │ │ └── scheduler/ │ │ │ ├── PageFrame64ConsumerAdapter.java │ │ │ ├── TaskConsumer.java │ │ │ ├── TaskFrame64Consumer.java │ │ │ ├── TaskFrame64Producer.java │ │ │ ├── TaskPage.java │ │ │ ├── TaskPageConsumer.java │ │ │ ├── TaskPageProducer.java │ │ │ ├── TaskProducer.java │ │ │ └── TaskSchedulerStrategy.java │ │ └── http/ │ │ └── HttpBrowserConf.java │ ├── heist-http-client-okhttp-suit/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── sauron/ │ │ └── heist/ │ │ └── okhttp/ │ │ ├── HeistOkHttpClientFactory.java │ │ ├── InMemoryCookieJar.java │ │ ├── OkClientConstructionScheme.java │ │ └── OkHttpFactory.java │ ├── heist-system-schedule/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ └── sauron/ │ │ │ │ └── heist/ │ │ │ │ ├── heistron/ │ │ │ │ │ ├── ArchCrew.java │ │ │ │ │ ├── ArchHeistum.java │ │ │ │ │ ├── CrewPageProcessor.java │ │ │ │ │ ├── CrewPipeline.java │ │ │ │ │ ├── HTTPCrew.java │ │ │ │ │ ├── HTTPHeist.java │ │ │ │ │ ├── HTTPIndexHeist.java │ │ │ │ │ ├── Heist.java │ │ │ │ │ ├── HeistEntity.java │ │ │ │ │ ├── Heistotron.java │ │ │ │ │ ├── LocalCrewnium.java │ │ │ │ │ ├── LocalHeistium.java │ │ │ │ │ ├── MegaDOMIndexCrew.java │ │ │ │ │ ├── PatriarchalHeistScheme.java │ │ │ │ │ ├── chronic/ │ │ │ │ │ │ ├── ArchPeriodicHeistRehearsal.java │ │ │ │ │ │ ├── ExcludeRaiderletFilters.java │ │ │ │ │ │ ├── FeastInstructation.java │ │ │ │ │ │ ├── LocalMultiRaiderLoader.java │ │ │ │ │ │ ├── LocalRaiderFactory.java │ │ │ │ │ │ ├── LocalRaiderScopeSet.java │ │ │ │ │ │ ├── RaiderletAnnotationValueFilter.java │ │ │ │ │ │ └── SedationInstructation.java │ │ │ │ │ ├── mapreduce/ │ │ │ │ │ │ └── HTTPResourceTaskScheme.java │ │ │ │ │ ├── orchestration/ │ │ │ │ │ │ ├── ExcludeHeistletFilters.java │ │ │ │ │ │ ├── HeistTask.java │ │ │ │ │ │ ├── HeistletAnnotationValueFilter.java │ │ │ │ │ │ ├── 
LocalChildHeistOrchestrator.java │ │ │ │ │ │ ├── LocalHeistletFactory.java │ │ │ │ │ │ ├── LocalHeistletLoader.java │ │ │ │ │ │ └── LocalHeistumOrchestrator.java │ │ │ │ │ └── scheduler/ │ │ │ │ │ ├── ActiveTaskPageProducer.java │ │ │ │ │ ├── LocalMultiActiveTaskPageProducer.java │ │ │ │ │ ├── LocalPreemptiveSingleFrame64Consumer.java │ │ │ │ │ ├── LocalPreemptiveTaskFrame64Producer.java │ │ │ │ │ ├── LocalSingleTaskPageConsumer.java │ │ │ │ │ ├── LocalTaskSchedulerStrategy.java │ │ │ │ │ ├── RangedTaskPage.java │ │ │ │ │ └── SingleTaskPageConsumer.java │ │ │ │ └── http/ │ │ │ │ ├── AbstractDownloader.java │ │ │ │ ├── GenericHttpClientGenerator.java │ │ │ │ ├── HttpBrowserDownloader.java │ │ │ │ ├── HttpClientGenerator.java │ │ │ │ └── PageDownloader.java │ │ │ └── resources/ │ │ │ └── application.yaml │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ ├── Test.java │ │ └── others/ │ │ └── TestServgramTritium.java │ ├── pom.xml │ └── sauron-core/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── com/ │ └── sauron/ │ ├── Sauron.java │ └── system/ │ ├── Saunut.java │ └── SauronKingdom.java ├── Skynet/ │ ├── pom.xml │ ├── skynet-architecture/ │ │ └── pom.xml │ ├── skynet-cloud-deploy/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── acorn/ │ │ └── skynet/ │ │ └── deploy/ │ │ ├── CloudDeploy.java │ │ └── service/ │ │ ├── CollectiveServiceDeployRegiment.java │ │ └── SkyCollectiveServiceDeployRegiment.java │ └── skynet-system/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── com/ │ └── acorn/ │ └── skynet/ │ ├── Skynet.java │ └── system/ │ └── SkynetSubsystem.java ├── Sparta/ │ ├── pom.xml │ ├── sparta-api-uac/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── walnut/ │ │ │ └── sparta/ │ │ │ └── account/ │ │ │ ├── rpc/ │ │ │ │ ├── thrift/ │ │ │ │ │ └── AccountIface.java │ │ │ │ └── wolfmc/ │ │ │ │ └── xx.java │ │ │ └── xxx.java │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── example/ │ │ └── AppTest.java │ ├── sparta-api-uofs/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── walnut/ │ │ │ └── sparta/ │ │ │ └── uofs/ │ │ │ ├── thrift/ │ │ │ │ └── UOFSIface.java │ │ │ └── xxx.java │ │ └── test/ │ │ └── java/ │ │ └── cn/ │ │ └── ken/ │ │ └── AppTest.java │ ├── sparta-core-console/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ └── walnut/ │ │ │ │ └── sparta/ │ │ │ │ ├── Sparta.java │ │ │ │ ├── SpartaBoot.java │ │ │ │ ├── config/ │ │ │ │ │ ├── SpringGlobalJSONConfig.java │ │ │ │ │ └── ibatis/ │ │ │ │ │ └── IBatisConfig.java │ │ │ │ ├── services/ │ │ │ │ │ ├── controller/ │ │ │ │ │ │ ├── Dummy.java │ │ │ │ │ │ ├── FileSystemController.java │ │ │ │ │ │ ├── UOFSController.java │ │ │ │ │ │ └── v2/ │ │ │ │ │ │ ├── RegistryMetaController.java │ │ │ │ │ │ ├── RegistryTreeController.java │ │ │ │ │ │ ├── ScenarioMetaController.java │ │ │ │ │ │ ├── ServiceMetaController.java │ │ │ │ │ │ ├── ServiceTreeController.java │ │ │ │ │ │ └── TaskMetaController.java │ │ │ │ │ ├── drivers/ │ │ │ │ │ │ ├── RegistryMasterManipulatorImpl.java │ │ │ │ │ │ ├── RegistryMasterTreeManipulatorImpl.java │ │ │ │ │ │ ├── ScenarioMasterManipulatorImpl.java │ │ │ │ │ │ ├── ScenarioTreeManipulatorSharerImpl.java │ │ │ │ │ │ ├── ServiceMasterManipulatorImpl.java │ │ │ │ │ │ └── ServiceMasterTreeManipulatorImpl.java │ │ │ │ │ ├── dto/ │ │ │ │ │ │ └── updateObjectDto.java │ │ │ │ │ ├── mapper/ │ │ │ │ │ │ └── FakeNews.java │ │ │ │ │ ├── pojo/ │ │ │ │ │ │ └── Dummy.java │ │ │ │ │ └── service/ │ │ │ │ 
│ ├── ServiceNodeService.java │ │ │ │ │ ├── ServiceTreeService.java │ │ │ │ │ └── serviceImpl/ │ │ │ │ │ ├── ServiceNodeServiceImpl.java │ │ │ │ │ └── ServiceTreeServiceImpl.java │ │ │ │ └── system/ │ │ │ │ ├── BasicResultResponse.java │ │ │ │ └── SystemController.java │ │ │ └── resources/ │ │ │ └── uid/ │ │ │ └── default-uid-spring.xml │ │ └── test/ │ │ └── java/ │ │ ├── UniTrieMaptronTest.java │ │ ├── com/ │ │ │ ├── ender/ │ │ │ │ └── TestEnderHydra.java │ │ │ ├── rpc/ │ │ │ │ └── TestGrpcService.java │ │ │ └── sparta/ │ │ │ ├── TestAccount.java │ │ │ ├── TestBucket.java │ │ │ ├── TestDeployTree.java │ │ │ ├── TestInnerTree.java │ │ │ ├── TestKOMKing.java │ │ │ ├── TestLayer.java │ │ │ ├── TestQueue.java │ │ │ ├── TestRegistry.java │ │ │ ├── TestRemoteProcess.java │ │ │ ├── TestRuntime.java │ │ │ ├── TestSFM.java │ │ │ ├── TestServiceManager.java │ │ │ ├── TestServiceTree.java │ │ │ ├── TestSparta.java │ │ │ ├── TestTaskTree.java │ │ │ ├── TestUOFS.java │ │ │ └── TestVolume.java │ │ └── test.java │ ├── sparta-uac-console/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ └── walnut/ │ │ │ │ └── sparta/ │ │ │ │ └── account/ │ │ │ │ ├── SpartaAccountService.java │ │ │ │ ├── SpartaBoot.java │ │ │ │ ├── api/ │ │ │ │ │ ├── controller/ │ │ │ │ │ │ ├── v2/ │ │ │ │ │ │ │ └── AccountController.java │ │ │ │ │ │ └── xxx.java │ │ │ │ │ └── response/ │ │ │ │ │ └── BasicResultResponse.java │ │ │ │ ├── config/ │ │ │ │ │ └── WebConfig.java │ │ │ │ ├── domian/ │ │ │ │ │ └── vo/ │ │ │ │ │ ├── AccountLoginVO.java │ │ │ │ │ ├── QueryallUserVO.java │ │ │ │ │ └── UserLoginVO.java │ │ │ │ ├── interceptor/ │ │ │ │ │ ├── AuthenticationInterceptor.java │ │ │ │ │ └── RequiresAuthentication.java │ │ │ │ ├── properties/ │ │ │ │ │ └── JwtProperties.java │ │ │ │ ├── rpc/ │ │ │ │ │ └── thrift/ │ │ │ │ │ ├── AccountIfaceImpl.java │ │ │ │ │ └── AccountRPCService.java │ │ │ │ └── util/ │ │ │ │ └── JwtUtil.java │ │ │ └── resources/ │ │ │ └── Account.thrift │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── sparta/ │ │ └── account/ │ │ └── TestSpartaAccount.java │ ├── sparta-ucdn-console/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── walnut/ │ │ │ └── sparta/ │ │ │ └── ucdn/ │ │ │ └── console/ │ │ │ ├── SpartaBoot.java │ │ │ ├── UCDNBoot.java │ │ │ ├── api/ │ │ │ │ └── controller/ │ │ │ │ ├── v2/ │ │ │ │ │ ├── CDNFileController.java │ │ │ │ │ ├── CDNFolderController.java │ │ │ │ │ ├── ClientController.java │ │ │ │ │ ├── SiteController.java │ │ │ │ │ ├── SiteNodeController.java │ │ │ │ │ ├── TransmitController.java │ │ │ │ │ └── VersionController.java │ │ │ │ └── xx.java │ │ │ ├── config/ │ │ │ │ ├── AppCDNMyBatisConfig.java │ │ │ │ ├── BeanConfig.java │ │ │ │ ├── WebConfig.java │ │ │ │ └── WebSocketConfig.java │ │ │ ├── domain/ │ │ │ │ └── service/ │ │ │ │ ├── FileSystemService.java │ │ │ │ ├── NodeFileDistributionService.java │ │ │ │ ├── cluster/ │ │ │ │ │ ├── ClusterFileSyncTransaction.java │ │ │ │ │ ├── ClusterFileSyncTransactionManager.java │ │ │ │ │ ├── ClusterFileSynchronizationService.java │ │ │ │ │ ├── ClusterFileSynchronizationServiceImpl.java │ │ │ │ │ ├── ClusterFileTransactionManager.java │ │ │ │ │ ├── FileSynchronizedEventSubscriber.java │ │ │ │ │ ├── MultiClusterFileSyncTransaction.java │ │ │ │ │ └── UFMTransactionSynchronizedNotifier.java │ │ │ │ └── impl/ │ │ │ │ ├── FileSystemServiceImpl.java │ │ │ │ └── NodeFileDistributionServiceImpl.java │ │ │ ├── infrastructure/ │ │ │ │ ├── ClusterLock.java │ │ │ │ ├── 
FSContentDeliveryService.java │ │ │ │ ├── SpartaUCDNService.java │ │ │ │ ├── UCDNConsoleContents.java │ │ │ │ ├── UCDNConstants.java │ │ │ │ ├── UCDNContentDelivery.java │ │ │ │ ├── UCDNService.java │ │ │ │ ├── WolfKingMessageWareStone.java │ │ │ │ ├── dto/ │ │ │ │ │ ├── ClusterFileSyncDTO.java │ │ │ │ │ ├── DownloadObjectByChannelDTO.java │ │ │ │ │ ├── LogicVolumeDTO.java │ │ │ │ │ ├── PhysicalVolumeDTO.java │ │ │ │ │ ├── RenameDTO.java │ │ │ │ │ ├── SiteNodeDTO.java │ │ │ │ │ ├── StorageExpansionDTO.java │ │ │ │ │ ├── UpdateFileNameDTO.java │ │ │ │ │ └── UpdateObjectByChannelDTO.java │ │ │ │ ├── service/ │ │ │ │ │ ├── UCDNCentralServiceManager.java │ │ │ │ │ └── UCDNServiceManager.java │ │ │ │ └── vo/ │ │ │ │ ├── FolderContentVo.java │ │ │ │ ├── SiteNodeVO.java │ │ │ │ └── SyncFinishedVO.java │ │ │ ├── interceptor/ │ │ │ │ └── JWTInterceptor.java │ │ │ ├── mapper/ │ │ │ │ └── ClusterFileSyncMapper.java │ │ │ ├── ufm/ │ │ │ │ ├── FMDTransactionBlock.java │ │ │ │ ├── FileMultiDistributionController.java │ │ │ │ ├── FileMultiDistributionIface.java │ │ │ │ ├── FileMultiDistributionService.java │ │ │ │ ├── SessionPhaser.java │ │ │ │ ├── SessionValidator.java │ │ │ │ ├── UCFMConfig.java │ │ │ │ ├── UFMConfig.java │ │ │ │ ├── UFMDClusterDO.java │ │ │ │ ├── UFMDClusterFrame.java │ │ │ │ ├── UFMSessionPhaser.java │ │ │ │ ├── UFMSessionValidator.java │ │ │ │ ├── UFMSessionValidatorController.java │ │ │ │ ├── UOFSFileMultiDistributionService.java │ │ │ │ ├── event/ │ │ │ │ │ └── UFMEventSubscriber.java │ │ │ │ ├── protocol/ │ │ │ │ │ ├── FileMeta64.java │ │ │ │ │ └── RequestHead.java │ │ │ │ └── session/ │ │ │ │ └── UFMTransaction.java │ │ │ └── util/ │ │ │ └── JWTUtil.java │ │ └── test/ │ │ └── java/ │ │ └── cn/ │ │ └── ken/ │ │ └── AppTest.java │ ├── sparta-ucdn-service/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── sparta/ │ │ └── ucdn/ │ │ └── service/ │ │ ├── SpartaBoot.java │ │ ├── UCDNBoot.java │ │ ├── api/ │ │ │ ├── controller/ │ │ │ │ └── v2/ │ │ │ │ ├── ClientController.java │ │ │ │ └── ConsoleController.java │ │ │ ├── iface/ │ │ │ │ └── v2/ │ │ │ │ └── FileSyncDistributionController.java │ │ │ └── response/ │ │ │ └── BasicResultResponse.java │ │ ├── infrastructure/ │ │ │ ├── FSContentDeliveryService.java │ │ │ ├── SpartaUCDNService.java │ │ │ ├── UCDNBeans.java │ │ │ ├── UCDNService.java │ │ │ ├── UOFSContentDelivery.java │ │ │ ├── constants/ │ │ │ │ └── PolicyConstants.java │ │ │ ├── dto/ │ │ │ │ └── UploadDTO.java │ │ │ └── exception/ │ │ │ └── IllegalPathException.java │ │ └── umct/ │ │ ├── FileSyncDistribution.java │ │ └── FileSyncDistributionImpl.java │ ├── sparta-uofs-console/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ └── walnut/ │ │ │ │ └── sparta/ │ │ │ │ └── uofs/ │ │ │ │ └── console/ │ │ │ │ ├── SpartaBoot.java │ │ │ │ ├── UOFSBoot.java │ │ │ │ ├── api/ │ │ │ │ │ ├── controller/ │ │ │ │ │ │ ├── v2/ │ │ │ │ │ │ │ ├── BucketController.java │ │ │ │ │ │ │ ├── ExternalSymbolicController.java │ │ │ │ │ │ │ ├── FileController.java │ │ │ │ │ │ │ ├── FolderController.java │ │ │ │ │ │ │ ├── SiteController.java │ │ │ │ │ │ │ ├── TransmitController.java │ │ │ │ │ │ │ ├── UserController.java │ │ │ │ │ │ │ └── VolumeController.java │ │ │ │ │ │ └── xxx.java │ │ │ │ │ └── response/ │ │ │ │ │ └── BasicResultResponse.java │ │ │ │ ├── config/ │ │ │ │ │ ├── AsyncConfig.java │ │ │ │ │ └── WebConfig.java │ │ │ │ ├── domain/ │ │ │ │ │ ├── dto/ │ │ │ │ │ │ ├── CreateExternalDTO.java │ │ │ │ │ │ ├── 
CreateExternalSymbolicDTO.java │ │ │ │ │ │ ├── DownloadObjectByChannelDTO.java │ │ │ │ │ │ ├── LogicVolumeDTO.java │ │ │ │ │ │ ├── PhysicalVolumeDTO.java │ │ │ │ │ │ ├── RenameDTO.java │ │ │ │ │ │ ├── StorageExpansionDTO.java │ │ │ │ │ │ ├── UpdateFileNameDTO.java │ │ │ │ │ │ ├── UpdateObjectByChannelDTO.java │ │ │ │ │ │ └── UserLoginDTO.java │ │ │ │ │ └── vo/ │ │ │ │ │ └── FolderContentVo.java │ │ │ │ ├── infrastructure/ │ │ │ │ │ ├── SpartaUOFSService.java │ │ │ │ │ ├── UOFSCommonConfig.java │ │ │ │ │ ├── UOFSConfig.java │ │ │ │ │ ├── UOFSConsoleContents.java │ │ │ │ │ ├── UOFSContentDelivery.java │ │ │ │ │ ├── UOFSContentDeliveryService.java │ │ │ │ │ └── UOFSService.java │ │ │ │ ├── interceptor/ │ │ │ │ │ └── JWTInterceptor.java │ │ │ │ ├── service/ │ │ │ │ │ ├── FileService.java │ │ │ │ │ └── impl/ │ │ │ │ │ └── FileServiceImpl.java │ │ │ │ └── util/ │ │ │ │ └── JWTUtil.java │ │ │ └── resources/ │ │ │ └── UOFSIface.thrift │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── sparta/ │ │ └── uofs/ │ │ ├── SparteUcdnServiceApplicationTests.java │ │ └── TestSparta.java │ └── sparta-uofs-service/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── com/ │ │ └── walnut/ │ │ └── sparta/ │ │ └── uofs/ │ │ └── service/ │ │ ├── Sparta.java │ │ ├── SpartaBoot.java │ │ ├── api/ │ │ │ ├── controller/ │ │ │ │ └── xxx.java │ │ │ └── response/ │ │ │ └── BasicResultResponse.java │ │ └── infrastructure/ │ │ └── Contents.java │ └── test/ │ └── java/ │ └── com/ │ └── sparta/ │ └── TestSparta.java ├── TaskJuggler/ │ ├── TaskJuggler.iml │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── com/ │ └── genius/ │ ├── common/ │ │ └── Heist.java │ ├── config/ │ │ ├── HeistConfig.java │ │ └── MqConfig.java │ ├── core/ │ │ └── HeistCenter.java │ └── mq/ │ └── Harbor.java ├── TestJar.cmd ├── Walnuts/ │ ├── pom.xml │ └── sailor-stream-distribute-sdk/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── com/ │ └── walnut/ │ └── sailor/ │ └── stream/ │ ├── Dummy.java │ └── fm/ │ ├── FileMultiDistributionIface.java │ ├── SFMConfig.java │ ├── SFMDistributionController.java │ ├── SFMFileFrame.java │ ├── SFMSessionPhaser.java │ ├── SFMSessionValidator.java │ ├── SFMSessionValidatorController.java │ ├── SailorFMConfig.java │ ├── SailorFMDistributionService.java │ ├── SessionPhaser.java │ ├── SessionValidator.java │ ├── SingleStreamFileMultiDistributionService.java │ ├── event/ │ │ └── SFMEventSubscriber.java │ ├── protocol/ │ │ ├── RequestHead.java │ │ └── UFMCFileMeta64.java │ └── session/ │ └── SFMTransaction.java ├── gitignore.txt ├── pom.xml ├── prompt/ │ ├── base_front_standard.md │ ├── coding_standard.md │ └── mysql_table_standard.md └── system/ └── setup/ ├── CenterMessagram.json5 ├── ServersScope.json5 ├── SpringBootApplication.json5 ├── StorageSystem.json5 ├── config.json5 ├── heist.json5 ├── heists/ │ ├── Apesk.json5 │ ├── ArtStation.json5 │ ├── Chronicle.json5 │ ├── DeviantArt.json5 │ ├── DouBan.json5 │ ├── DownloadCNet.json5 │ ├── NeteaseMusic.json5 │ ├── PubChem.json5 │ ├── Steam.json5 │ ├── UrukhaiHeists.json5 │ ├── Void.json5 │ └── Wikipedia.json5 ├── lords/ │ ├── odin.json5 │ ├── redqueen.json5 │ └── skynet.json5 └── sparta/ ├── AccountServiceSpring.json5 ├── SpartaUCDNService.json5 ├── SpartaUISService.json5 ├── SpartaUOFSService.json5 └── SpartaUTASKService.json5 ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore 
================================================ # Compiled class file *.class # Log file *.log # BlueJ files *.ctxt # Mobile Tools for Java (J2ME) .mtj.tmp/ # Package Files # *.jar *.war *.nar *.ear *.zip *.tar.gz *.rar # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml hs_err_pid* replay_pid* ================================================ FILE: .idea/.gitignore ================================================ # Default ignored files /shelf/ /workspace.xml # Editor-based HTTP Client requests /httpRequests/ # Datasource local storage ignored files /dataSources/ /dataSources.local.xml ================================================ FILE: .idea/ApifoxUploaderProjectSetting.xml ================================================ ================================================ FILE: .idea/codeStyles/codeStyleConfig.xml ================================================ ================================================ FILE: .idea/compiler.xml ================================================ ================================================ FILE: .idea/dataSources.xml ================================================ mysql.8 true com.mysql.cj.jdbc.Driver jdbc:mysql://node1.nutgit.com:13393 $ProjectFileDir$ mysql.8 true com.mysql.cj.jdbc.Driver jdbc:mysql://localhost:3306 $ProjectFileDir$ ================================================ FILE: .idea/dictionaries/project.xml ================================================ Clientile ================================================ FILE: .idea/dictionaries/undefined.xml ================================================ arraytron heistgram heistotron heistron indexable indexables maptron nonjron pinecone sauron servgram servtus sitemap ================================================ FILE: .idea/encodings.xml ================================================ ================================================ FILE: .idea/jarRepositories.xml ================================================ ================================================ FILE: .idea/misc.xml ================================================ ================================================ FILE: .idea/sqldialects.xml ================================================ ================================================ FILE: .idea/uiDesigner.xml ================================================ ================================================ FILE: .idea/vcs.xml ================================================ ================================================ FILE: Archcraft/ender-system-hydra/pom.xml ================================================ archcraft com.archcraft 2.5.1 4.0.0 com.walnut.ender.system ender-system-hydra 2.5.1 jar com.pinecone.hydra.kernel hydra-system-reign 2.1.0 compile com.pinecone.tritium hydra-system-tritium 2.1.0 compile com.acorn.redqueen.kernel redqueen-system 2.1.0 compile com.acorn.skynet.kernel skynet-system 2.1.0 compile com.acorn.redqueen.kernel redqueen-system 2.1.0 compile ================================================ FILE: Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/EnderHydra.java ================================================ package com.walnut.archcraft.ender; import com.acorn.redqueen.system.ServiceCentralControl; import com.acorn.skynet.system.SkynetSubsystem; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.regime.arch.Lord; import com.pinecone.framework.util.id.GuidAllocator; import 
com.pinecone.framework.util.name.UniNamespace; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.UniformProcessManager; import com.pinecone.hydra.proc.image.FileSystemMappingImageLoader; import com.pinecone.hydra.proc.image.ImageLoader; import com.pinecone.hydra.proc.image.UniformMultiScopeImageLoader; import com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument; import com.pinecone.hydra.proc.image.kom.VirtualMappingExeImageInstrument; import com.pinecone.hydra.reign.UnixInstitutionalizedMetaImperiumPrivy; import com.pinecone.hydra.system.component.LogStatuses; import com.pinecone.hydra.system.imperium.ImperiumPrivy; import com.pinecone.hydra.system.imperium.KernelObjectRootMountPoint; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.runtime.GenericRuntimeInstrumentConfig; import com.pinecone.hydra.system.subsystem.CentralKernelLordFederation; import com.pinecone.hydra.system.subsystem.KernelLordFederation; import com.pinecone.tritium.Tritium; import com.pinecone.ulf.util.guid.GUIDs; import com.pinecone.ulf.util.guid.i64.GuidAllocator72; import com.pinecone.ulf.util.guid.i64.GuidAllocator72V2; import com.walnut.archcraft.ender.system.HydraEmpire; import com.walnut.archcraft.ender.system.Hydroxy; public class EnderHydra extends Tritium implements HydraEmpire { protected GuidAllocator mSystemGuidAllocator; protected GuidAllocator72 mSystemGuidAllocator72; protected ImageLoader mSystemImageLoader; protected ProcessManager mSystemProcessManager; protected UProcess mProxiedRootSystemProcess; protected KernelObjectConfig mFundamentalKernelObjectConfig; protected VirtualExeImageInstrument mVirtualExeImageInstrument; protected ImperiumPrivy mImperiumPrivy; protected KernelLordFederation mLordFederation; protected SkynetSubsystem mSkynetSubsystem; protected ServiceCentralControl mServiceCentralControl; public EnderHydra( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public EnderHydra( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override protected void prepare_system_skeleton() { super.prepare_system_skeleton(); this.prepare_uniform_system(); } protected void prepare_uniform_system_process_task_subsystem() { this.mVirtualExeImageInstrument = new VirtualMappingExeImageInstrument( this, "" ); this.infoLifecycle( " ProcessSubsystem[1] System VirtualExeImageInstrument Initialization", LogStatuses.StatusDone ); ImageLoader localMappingImageLoader = new FileSystemMappingImageLoader( this, this.mVirtualExeImageInstrument ); this.infoLifecycle( " ProcessSubsystem[2] System Scope LocalMappingImageLoader Initialization", LogStatuses.StatusDone ); this.mSystemImageLoader = new UniformMultiScopeImageLoader( this, localMappingImageLoader ); this.infoLifecycle( " ProcessSubsystem[3] System Scope UniformMultiScopeImageLoader Initialization", LogStatuses.StatusDone ); this.mSystemProcessManager = new UniformProcessManager( this, null, "SystemUniformProcessManager", "", null ); this.getDispenserCenter().getInstanceDispenser().registerInstance( "__SystemTaskManager__", this.mSystemProcessManager ); this.infoLifecycle( " ProcessSubsystem[4] System ProcessManager Initialization", LogStatuses.StatusDone ); this.mProxiedRootSystemProcess = new Hydroxy( this ); this.mSystemProcessManager.applyRootUProcess( this.mProxiedRootSystemProcess ); this.mSystemProcessManager.register( this.mProxiedRootSystemProcess ); this.infoLifecycle( " 
ProcessSubsystem[5] System Hydroxy Initialization", LogStatuses.StatusDone ); this.infoLifecycle( " Uniform System Process/Task Subsystem", LogStatuses.StatusDone ); } protected void prepare_uniform_system_imperium_privy() { this.mImperiumPrivy = new UnixInstitutionalizedMetaImperiumPrivy( new UniNamespace( "SystemUnixInstitutionalizedMetaImperiumPrivy" ), this, null, this.fundamentalKernelObjectConfig() ); this.infoLifecycle( " System ImperiumPrivy Initialization. (name: `" + this.mImperiumPrivy.getTargetingName() + "`, class: `" + this.mImperiumPrivy.getClass().getName() + "`)", LogStatuses.StatusDone ); this.mImperiumPrivy.getExpressInstrument().mount( KernelObjectRootMountPoint.SysImages.getMountPoint(), this.mVirtualExeImageInstrument ); this.infoLifecycle( " System VirtualExeImageInstrument Mount. (MountPoint: `/" + KernelObjectRootMountPoint.SysImages.getMountPoint() + "`)", LogStatuses.StatusDone ); this.infoLifecycle( " Uniform Imperium Privy", LogStatuses.StatusDone ); } @Override protected void prepare_system_skeleton_before() { this.prepare_uniform_system_guid_system(); } protected void prepare_uniform_system_guid_system() { this.mSystemGuidAllocator = GUIDs.newGuidAllocator( 1984 ); // TODO MachineId allocation. this.infoLifecycle( " System GUIDAllocator Initialization [Type: `" + this.mSystemGuidAllocator.getClass().getName() + "`]", LogStatuses.StatusDone ); this.mSystemGuidAllocator72 = new GuidAllocator72V2(); this.infoLifecycle( " System GUIDAllocator72 Initialization [Type: `" + this.mSystemGuidAllocator72.getClass().getName() + "`]", LogStatuses.StatusDone ); } protected void prepare_uniform_system() { this.infoLifecycle( " Uniform Operation System", LogStatuses.StatusStart ); this.init_uniform_system_configuration(); this.prepare_uniform_system_process_task_subsystem(); this.init_process_kernel_subsystem(); this.prepare_modularized_subsystem(); this.infoLifecycle( " Uniform Operation System", LogStatuses.StatusReady ); this.getLogger().info( "[Welcome] [ Welcome to join the imperial army!]" ); } protected void prepare_modularized_subsystem() { this.infoLifecycle( " [SummoningLords] Modularized Subsystem Initialization", LogStatuses.StatusStart ); this.mLordFederation = new CentralKernelLordFederation( this ); this.mSkynetSubsystem = (SkynetSubsystem) this.mLordFederation.get( "KernelSkynetLord" ); if ( this.mSkynetSubsystem != null ) { this.mSkynetSubsystem.vitalize(); } this.mServiceCentralControl = (ServiceCentralControl) this.mLordFederation.get( "KernelRedQueenLord" ); if ( this.mServiceCentralControl != null ) { this.mServiceCentralControl.vitalize(); } this.getLogger().info( "[ActionReport] [SummoningLords] Empire now has {} lords.", this.countEmpireLords() ); this.infoLifecycle( " [SummoningLords] Modularized Subsystem Initialization", LogStatuses.StatusDone ); } protected void init_process_kernel_subsystem() { this.infoLifecycle( "Uniform Process Subsystem", LogStatuses.StatusStart ); this.prepare_uniform_system_imperium_privy(); this.infoLifecycle( "Uniform Process Subsystem", LogStatuses.StatusDone ); } protected void init_uniform_system_configuration() { this.infoLifecycle( "Uniform System Configuration", LogStatuses.StatusStart ); this.mFundamentalKernelObjectConfig = new GenericRuntimeInstrumentConfig(); this.infoLifecycle( " System FundamentalKernelObjectConfig Initialization", LogStatuses.StatusDone ); this.infoLifecycle( "Uniform System Configuration", LogStatuses.StatusDone ); } @Override protected void traceWelcomeInfo() { this.pout().print( 
"---------------------------------------------------------------\n" ); this.pout().print( "\u001B[31mBean Nuts Pinecone Ursus for Java\u001B[0m\n" ); this.pout().print( "\u001B[31mHydra Kingdom Framework (Ender Hydra) \u001B[0m\n" ); this.pout().print( "\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\u001B[0m\n" ); this.pout().print( "---------------------------------------------------------------\n" ); this.pout().print( "\u001B[31mDragon King\u001B[0m\n" ); this.pout().print( "\u001B[32mWebsit: https://www.dragonking.cn/ \u001B[0m\n" ); this.traceSubsystemWelcomeInfo(); this.traceSystemBootingInfo(); this.prepare_system_log4j_logger(); this.infoLifecycle( "Initialization", LogStatuses.StatusStart ); } @Override public GuidAllocator getSystemGuidAllocator() { return this.mSystemGuidAllocator; } @Override public GuidAllocator72 getSystemGuidAllocator72() { return this.mSystemGuidAllocator72; } @Override public ProcessManager processManager() { return this.mSystemProcessManager; } @Override public ImageLoader imageLoader() { return this.mSystemImageLoader; } @Override public Processum ownedLocalProcess() { return this; } @Override public UProcess ownedUniformProcess() { return this.mProxiedRootSystemProcess; } @Override public KernelObjectConfig fundamentalKernelObjectConfig() { return this.mFundamentalKernelObjectConfig; } @Override public ImperiumPrivy imperiumPrivy() { return this.mImperiumPrivy; } @Override public ServiceCentralControl redQueen() { return this.mServiceCentralControl; } @Override public SkynetSubsystem skynet() { return this.mSkynetSubsystem; } @Override public Lord getEmpireLordsByName( String lordName ) { return this.mLordFederation.get( lordName ); } @Override public KernelLordFederation getLordFederation() { return this.mLordFederation; } @Override public int countEmpireLords() { return this.mLordFederation.size(); } @Override public VirtualExeImageInstrument virtualExeImageInstrument() { return this.mVirtualExeImageInstrument; } } ================================================ FILE: Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/system/HydraEmpire.java ================================================ package com.walnut.archcraft.ender.system; import com.acorn.redqueen.system.ServiceCentralControl; import com.acorn.skynet.system.SkynetSubsystem; import com.pinecone.framework.system.regime.arch.Lord; import com.pinecone.hydra.proc.InstitutionalProcess; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.ProcessManagerSystema; import com.pinecone.hydra.proc.image.ImageLoader; import com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument; import com.pinecone.hydra.system.centrum.Centrum; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.system.subsystem.KernelLordFederation; import com.pinecone.hydra.system.types.HydraKingdom; public interface HydraEmpire extends Centrum, HydraKingdom, Slf4jTraceable, InstitutionalProcess, ProcessManagerSystema { ProcessManager processManager(); ImageLoader imageLoader(); VirtualExeImageInstrument virtualExeImageInstrument(); ServiceCentralControl redQueen(); SkynetSubsystem skynet(); Lord getEmpireLordsByName( String lordName ); KernelLordFederation getLordFederation(); int countEmpireLords(); } ================================================ FILE: Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/system/Hydroxy.java ================================================ package 
com.walnut.archcraft.ender.system; import java.util.HashMap; import java.util.Map; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.config.StartupCommandParser; import com.pinecone.hydra.proc.ArchUProcess; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.image.GenericClassImage; import com.pinecone.hydra.proc.ns.GenericSegregationSpace; import com.pinecone.hydra.proc.ns.ProcSpace; import com.pinecone.hydra.system.component.LogStatuses; public class Hydroxy extends ArchUProcess { public Hydroxy( HydraEmpire hostedSystem, UProcess parent, ExecutionImage image, ProcSpace procSpace, Map startupArgs, Map environmentVars ) { super( hostedSystem, parent, hostedSystem.processManager(), image, procSpace, startupArgs, environmentVars ); this.revealNearestSystem().infoLifecycle( "HydraSystemProcess [UProcessProxy] [Name: `" + this.getName() + "`]", LogStatuses.StatusStandby ); this.revealNearestSystem().infoLifecycle( "HydraSystemProcess Initialization", LogStatuses.StatusDone ); } public Hydroxy( HydraEmpire hostedSystem ) { this( hostedSystem, null, new HydroxyImage( hostedSystem ), new GenericSegregationSpace(), hostedSystem.getStartupCommandMap(), hostedSystem.getEnvironmentVars() ); } @Override public RuntimeSystem parentSystem() { return super.parentSystem(); } @Override public HydraEmpire revealNearestSystem() { return (HydraEmpire) super.revealNearestSystem(); } } ================================================ FILE: Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/system/HydroxyImage.java ================================================ package com.walnut.archcraft.ender.system; import java.util.Map; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.image.ArchEntryPointRunnable; import com.pinecone.hydra.proc.image.EntryPointRunnable; import com.pinecone.hydra.proc.image.GenericClassImage; import com.pinecone.hydra.proc.image.ImageLoadProcedureException; import com.pinecone.hydra.proc.image.ImageLoader; public class HydroxyImage extends GenericClassImage { public HydroxyImage( HydraEmpire system, String name, EntryPointRunnable entryPoint, Class processClassType, ImageLoader imageLoader ) throws ImageLoadProcedureException { super( name, entryPoint, processClassType, imageLoader ); } public HydroxyImage( HydraEmpire system ) throws ImageLoadProcedureException { this( system, "SystemProcess", new ArchEntryPointRunnable() { @Override public int main( Map args ) { system.start(); return 0; } }, Hydroxy.class, system.imageLoader() ); } } ================================================ FILE: Archcraft/pom.xml ================================================ sauron com.sauron 1.2.7 4.0.0 com.archcraft archcraft pom 2.5.1 redstone-architecture redstone-message-stones ender-system-hydra ================================================ FILE: Archcraft/redstone-architecture/pom.xml ================================================ archcraft com.archcraft 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.walnut.redstone.kernel redstone-architecture 2.1.0 org.springframework.boot spring-boot-starter org.springframework.boot spring-boot-starter-test test org.springframework.boot spring-boot-starter-web 2.6.13 compile com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.pinecone.summer.springram springram 2.1.0 compile com.pinecone.tritium 
hydra-system-tritium 2.1.0 test com.pinecone.hydra.kom.driver.default hydra-kom-default-driver 2.1.0 compile org.apache.thrift libthrift 0.18.0 compile com.pinecone.hydra.sdk.thrift hydra-lib-thrift-sdk 1.2.1 compile com.walnut.sparta.api.uac sparta-api-uac 2.1.0 compile ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/architect/Bedrock.java ================================================ package com.walnut.archcraft.redstone.architect; public interface Bedrock extends Stone { } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/architect/Redstone.java ================================================ package com.walnut.archcraft.redstone.architect; public interface Redstone extends Stone { } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/architect/Stone.java ================================================ package com.walnut.archcraft.redstone.architect; import com.pinecone.framework.system.prototype.Pinenut; public interface Stone extends Pinenut { } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/ArchResponseObjectManager.java ================================================ package com.walnut.archcraft.redstone.response; import java.util.function.Supplier; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.ArchSystemCascadeComponent; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; public abstract class ArchResponseObjectManager extends ArchSystemCascadeComponent implements ResponseObjectManager { public ArchResponseObjectManager( Namespace name, Hydrogen system, HyComponent parent ) { super( name, system, system.getComponentManager(), parent ); } public ArchResponseObjectManager( Hydrogen system, HyComponent parent ) { this( null, system, parent ); } public ArchResponseObjectManager( Hydrogen system ) { this( system, null ); } @Override public T newResponse(Supplier cons) { T response = cons.get(); response.setRequestId(this.nextTraceId()); return response; } } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/GenericResultResponse.java ================================================ package com.walnut.archcraft.redstone.response; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import org.springframework.http.HttpStatus; public class GenericResultResponse implements RedResponseEntity { private Boolean success; private Integer code = HttpStatus.OK.value(); private String message; private String requestId; private String errorCode; private T data; public static GenericResultResponse success() { GenericResultResponse result = new GenericResultResponse<>(); result.code = HttpStatus.OK.value(); result.success = true; return result; } public static GenericResultResponse successMsg( String msg ) { GenericResultResponse result = GenericResultResponse.success(); result.message = msg; return result; } public static GenericResultResponse success( T object ) { GenericResultResponse result = GenericResultResponse.success(); result.data = object; return result; } public static GenericResultResponse error( String msg ) { GenericResultResponse result = new 
GenericResultResponse<>(); result.success = false; result.message = msg; result.code = HttpStatus.INTERNAL_SERVER_ERROR.value(); return result; } @Override public Boolean getSuccess() { return this.success; } @Override public void setSuccess( Boolean success ) { this.success = success; } @Override public Integer getCode() { return this.code; } @Override public void setCode( Integer code ) { this.code = code; } @Override public String getErrorCode() { return this.errorCode; } @Override public void setErrorCode( String errorCode ) { this.errorCode = errorCode; } @Override public void setRequestId( String requestId ) { this.requestId = requestId; } @Override public String getRequestId() { return this.requestId; } @Override public String getMessage() { return this.message; } @Override public void setMessage( String msg ) { this.message = msg; } @Override public T getData() { return this.data; } @Override public void setData( T data ) { this.data = data; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat(new KeyValue[]{ new KeyValue<>( "success", this.getSuccess() ), new KeyValue<>( "code", this.code ), new KeyValue<>( "message", this.message ), new KeyValue<>( "errorCode", this.errorCode ), new KeyValue<>( "requestId", this.requestId ), new KeyValue<>( "data", this.data ) }); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/RedResponse.java ================================================ package com.walnut.archcraft.redstone.response; import java.io.Serializable; import com.pinecone.framework.system.prototype.Pinenut; public interface RedResponse extends Pinenut, Serializable { Boolean getSuccess(); void setSuccess( Boolean success ); Integer getCode(); void setCode( Integer code ); String getErrorCode(); void setErrorCode( String errorCode ); String getMessage(); void setMessage( String msg ); } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/RedResponseEntity.java ================================================ package com.walnut.archcraft.redstone.response; public interface RedResponseEntity extends RedTraceableResponse { T getData(); void setData( T data ); } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/RedTraceableResponse.java ================================================ package com.walnut.archcraft.redstone.response; public interface RedTraceableResponse extends RedResponse { void setRequestId( String requestId ); String getRequestId(); } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/ResponseObjectManager.java ================================================ package com.walnut.archcraft.redstone.response; import java.util.function.Supplier; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.system.HyComponent; public interface ResponseObjectManager extends Pinenut, HyComponent { String nextTraceId(); T newResponse(Supplier cons); } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/system/Dummy.java ================================================ package com.walnut.archcraft.redstone.system; public class 
Dummy { } ================================================ FILE: Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/util/Dummy.java ================================================ package com.walnut.archcraft.redstone.util; public class Dummy { } ================================================ FILE: Archcraft/redstone-message-stones/pom.xml ================================================ archcraft com.archcraft 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.walnut.redstone.stones redstone-message-stones 2.1.0 com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.walnut.redstone.kernel redstone-architecture 2.1.0 compile com.pinecone.hydra.kernel hydra-message-broadcast 2.1.0 compile com.pinecone.hydra.kernel hydra-message-control 2.1.0 compile ================================================ FILE: Archcraft/redstone-message-stones/src/main/java/com/walnut/archcraft/redstone/Dummy.java ================================================ package com.walnut.archcraft.redstone; public class Dummy { } ================================================ FILE: Archcraft/redstone-message-stones/src/main/java/com/walnut/archcraft/redstone/messge/PrimaryMessageWareStone.java ================================================ package com.walnut.archcraft.redstone.messge; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.uma.DuplexAppointServer; import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode; public interface PrimaryMessageWareStone extends Pinenut { RuntimeSystem parentSystem(); Processum getParentProcess(); DuplexAppointServer getWolfKingAppointServer(); DuplexAppointClient getWolfAppointClient(); UlfBroadcastControlNode getPrimaryKafkaClient(); UlfBroadcastControlNode getPrimaryRocketClient(); }
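Before the changelog, one worked illustration: the redstone response envelope defined a few files up composes like this. This is a minimal sketch; the generic parameters were stripped by the extraction, so the type arguments below are an assumption, and the payload and request id are made-up values (a request id would normally come from a ResponseObjectManager.newResponse call).

```java
import java.util.Map;
import com.walnut.archcraft.redstone.response.GenericResultResponse;

public class ResponseEnvelopeSketch {
    public static void main( String[] args ) {
        // Success envelope: success=true, code=200, payload under "data".
        GenericResultResponse<Map<String, String>> ok =
                GenericResultResponse.success( Map.of( "service", "redstone" ) ); // made-up payload
        ok.setRequestId( "req-0001" ); // made-up id; normally set via ResponseObjectManager#newResponse

        // Error envelope: success=false, code=500, message only.
        GenericResultResponse<Object> err = GenericResultResponse.error( "volume offline" );

        System.out.println( ok.toJSONString() );
        System.out.println( err.toJSONString() );
    }
}
```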
================================================ FILE: CHANGELOG.md ================================================ # Changelog

```markdown
Format:
## [version] - date
### Template name (optional: console-ui, console, FileModule, common, ...)
- 🎈Added: {module} {feature description}
- 🐞Bug: #{issue} {bug description}
- ⛏Fixed: #{issue} {fix description}
- 📝Docs: {file} comments added
- 🚀Performance: {class} {method} {description}
- 🎨Style:
- 🧹Refactor:
- 🧪Test: {class|method} {test result}
- 🛑Renamed: {old name} ➡ {new name}
- ❌Removed: {module|method}
- 🚧WIP
------
```

------

# Contents

* [V 1.0.1]()
* [V 1.0.0]()

------

## [V 1.0.0] - 2023.5.18

### Messenger

- ❌Removed: `Message,MessageType`; removed all `Message`-related code from every module
- 🧹Refactor: refactored `Message` into `UlfUMCMessage` and built the brand-new UlfUMC protocol
- 🎈Added: `MessageBuilder,MessageFactory`, used to build dedicated `UlfUMCMessage` instances

------

## [V 1.0.0] - 2023.5.13

### 🎈 TaskJuggler: the task-scheduling module, mainly responsible for assigning and processing node tasks

- 🎈Added: `Heist`, a worker thread of `HeistCenter` that handles a single task, with a failure-retry mechanism
- 🎈Added: `HeistCenter`, the task-scheduling center; initializes task progress and communicates with `Harbor`; the **core class** of the whole task scheduler
- 🎈Added: `HeistConfig`, the configuration class for the whole `Heist` family
- 🎈Added: `Harbor`, the core class for communicating with the Master node; currently supports sending task-query requests to the master node, task caching, and task pickup by `HeistCenter`
- 🎈Added: `MqConfig`, message-queue initialization

### 🎈 com.pinecone: home of the project's core shared code

- 🎈Added: `SystemConfig`, the system-wide configuration class, exposing some global system variables
- 🎈Added: `RadiumConstPool`, the project's shared constant pool, holding common constants
- 🎈Added: `SystemUtils`, a system utility class with helpers specific to this system
- 🎈🚧Added: `TimeUtil`, time utilities (use is not recommended)

### 🎈 Messenger: defines message types, message structure, and global message-queue state; the module dedicated to message definitions

- 🎈Added: `Message`, the message class; currently the **core class** of message-queue communication
- 🎈Added: `MessageType`, the message-type class; currently `Query,Post,Reply,ReplyPost,ShutDown`
- 🎈Added: `MessageConverterConfig`, mainly responsible for converting the classes transported through MQ
- 🎈Added: `FunctionNamePool`, holding the names of the corresponding methods on the Master
- 🎈Added: `MqPool`, the message-queue global variable pool

### 🎈 Console: the project bootstrap module

### 🎈 File: the file-operations module

- 🎈Added: `JsonFileUtil`, a utility class for reading and writing JSON files
- 🎈Added: `FileUtil`, a utility class for copying files, deleting files, and similar operations
- 🎈Added: `FileCondition`, a functional interface for filtering recursive file deletion
- 🧪Test: tested the `FileUtil` and `JsonFileUtil` utility classes
- 🎈Added: `FileCacheManagerInstance`, turning the whole FileCacheManager into a global singleton to prevent duplicate instantiation
- 🎈Added: `GlobalFileCache`, the global file cache, which also supplies `FileCacheManagerInstance` with its initial list of file caches
- 🎈Added: `FileCache`, the file buffer-pool class; caches file content, supports reading, modifying, and appending, and flushes to disk automatically based on the flush interval or the number of bytes written
- 🎈Added: `FileCacheManager`, the file buffer-pool manager; manages all file cache pools and polls each file to see whether it needs an automatic flush; currently comprises a patrol thread and a flush thread
- 🎈Added: `FileCacheManagerInit`, used to initialize FileCacheManager at startup

------ ================================================ FILE: File/File.iml ================================================ ================================================ FILE: File/pom.xml ================================================ com.Sauron sauron 1.0-SNAPSHOT 4.0.0 File jar File http://maven.apache.org UTF-8 com.Sauron pinecone 3.3.1 com.aliyun.oss aliyun-sdk-oss 3.10.2 org.springframework.boot spring-boot-starter-web com.alibaba fastjson 1.2.75 org.springframework.boot spring-boot-starter-test test org.projectlombok lombok org.slf4j slf4j-api ================================================ FILE: File/src/main/java/com/genius/App.java ================================================ package com.genius; /** * Hello world! * */ public class App { public static void main( String[] args ) { System.out.println( "Hello World!" ); } }
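Before the FileCache source below, a minimal usage sketch of the cache described in the changelog. It is hedged: the file path and key names are invented, the generic parameter on FileCache is the reconstruction used in this dump, and JsonFileUtil.writeJsonFile is assumed to accept a (path, map) pair exactly as FileCache.sync calls it.

```java
import java.nio.file.Paths;
import java.util.Map;
import com.genius.cache.FileCache;
import com.genius.pojo.CommonConfigFile;
import com.genius.util.JsonFileUtil;

public class FileCacheSketch {
    public static void main( String[] args ) throws Exception {
        CommonConfigFile config = new CommonConfigFile( "/tmp", "demo.json", Map.of( "username", "root" ) );

        // Seed the on-disk layout {"data":{"username":"root"},"updateTime":"..."} first,
        // because the FileCache constructor throws FileCacheException if load(...) fails.
        JsonFileUtil.writeJsonFile( Paths.get( "/tmp", "demo.json" ).toString(), config.packageConfig() );

        // 5-second auto-flush window, 4096-byte write buffer.
        FileCache<CommonConfigFile> cache = new FileCache<>( config, 5, 4096 );

        cache.write( "admin", "username" ); // rewrites data.username; a full buffer queues a snapshot
        System.out.println( cache.get( "data" ) ); // {"username":"admin"}

        cache.forceSync(); // push the current snapshot to the sync channel immediately
    }
}
```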
================================================ FILE: File/src/main/java/com/genius/cache/FileCache.java ================================================ package com.genius.cache; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.genius.exception.FileCacheException; import com.genius.pojo.ConfigFile; import com.genius.pojo.FileType; import com.genius.util.JsonFileUtil; import com.genius.util.TimeUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.nio.file.Paths; import java.util.Arrays; import java.util.Map; import java.util.Objects; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; /** * @author Genius * @date 2023/04/24 00:01 **/ //TODO File-cache write optimization: consider adding a write buffer that collects values written to the same key within a short window and merges them into a single write public class FileCache<T extends ConfigFile<?>> { private T configFile; //file-config object: supplies the file info and folder structure, and records the file's save time private Logger logger; private ConcurrentHashMap<String, Object> jsonFile; //cached file content private static int MAX_WRITE_BUFFER_LIMIT = 4096; //maximum write-buffer limit private AtomicInteger writeByte; //bytes written so far private BlockingQueue<ConcurrentHashMap<String, Object>> syncChannel; //blocking queue of snapshots awaiting a disk flush //TODO Optimization: consider one scheduled thread managing the writes of all FileCaches private ExecutorService pool; //sync thread pool private long autoSyncTime; //auto-flush interval public FileCache(T configFile) throws FileCacheException { init(configFile,10); } /** * Constructor. * @param configFile the config file to use * @param autoSyncTime the auto-flush interval * @param maxWriteBufferLimit the write-buffer limit * @throws FileCacheException */ public FileCache(T configFile,long autoSyncTime,int maxWriteBufferLimit)throws FileCacheException { MAX_WRITE_BUFFER_LIMIT = maxWriteBufferLimit; init(configFile,autoSyncTime); } /** * Initialization. * @param configFile the config file * @param autoSyncTime the auto-flush interval * @throws FileCacheException */ private void init(T configFile, long autoSyncTime) throws FileCacheException { this.configFile = configFile; this.configFile.updateConfigTime();//refresh the current timestamp this.logger = LoggerFactory.getLogger("FileCache:"+this.configFile.getFileName()); this.autoSyncTime = autoSyncTime; if(!load(getFullFilePath())){ throw new FileCacheException("FileCache init error, please check that the path is correct"); } this.writeByte = new AtomicInteger(0); this.syncChannel = new ArrayBlockingQueue<>(20); this.pool = Executors.newSingleThreadExecutor(); pool.submit(new SyncMan()); } /** * Load the file content. * @return boolean */ private boolean load(String path){ Map<String, Object> stringObjectMap = JsonFileUtil.readJsonFile(path); if(Objects.isNull(stringObjectMap)){ logger.error("{} config file does not exist!",path); return false; } this.jsonFile = new ConcurrentHashMap<>(stringObjectMap); return true; } /** * Reload the in-memory map. * @return boolean */ public synchronized boolean reload(){ return load(Paths.get(this.configFile.getFilePath(),this.configFile.getFileName()).toString()); } /** * Walk down the JSON object along the given keys and rewrite the target. * @param data * @param keys * @return * @throws InterruptedException */ public int writeKeys(Object data,String...keys) throws InterruptedException, FileCacheException { return writeKeys(false,data,keys); } private int writeKeys(boolean isAppend,Object data,String...keys) throws FileCacheException, InterruptedException {
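// How the flush threshold works: writeByte.updateAndGet adds writeBytes to the running count,
// but wraps the result back to 0 as soon as the sum would reach MAX_WRITE_BUFFER_LIMIT; a result
// of exactly 0 is the "buffer full" signal that hands the snapshot below to syncChannel.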
if(Objects.isNull(data)){return 0;} String jsonDataStr = JSON.toJSONString(data); int writeBytes = jsonDataStr.getBytes().length; if(writeBytes==0){return 0;} Object jsonData = writeInData(isAppend,data,keys); if(Objects.isNull(jsonData)){return 0;} ConcurrentHashMap<String, Object> temp = new ConcurrentHashMap<>(JSONObject.parseObject(JSON.toJSONString(jsonFile),Map.class)); int newBytes = writeByte.updateAndGet(x -> x + writeBytes >= MAX_WRITE_BUFFER_LIMIT ? 0 : x + writeBytes); //TODO A dirty read can occur here: the map version that gets put may not be the current version, but so far this has not been observed to affect file writes if(newBytes==0){ logger.debug("buffer full, flushing to disk"); syncChannel.put(temp); } return writeBytes; } //write public int write(Object data,String key) throws InterruptedException, FileCacheException { return this.writeKeys(data,key); } private Object writeInData(boolean isAppend,Object value,String...keys) throws FileCacheException { String[] finds = Arrays.copyOf(keys, keys.length - 1); Object data = this.get("data"); Object temp = this.get(finds); if(temp instanceof JSONArray){ try{ //element insertion int index = Integer.parseInt(keys[keys.length-1]); if(index==-1){ ((JSONArray) temp).add(value); }else{ String oldValue = ((JSONArray) temp).get(index).toString(); value = isAppend?oldValue+value.toString():value; if(oldValue.equals(value)){ return null; } ((JSONArray) temp).add(index,value); } }catch (Exception e){ return null; } } else if(temp instanceof JSONObject){ String key = keys[keys.length-1]; String oldValue = ((JSONObject) temp).get(key).toString(); value = isAppend?oldValue+value:value; if(oldValue.equals(value)){ return null; } ((JSONObject) temp).put(key,value); } else{ throw new FileCacheException("the keys are invalid!"); } return data; } /** * Append content; arrays are supported: to add a new array element, set the last key to -1. * @param keys the keys to look up * @param append the content to append * @return * @throws InterruptedException */ public int append(Object append,String...keys) throws InterruptedException, FileCacheException { return writeKeys(true,append,keys); } /** * Walk downward through the content following the key array. * @param keys * @return */ public Object get(String...keys){ Object jsonObject = this.get("data"); for (String key : keys) { if(jsonObject instanceof JSONObject){ jsonObject = ((JSONObject) jsonObject).get(key); } else if(jsonObject instanceof JSONArray){ jsonObject = ((JSONArray) jsonObject).get(Integer.parseInt(key)); }else{ return jsonObject; } } return jsonObject; } /** * Get file content. * @param key * @return */ public Object get(String key){ return jsonFile.get(key); } /** * Reset the written-bytes counter. */ protected void clearWriteBytes(){ writeByte.updateAndGet(x->0); } /** * Check whether the time since the last update exceeds the auto-flush interval. * @return boolean */ public boolean needAutoSync(){ long now = TimeUtil.getCurrentSecond(); return now - TimeUtil.getSecond(configFile.getUpdateTime())>autoSyncTime; } /** * Force a flush to disk. */ public void forceSync(){ if(writeByte.get()==0){ logger.info("no version change"); return; } clearWriteBytes(); ConcurrentHashMap<String, Object> temp = new ConcurrentHashMap<>(JSONObject.parseObject(JSON.toJSONString(jsonFile),Map.class)); try { syncChannel.put(temp); } catch (InterruptedException e) { logger.error("auto flush failed"); } } /** * Flush one buffered snapshot. * @return */ private boolean sync(ConcurrentHashMap<String, Object> take){ configFile.updateConfigTime(); //refresh the config file's flush time String dir = getFullFilePath(); configFile.onlyUpdateTime(take); File file = JsonFileUtil.writeJsonFile(dir, take); logger.debug("writing new version of {}",dir); return Objects.isNull(file); } public BlockingQueue<ConcurrentHashMap<String, Object>> getFileChannel(){ return this.syncChannel; } public String getFullFilePath(){ return Paths.get(this.configFile.getFilePath(), this.configFile.getFileName()).toString(); } public String getFilePath(){ return this.configFile.getFilePath(); } public String getFileName(){ return this.configFile.getFileName(); } public long getSyncTime(){ return this.autoSyncTime; } public FileType getFileType(){ return this.configFile.getFileType(); }
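// SyncMan is the single consumer of syncChannel: the one-thread pool created in init() blocks on
// take() and writes each queued snapshot through sync(...), keeping disk I/O off callers' threads.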
class SyncMan implements Runnable{ @Override public void run() { for(;;){ try { ConcurrentHashMap<String, Object> take = syncChannel.take(); sync(take); } catch (InterruptedException e) { throw new RuntimeException(e); } } } } @Override public int hashCode() { return Objects.hash(configFile); } @Override public boolean equals(Object obj) { if(obj instanceof FileCache){ if(((FileCache) obj).getFullFilePath().equals(this.getFullFilePath())){ return true; }else if(obj.hashCode() == this.hashCode()){ return true; } } return false; } } ================================================ FILE: File/src/main/java/com/genius/cache/FileCacheManager.java ================================================ package com.genius.cache; import com.genius.util.TimeUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicLong; /** * @author Genius * @date 2023/04/24 17:35 **/ /** * Automatic file-flush manager; continuously watches whether any file needs an automatic write. */ public class FileCacheManager { private Logger logger = LoggerFactory.getLogger(FileCacheManager.class); private final List<FileCache> fileCaches; private AtomicLong sleepTime; //sleep interval private ExecutorService watchPool; //patrol thread private ExecutorService autoSyncer; //flush (producer) threads private volatile Watcher watcher; protected FileCacheManager(List<FileCache> fileCaches){ this.fileCaches = new CopyOnWriteArrayList<>(fileCaches); initSleepTime(); this.watchPool = Executors.newSingleThreadExecutor(); this.autoSyncer = Executors.newFixedThreadPool(fileCaches.size()); } /** * Derive the minimum sleep interval from the caches' flush intervals, to reduce idle spinning. */ private void initSleepTime(){ AtomicLong minSleepTime = new AtomicLong(Long.MAX_VALUE); fileCaches.forEach(item->{ minSleepTime.set(Long.min(minSleepTime.get(), item.getSyncTime())); }); this.sleepTime = minSleepTime; } public void start(){ if(!fileCaches.isEmpty()){ if(watcher==null){ synchronized (FileCacheManager.class){ if(watcher==null){ watcher = new Watcher(); this.watchPool.submit(watcher); } } } } } public boolean addFileCache(FileCache fileCache){ if (this.fileCaches.indexOf(fileCache)==-1) { fileCaches.add(fileCache); initSleepTime(); return true; } return false; } public List<FileCache> getRunnableFileCaches(){ return this.fileCaches; } class Watcher implements Runnable{ @Override public void run() { for(;;){ long now = TimeUtil.getCurrentSecond(); for(FileCache cache:fileCaches){ BlockingQueue fileChannel = cache.getFileChannel(); if(fileChannel.isEmpty()){ if(cache.needAutoSync()){ logger.info("detected a file that needs a forced flush: {}",cache.getFileName()); autoSyncer.submit(new AutoSyncer(cache)); } } } now -= TimeUtil.getCurrentSecond(); //now is minus the elapsed seconds; sleep off the rest of the window (the original sleep logic was lost in extraction, so this is a reconstruction) if(now + sleepTime.get() > 0){ try { Thread.sleep((now + sleepTime.get()) * 1000L); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return; } } } } } class AutoSyncer implements Runnable{ //reconstructed from usage: the flush task submitted by the Watcher private final FileCache cache; AutoSyncer(FileCache cache){ this.cache = cache; } @Override public void run() { cache.forceSync(); } } } ================================================ FILE: File/src/main/java/com/genius/cache/FileCacheManagerInstance.java ================================================ package com.genius.cache; //file header reconstructed: the extraction dropped the boundary between FileCacheManager and this file import com.genius.constpool.GlobalFileCache; import java.util.List; public class FileCacheManagerInstance { private static final List<FileCache> fileCaches = GlobalFileCache.fileCaches; private static volatile FileCacheManager Instance; public static FileCacheManager getInstance(){ if(Instance==null){ synchronized (FileCacheManagerInstance.class){ if(Instance==null){ Instance = new FileCacheManager(fileCaches); } } } return Instance; } } ================================================ FILE: File/src/main/java/com/genius/constpool/GlobalFileCache.java ================================================ package com.genius.constpool; import com.genius.cache.FileCache; import java.util.List; /** * @author Genius * @date 2023/04/25 23:03 **/ /** * Global file-cache pool, holding global file caches for convenient cross-module access. */ public class GlobalFileCache { public static List<FileCache> fileCaches = List.of(); }
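A sketch of wiring these pieces together. It is hedged twice over: the tail of FileCacheManager and the FileCacheManagerInstance header above are reconstructions from usage, and GlobalFileCache.fileCaches starts as the immutable List.of(), so it must be replaced before FileCacheManagerInstance is first touched (its static field captures the reference at class initialization).

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.genius.cache.FileCache;
import com.genius.cache.FileCacheManager;
import com.genius.cache.FileCacheManagerInstance;
import com.genius.constpool.GlobalFileCache;
import com.genius.pojo.CommonConfigFile;

public class CacheManagerSketch {
    public static void main( String[] args ) throws Exception {
        CommonConfigFile config = new CommonConfigFile( "/tmp", "demo.json", Map.of( "username", "root" ) );
        FileCache<CommonConfigFile> cache = new FileCache<>( config, 5, 4096 ); // demo.json must already exist

        // Swap in a mutable list BEFORE the singleton initializes, or addFileCache can never grow it.
        GlobalFileCache.fileCaches = new ArrayList<FileCache>( List.of( cache ) );

        FileCacheManager manager = FileCacheManagerInstance.getInstance();
        manager.start(); // spawns the Watcher; idle caches past their flush window get force-synced
    }
}
```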
================================================ FILE: File/src/main/java/com/genius/exception/FileCacheException.java ================================================ package com.genius.exception; /** * @author Genius * @date 2023/04/24 00:57 **/ public class FileCacheException extends Exception{ String message; public FileCacheException(String errorMessage){ this.message = errorMessage; } @Override public String getMessage() { return message; } } ================================================ FILE: File/src/main/java/com/genius/method/FileCondition.java ================================================ package com.genius.method; import java.nio.file.Path; @FunctionalInterface public interface FileCondition { boolean condition(Path path); } ================================================ FILE: File/src/main/java/com/genius/pojo/CommonConfigFile.java ================================================ package com.genius.pojo; /** * @author Genius * @date 2023/04/26 00:12 **/ public class CommonConfigFile extends ConfigFile<Object> { public CommonConfigFile(String filePath, String fileName, Object data) { super(filePath, fileName, data); } } ================================================ FILE: File/src/main/java/com/genius/pojo/ConfigFile.java ================================================ package com.genius.pojo; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.Map; /** * @author Genius * @date 2023/04/21 02:24 **/ //Abstract base class for config files; it only builds the most basic structure of a config file and is generally not used to store the config content itself public abstract class ConfigFile<T> { private FileType fileType; private String filePath; private String fileName; private T data; //the structure of the JSON file, not the file's data, e.g. {username:"",password:""} //last update time private LocalDateTime updateTime; /** * Package the config-file structure when it is first created. * @return Map */ public Map<String, Object> packageConfig() { return this.packageConfig(this.data); } /** * Package content supplied by an external caller. * @return */ public Map<String, Object> packageConfig(T data){ updateConfigTime(); return Map.of( "data",data, "updateTime", updateTime.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) ); } /** * Update only the timestamp. * @param map * @return */ public Map<String, Object> onlyUpdateTime(Map<String, Object> map){ updateConfigTime(); if (map.containsKey("updateTime")) { map.put("updateTime",updateTime.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); } return map; } /** * Update this config object's own timestamp. */ public void updateConfigTime(){ updateTime = LocalDateTime.now(); } public ConfigFile() { } public ConfigFile(String filePath, String fileName, T data) { this.filePath = filePath; this.fileName = fileName; this.data = data; this.updateTime = LocalDateTime.now(); this.fileType = FileType.COMMON; } public ConfigFile(String filePath,String fileName,T data,FileType fileType){ this.filePath = filePath; this.fileName = fileName; this.data = data; this.updateTime = LocalDateTime.now(); this.fileType = fileType; } public String getFilePath() { return this.filePath; } public String getFileName() { return this.fileName; } public LocalDateTime getUpdateTime() {return this.updateTime;} public FileType getFileType(){ return this.fileType; } //direct use is not recommended public T getData() { return this.data; } public void setData(T data){this.data = data;} public void setFileName(String fileName) { this.fileName = fileName; } public void setFilePath(String filePath){ this.filePath = filePath; } } ================================================ FILE: File/src/main/java/com/genius/pojo/FileType.java ================================================ package com.genius.pojo; /** * @author Genius * @date 2023/04/26 01:48 **/ public enum FileType { LOGGER("Log"), COMMON("Common file"); private final String name; FileType(String name){ this.name = name; } public String getName(){ return this.name; } }
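The ConfigFile framing above is what actually lands on disk; a tiny sketch of packageConfig (the file name is invented and the printed timestamp is illustrative):

```java
import java.util.Map;
import com.genius.pojo.CommonConfigFile;

public class ConfigFileSketch {
    public static void main( String[] args ) {
        CommonConfigFile cfg = new CommonConfigFile( "/tmp", "demo.json", Map.of( "username", "root" ) );

        // packageConfig wraps the payload under "data" and stamps "updateTime",
        // the field that FileCache.needAutoSync later compares against its window.
        Map<String, Object> wrapped = cfg.packageConfig();
        System.out.println( wrapped ); // e.g. {data={username=root}, updateTime=2023-04-26 01:48:00}
    }
}
```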
================================================
FILE: File/src/main/java/com/genius/pojo/oss/AliyunOSS.java
================================================
package com.genius.pojo.oss;

import com.aliyun.oss.OSS;
import com.aliyun.oss.OSSClientBuilder;
import com.aliyun.oss.OSSException;
import com.aliyun.oss.model.*;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

/**
 * @author Genius
 * @date 2023/05/01 16:38
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Component
@Configuration
public class AliyunOSS implements OssAble {
    private Logger logger = LoggerFactory.getLogger(AliyunOSS.class);
    private String ENDPOINT;
    private String ACCESS_KEY_ID;
    private String ACCESS_KEY_SECRET;
    private long PART_SIZE = 1024 * 1024;

    public OSS getOssInstance() {
        return new OSSClientBuilder().build(ENDPOINT, ACCESS_KEY_ID, ACCESS_KEY_SECRET);
    }

    /**
     * Single-file upload.
     * @param filePath
     * @param bucketName
     * @param objectName
     * @return the object path, or null on failure
     */
    @Override
    public String simpleUpload(String filePath, String bucketName, String objectName) {
        File file = new File(filePath);
        try (InputStream inputStream = new FileInputStream(file)) {
            return this.simpleUpload(inputStream, bucketName, objectName);
        } catch (IOException e) {
            return null;
        }
    }

    @Override
    public String simpleUpload(InputStream inputStream, String bucketName, String objectName) {
        OSS ossClient = this.getOssInstance();
        try {
            PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, objectName, inputStream);
            ossClient.putObject(putObjectRequest);
        } catch (OSSException e) {
            loggerError(e);
            return null;
        } finally {
            shutDown(ossClient);
        }
        return Paths.get(bucketName, objectName).toString();
    }

    @Override
    public String multipartUpload(String filePath, String bucketName, String objectName) {
        OSS ossClient = this.getOssInstance();
        try {
            InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName, objectName);
            InitiateMultipartUploadResult upresult = ossClient.initiateMultipartUpload(request);
            String uploadId = upresult.getUploadId();
            File sampleFile = new File(filePath);
            long fileLength = sampleFile.length();
            int partCount = (int) (fileLength / PART_SIZE);
            if (fileLength % PART_SIZE != 0) {
                partCount++;
            }
            List<PartETag> partETags = new ArrayList<>();
            // (Reconstructed) The loop body is not preserved in this extract; what follows is
            // the canonical OSS part-upload sequence implied by uploadId/partETags/PART_SIZE.
            for (int i = 0; i < partCount; i++) {
                long startPos = i * PART_SIZE;
                long curPartSize = (i + 1 == partCount) ? (fileLength - startPos) : PART_SIZE;
                try (InputStream in = new FileInputStream(sampleFile)) {
                    in.skip(startPos);
                    UploadPartRequest uploadPartRequest = new UploadPartRequest();
                    uploadPartRequest.setBucketName(bucketName);
                    uploadPartRequest.setKey(objectName);
                    uploadPartRequest.setUploadId(uploadId);
                    uploadPartRequest.setInputStream(in);
                    uploadPartRequest.setPartSize(curPartSize);
                    uploadPartRequest.setPartNumber(i + 1);
                    UploadPartResult uploadPartResult = ossClient.uploadPart(uploadPartRequest);
                    partETags.add(uploadPartResult.getPartETag());
                }
            }
            CompleteMultipartUploadRequest completeMultipartUploadRequest =
                    new CompleteMultipartUploadRequest(bucketName, objectName, uploadId, partETags);
            ossClient.completeMultipartUpload(completeMultipartUploadRequest);
        } catch (OSSException e) {
            loggerError(e);
            return null;
        } catch (IOException e) {
            logger.error("Multipart upload failed", e);
            return null;
        } finally {
            shutDown(ossClient);
        }
        return Paths.get(bucketName, objectName).toString();
    }

    @Override
    public boolean deleteFiles(List<String> fileNames, String bucketName) {
        OSS ossClient = this.getOssInstance();
        try {
            DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(bucketName);
            deleteObjectsRequest.setKeys(fileNames);
            ossClient.deleteObjects(deleteObjectsRequest);
        } catch (OSSException e) {
            loggerError(e);
            return false;
        } finally {
            shutDown(ossClient);
        }
        return true;
    }

    private void loggerError(OSSException e) {
        logger.error("Error Message:{}", e.getErrorMessage());
        logger.error("Error Code:{}", e.getErrorCode());
        logger.error("Request ID:{}", e.getRequestId());
        logger.error("Host ID:{}", e.getHostId());
    }

    private void shutDown(OSS ossClient) {
        if (ossClient != null) {
            ossClient.shutdown();
        }
    }
}
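A hedged usage sketch for the client above. The endpoint, keys, bucket, and object names are placeholders; the setter names are the ones Lombok's @Data derives from the fields shown:

// Placeholder credentials and names; illustrative only.
import com.genius.pojo.oss.AliyunOSS;

public class OssDemo {
    public static void main(String[] args) {
        AliyunOSS oss = new AliyunOSS();
        oss.setENDPOINT("https://oss-cn-hangzhou.aliyuncs.com");
        oss.setACCESS_KEY_ID("<your-access-key-id>");
        oss.setACCESS_KEY_SECRET("<your-access-key-secret>");
        // Small files go through a single PUT; large files through the multipart path.
        String key = oss.simpleUpload("/tmp/app.log", "my-bucket", "logs/app.log");
        System.out.println(key != null ? "uploaded to " + key : "upload failed");
    }
}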
================================================
FILE: File/src/main/java/com/genius/pojo/oss/OssAble.java
================================================
package com.genius.pojo.oss;

import java.io.File;
import java.io.InputStream;
import java.util.List;

public interface OssAble {
    String simpleUpload(String filePath, String bucketName, String objectName);
    String simpleUpload(InputStream inputStream, String bucketName, String objectName);
    String multipartUpload(String filePath, String bucketName, String objectName);
    boolean endPointUpload(String filePath, String bucketName, String objectName);
    File downloadFile(String downloadPath, String bucketName, String objectName);
    boolean deleteFile(String fileName, String bucketName);
    boolean deleteFiles(List<String> fileNames, String bucketName);
}

================================================
FILE: File/src/main/java/com/genius/util/FileUtil.java
================================================
package com.genius.util;

/**
 * @author Genius
 * @date 2023/04/20 11:03
 **/

import com.genius.method.FileCondition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;

/**
 * File utilities.
 */
public class FileUtil {
    private static Logger logger = LoggerFactory.getLogger(FileUtil.class);

    /**
     * Checks whether a file exists.
     * @param dir full path including the file name
     * @return Boolean
     */
    public static Boolean isFileExist(String dir) {
        return new File(dir).exists();
    }

    /**
     * Copies a file; faster than Files.
     * @param srcPath  source file path
     * @param destPath destination file path
     * @return file
     */
    public static File copyFile(String srcPath, String destPath) {
        try (
                FileChannel src = new FileInputStream(srcPath).getChannel();
                // the destination channel must be opened for writing
                FileChannel dest = new FileOutputStream(destPath).getChannel()
        ) {
            dest.transferFrom(src, 0, src.size());
        } catch (IOException e) {
            logger.error("复制文件失败", e);
            return null;
        }
        return new File(destPath);
    }

    /**
     * Deletes a file.
     * @param path     directory path
     * @param filename file name
     * @return boolean
     */
    public static boolean deleteFile(String path, String filename) {
        return deleteFile(Paths.get(path, filename).toString());
    }

    /**
     * Deletes a file.
     * @param path full file path
     * @return boolean
     * @throws IOException IOException
     */
    public static boolean deleteFile(String path) {
        try {
            Files.delete(Paths.get(path));
        } catch (IOException e) {
            logger.error("删除文件失败", e);
            return false;
        }
        return true;
    }

    /**
     * Recursively deletes a directory tree.
     * @param path directory path
     * @return boolean
     * @throws IOException IOException
     */
    public static boolean deleteDirectory(String path) throws IOException {
        // refuse to walk paths rooted at C: or root (with &&; an || here is always true)
        FileCondition condition = file -> !file.toString().startsWith("C:") && !file.toString().startsWith("root");
        return deleteDirectory0(path, condition, condition, condition);
    }

    /**
     * Recursively deletes a directory tree.
     * @param path      directory path
     * @param visit     invoked when a file is visited
     * @param preVisit  invoked before entering a subdirectory
     * @param postVisit invoked after a directory has been visited
     * @return boolean
     * @throws IOException IOException
     */
    public static boolean deleteDirectory(String path, FileCondition visit, FileCondition preVisit, FileCondition postVisit) throws IOException {
        return deleteDirectory0(path, visit, preVisit, postVisit);
    }

    /**
     * Recursive deletion.
     * @param path directory path
     * @return file
     */
    private static boolean deleteDirectory0(String path, FileCondition visit, FileCondition preVisit, FileCondition postVisit) throws IOException {
        Files.walkFileTree(Paths.get(path), new SimpleFileVisitor<Path>() {
                    // invoked when a file is visited
                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        if (!visit.condition(file)) {
                            logger.info("文件被跳过: {}", file);
                            return FileVisitResult.SKIP_SUBTREE;
                        }
                        Files.delete(file);
                        logger.info("文件被删除: {}", file);
                        return FileVisitResult.CONTINUE;
                    }

                    // invoked before entering a subdirectory
                    @Override
                    public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                        if (!preVisit.condition(dir)) {
                            logger.info("目录被跳过: {}", dir);
                            return FileVisitResult.SKIP_SUBTREE;
                        }
                        logger.info("目录被访问: {}", dir);
                        return FileVisitResult.CONTINUE;
                    }

                    // invoked after a directory has been visited
                    @Override
                    public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                        if (!postVisit.condition(dir)) {
                            logger.info("目录被跳过: {}", dir);
                            return FileVisitResult.SKIP_SUBTREE;
                        }
                        Files.delete(dir);
                        logger.info("目录被删除: {}", dir);
                        return FileVisitResult.CONTINUE;
                    }

                    // invoked when a visit fails
                    @Override
                    public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                        // hook for business-specific handling
                        return super.visitFileFailed(file, exc);
                    }
                }
        );
        return true;
    }
}
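A short sketch of the visitor-driven delete with a custom FileCondition; the path and the ".keep" convention are illustrative:

// Illustrative only: delete a scratch tree but skip anything ending in ".keep".
import com.genius.method.FileCondition;
import com.genius.util.FileUtil;

import java.io.IOException;

public class DeleteDemo {
    public static void main(String[] args) throws IOException {
        FileCondition keepMarkers = p -> !p.toString().endsWith(".keep");
        // Entries failing a condition are skipped; everything else is deleted on the walk.
        FileUtil.deleteDirectory("/tmp/scratch", keepMarkers, keepMarkers, keepMarkers);
    }
}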
================================================
FILE: File/src/main/java/com/genius/util/JsonFileUtil.java
================================================
package com.genius.util;

import com.alibaba.fastjson.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * @author Genius
 * @date 2023/04/20 10:53
 **/
/**
 * JSON file operations.
 */
public class JsonFileUtil {
    private static final Logger logger = LoggerFactory.getLogger(JsonFileUtil.class);

    /* ------------------read json file------------------ */

    /**
     * Reads a JSON file.
     * @param filePath directory path (should be absolute)
     * @param fileName file name
     * @return Map with the file's data
     */
    public static Map<String, Object> readJsonFile(String filePath, String fileName) {
        return readJsonFile(Paths.get(filePath, fileName).toString());
    }

    /**
     * Reads a JSON file.
     * @param fullPath full path including the file name
     * @return Map with the file's data
     */
    public static Map<String, Object> readJsonFile(String fullPath) {
        Map<String, Object> maps = null;
        JSONObject jsonObject = readJsonFileToJSONObject(fullPath);
        if (Objects.nonNull(jsonObject)) {
            maps = jsonObject.getInnerMap();
        }
        return maps;
    }

    /**
     * Reads a JSON file into a JSONObject.
     * @param fullPath full path including the file name
     * @return JSONObject
     */
    public static JSONObject readJsonFileToJSONObject(String fullPath) {
        return readJsonFileToObject(fullPath, JSONObject.class);
    }

    /**
     * Reads a JSON file into the given class.
     * @param fullPath full path including the file name
     * @param clazz    target class
     * @return T
     */
    public static <T> T readJsonFileToObject(String fullPath, Class<T> clazz) {
        T t = null;
        Path dir = Paths.get(fullPath);
        try {
            if (FileUtil.isFileExist(dir.toString())) {
                String res = Files.readString(dir, StandardCharsets.UTF_8);
                logger.debug("读取json文件成功, 文件内容为: {}", res);
                t = JSON.parseObject(res, clazz);
            }
        } catch (Exception e) {
            logger.error("读取json文件失败", e);
        }
        return t;
    }

    /* ------------------write json file------------------ */

    /**
     * Writes a JSON file.
     * @param filePath directory path (should be absolute)
     * @param fileName file name
     * @param data     JSON data to write
     * @return file
     */
    public static File writeJsonFile(String filePath, String fileName, Map<String, Object> data) {
        return writeJsonFile(Paths.get(filePath, fileName).toString(), data);
    }

    /**
     * Writes a JSON file.
     * @param fullPath full path including the file name
     * @param data     JSON data to write
     * @return file
     */
    public static File writeJsonFile(String fullPath, Map<String, Object> data) {
        return writeJsonFile0(fullPath, JSON.toJSONString(data, true), true);
    }

    /**
     * Writes a JSON file, failing if the file does not exist.
     * @param filePath directory path (should be absolute)
     * @param fileName file name
     * @param data     JSON data to write
     * @return file
     */
    public static <T> File writeJsonFileIsExist(String filePath, String fileName, T data) {
        return writeJsonFileIsExist(Paths.get(filePath, fileName).toString(), data);
    }

    /**
     * Writes a JSON file, failing if the file does not exist.
     * @param fullPath full path including the file name
     * @param data     JSON data to write
     * @return file
     */
    public static <T> File writeJsonFileIsExist(String fullPath, T data) {
        return writeJsonFile0(fullPath, JSON.toJSONString(data, true), false);
    }

    /**
     * Writes an object to a JSON file.
     * @param filePath directory path (should be absolute)
     * @param fileName file name
     * @param obj      object to write
     * @return file
     */
    public static <T> File writeJsonFile(String filePath, String fileName, T obj) {
        return writeJsonFile(Paths.get(filePath, fileName).toString(), obj);
    }

    /**
     * Writes an object to a JSON file.
     * @param fullPath full path including the file name
     * @param obj      object to write
     * @return file
     */
    public static <T> File writeJsonFile(String fullPath, T obj) {
        return writeJsonFile0(fullPath, JSON.toJSONString(obj, true), true);
    }

    /**
     * Writes a JSON string, optionally creating the file if it does not exist.
     * @param fullPath   full path including the file name
     * @param json       JSON payload
     * @param autoCreate whether to create the file automatically
     * @return file
     */
    public static File writeJsonFile0(String fullPath, String json, boolean autoCreate) {
        Path dir = Paths.get(fullPath);
        try {
            if (!FileUtil.isFileExist(dir.toString()) && autoCreate) {
                Files.createFile(dir);
                logger.debug("新建文件{}", dir);
            }
            if (FileUtil.isFileExist(dir.toString())) {
                Files.writeString(dir, json, StandardCharsets.UTF_8);
                logger.debug("写入json文件成功, 文件内容为: {}", json);
            } else {
                logger.error("写入json文件失败, 文件不存在");
                return null;
            }
        } catch (Exception e) {
            logger.error("写入json文件失败", e);
            return null;
        }
        return dir.toFile();
    }

    /**
     * Streams a large JSON object into a file entry by entry.
     * @param fullPath full path including the file name
     * @param data     JSON data to write
     * @return file
     */
    public static File writeBigJsonFile(String fullPath, Map<String, Object> data) {
        Path dir = Paths.get(fullPath);
        try {
            if (!FileUtil.isFileExist(dir.toString())) {
                Files.createFile(dir);
                logger.info("新建文件{}", dir);
            }
            if (FileUtil.isFileExist(dir.toString())) {
                JSONWriter writer = new JSONWriter(Files.newBufferedWriter(dir, StandardCharsets.UTF_8));
                writer.startObject();
                for (Map.Entry<String, Object> stringObjectEntry : data.entrySet()) {
                    String key = stringObjectEntry.getKey();
                    Object value = stringObjectEntry.getValue();
                    writer.writeKey(key);
                    writer.writeValue(value);
                    logger.info("写入json类成功, 类内容为: {}:{}", key, value);
                }
                writer.endObject();
                writer.close();
            } else {
                logger.error("写入json文件失败, 文件不存在");
                return null;
            }
        } catch (Exception e) {
            logger.error("写入json文件失败", e);
            return null;
        }
        return dir.toFile();
    }

    /**
     * Streams a large list of objects into a JSON array file.
     * @param fullPath full path including the file name
     * @param objs     objects to write
     * @return file
     */
    public static <T> File writeBigJsonFile(String fullPath, List<T> objs) {
        Path dir = Paths.get(fullPath);
        try {
            if (!FileUtil.isFileExist(dir.toString())) {
                Files.createFile(dir);
                logger.info("新建文件{}", dir);
            }
            if (FileUtil.isFileExist(dir.toString())) {
                JSONWriter writer = new JSONWriter(Files.newBufferedWriter(dir, StandardCharsets.UTF_8));
                writer.startArray();
                for (T obj : objs) {
                    writer.writeValue(obj);
                    logger.info("写入json类成功, 类内容为: {}", obj);
                }
                writer.endArray();
                writer.close();
            } else {
                logger.error("写入json文件失败, 文件不存在");
                return null;
            }
        } catch (Exception e) {
            logger.error("写入json文件失败", e);
            return null;
        }
        return dir.toFile();
    }
}
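An illustrative round-trip through the helpers above; the path is a placeholder:

// Illustrative only: write a map as pretty-printed JSON, then read it back.
import com.genius.util.JsonFileUtil;

import java.util.Map;

public class JsonDemo {
    public static void main(String[] args) {
        Map<String, Object> conf = Map.of("username", "root", "password", "secret");
        // Creates /tmp/conf.json if missing, then pretty-prints the map into it.
        JsonFileUtil.writeJsonFile("/tmp", "conf.json", conf);
        Map<String, Object> back = JsonFileUtil.readJsonFile("/tmp", "conf.json");
        System.out.println(back);
    }
}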
================================================
FILE: File/src/main/java/com/genius/util/OSSUtil.java
================================================
package com.genius.util;

/**
 * @author Genius
 * @date 2023/05/01 16:34
 **/
public class OSSUtil {
}

================================================
FILE: File/src/test/java/com/genius/AppTest.java
================================================
package com.genius;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Unit test for simple App.
 */
public class AppTest extends TestCase {
    /**
     * Create the test case
     *
     * @param testName name of the test case
     */
    public AppTest(String testName) {
        super(testName);
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite() {
        return new TestSuite(AppTest.class);
    }

    /**
     * Rigorous Test :-)
     */
    public void testApp() {
        assertTrue(true);
    }
}
================================================
FILE: Hydra/hydra-architecture/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <!-- The XML markup was stripped in extraction; the element structure below is
         reconstructed from the surviving values and standard Maven layout. -->
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <modelVersion>4.0.0</modelVersion>

    <groupId>com.pinecone.hydra.kernel</groupId>
    <artifactId>hydra-architecture</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.ulf</groupId>
            <artifactId>ulfhedinn</artifactId>
            <version>1.2.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.slime</groupId>
            <artifactId>slime</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/Hydra.java
================================================
package com.pinecone.hydra;

import com.pinecone.framework.system.Framework;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.system.architecture.SystemComponentManager;
import com.pinecone.hydra.system.HySkeleton;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.MultiComponentSystem;
import com.pinecone.hydra.system.SystemSkeleton;

import java.nio.file.Path;

public abstract class Hydra extends Framework implements Hydrogen {
    private HySkeleton mComponentManager ;

    protected boolean  mDebugMode ;
    protected Path     mWorkingPath ;
    protected String   mServiceID ;

    public Hydra(){
        this( new String[0], null, null );
    }

    public Hydra( String[] args ){
        this( args, null, null );
    }

    public Hydra( String[] args, String szName ){
        this( args, szName, null );
    }

    public Hydra( String[] args, CascadeSystem parent ){
        this( args, null, parent );
    }

    public Hydra( String[] args, String szName, CascadeSystem parent, HySkeleton manager ){
        super( args, szName, parent );
        if( manager == null ) {
            manager = new SystemSkeleton( this );
        }
        this.mComponentManager = manager;
    }

    public Hydra( String[] args, String szName, CascadeSystem parent ){
        this( args, szName, parent, null );
    }

    protected void prepare_system_skeleton() {
    }

    @Override
    public HySkeleton getComponentManager() {
        return this.mComponentManager;
    }

    @Override
    public MultiComponentSystem apply( SystemComponentManager manager ) {
        this.mComponentManager = (HySkeleton)manager;
        return this;
    }

    @Override
    public boolean isDebugMode() {
        return this.mDebugMode;
    }

    @Override
    public Path getWorkingPath() {
        return this.mWorkingPath;
    }

    @Override
    public String getServiceID() {
        return this.mServiceID;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/Hydradom.java
================================================
package com.pinecone.hydra;

import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.hydra.system.HySkeleton;
import com.pinecone.hydra.system.subsystem.CentralMicroSystemCabinet;
import com.pinecone.hydra.system.subsystem.KernelMicroSystemCabinet;
import com.pinecone.hydra.system.types.HydraKingdom;

public abstract class Hydradom extends Hydra implements HydraKingdom {
    protected KernelMicroSystemCabinet mKernelMicroSystemCabinet;

    public Hydradom(){
        this( new String[0], null, null );
    }

    public Hydradom( String[] args ){
        this( args, null, null );
    }

    public Hydradom( String[] args, String szName ){
        this( args, szName, null );
    }

    public Hydradom( String[] args, CascadeSystem parent ){
        this( args, null, parent );
    }
public Hydradom( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } public Hydradom( String[] args, String szName, CascadeSystem parent, HySkeleton manager ){ super( args, szName, parent, manager ); } @Override protected void prepare_system_skeleton() { this.mKernelMicroSystemCabinet = new CentralMicroSystemCabinet( this ); this.getComponentManager().addComponent( this.mKernelMicroSystemCabinet ); } public KernelMicroSystemCabinet getKernelMicroSystemCabinet() { return this.mKernelMicroSystemCabinet; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Container.java ================================================ package com.pinecone.hydra.deploy; public interface Container extends Integration { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Deploy.java ================================================ package com.pinecone.hydra.deploy; import com.pinecone.hydra.device.Deployment; public interface Deploy extends Deployment { String getStatus(); void setStatus(String status); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Integration.java ================================================ package com.pinecone.hydra.deploy; public interface Integration extends Deploy { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Namespace.java ================================================ package com.pinecone.hydra.deploy; public interface Namespace { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/PhysicalHost.java ================================================ package com.pinecone.hydra.deploy; public interface PhysicalHost extends Server { String getHardwareSpecs(); void setHardwareSpecs(String hardwareSpecs); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Quick.java ================================================ package com.pinecone.hydra.deploy; public interface Quick extends Integration { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Server.java ================================================ package com.pinecone.hydra.deploy; public interface Server extends Deploy { String getName(); void setName(String name); String getIpAddress(); void setIpAddress(String ipAddress); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/VirtualMachine.java ================================================ package com.pinecone.hydra.deploy; public interface VirtualMachine extends Server { PhysicalHost getAffiliateHost(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/device/Deployment.java ================================================ package com.pinecone.hydra.device; public interface Deployment extends Device { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/device/Device.java ================================================ package com.pinecone.hydra.device; import com.pinecone.framework.system.prototype.Pinenut; public interface Device extends Pinenut { } 
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/device/Disk.java ================================================ package com.pinecone.hydra.device; public interface Disk extends Device { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/express/Deliver.java ================================================ package com.pinecone.hydra.express; import com.pinecone.framework.system.prototype.Pinenut; public interface Deliver extends Pinenut { String getName(); Express getExpress(); void toDispatch( Package that ) throws Exception; } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/express/Express.java ================================================ package com.pinecone.hydra.express; import com.pinecone.framework.system.prototype.Pinenut; public interface Express extends Pinenut { Deliver recruit ( String szName ); Express register( Deliver deliver ); Express fired ( Deliver deliver ); boolean hasOwnDeliver( Deliver deliver ); boolean hasOwnDeliver( String deliverName ); int size (); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/express/Package.java ================================================ package com.pinecone.hydra.express; import com.pinecone.framework.system.prototype.Pinenut; public interface Package extends Pinenut { Deliver getDeliver(); String getConsignee(); Package entrust( Deliver deliver ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchModularizedSubsystem.java ================================================ package com.pinecone.hydra.system; import org.slf4j.Logger; import com.pinecone.framework.system.ModularizedSubsystem; import com.pinecone.framework.util.config.PatriarchalConfig; public abstract class ArchModularizedSubsystem implements ModularizedSubsystem { protected Hydrogen mPrimarySystem; protected String mszName; protected Logger mLogger; protected PatriarchalConfig mSubsystemConfig; public ArchModularizedSubsystem( Hydrogen primarySystem, String name, PatriarchalConfig config ) { this.mPrimarySystem = primarySystem; this.mszName = name; this.mLogger = primarySystem.getTracerScope().newLogger( name ); this.mSubsystemConfig = config; } @Override public PatriarchalConfig getSubsystemConfig() { return this.mSubsystemConfig; } @Override public Hydrogen parentSystem() { return this.mPrimarySystem; } @Override public String getName() { return this.mszName; } public Logger getLogger() { return this.mLogger; } protected abstract void traceWelcomeInfo() ; @Override public void release() { } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchSystemAutoAssembleComponent.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.architecture.CascadeComponent; import com.pinecone.framework.unit.affinity.ObjectOverrider; import com.pinecone.framework.unit.affinity.RecursiveUnitOverrider; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.component.Slf4jTraceable; import org.slf4j.Logger; public abstract class ArchSystemAutoAssembleComponent extends 
ArchSystemCascadeComponent implements Slf4jTraceable { protected Logger mLogger; protected ObjectOverrider mObjectOverrider ; protected DynamicFactory mUniformFactory ; protected ArchSystemAutoAssembleComponent(Namespace name, Hydrogen system, SystemCascadeComponentManager manager, CascadeComponent parent ) { super( name, system, manager, parent ); this.mLogger = system.getTracerScope().newLogger( this.className() ); this.mObjectOverrider = new RecursiveUnitOverrider<>(); this.mUniformFactory = new GenericDynamicFactory( system.getTaskManager().getClassLoader() ); } protected ArchSystemAutoAssembleComponent( Namespace name, SystemCascadeComponentManager manager, CascadeComponent parent ) { this( name, manager.getSystem(), manager, parent ); } protected ArchSystemAutoAssembleComponent( Namespace name, SystemCascadeComponentManager manager ) { this( name, manager, null ); } public DynamicFactory getSharedUniformFactory() { return this.mUniformFactory; } @Override public Logger getLogger() { return this.mLogger; } public ObjectOverrider getObjectOverrider() { return this.mObjectOverrider; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchSystemCascadeComponent.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.architecture.ArchCascadeComponent; import com.pinecone.framework.system.architecture.CascadeComponent; import com.pinecone.framework.util.name.Namespace; public abstract class ArchSystemCascadeComponent extends ArchCascadeComponent implements HyComponent { private Hydrogen mSystem; protected ArchSystemCascadeComponent( Namespace name, Hydrogen system, SystemCascadeComponentManager manager, CascadeComponent parent ) { super( name, manager, parent ); this.mSystem = system; } protected ArchSystemCascadeComponent( Namespace name, SystemCascadeComponentManager manager, CascadeComponent parent ) { this( name, manager.getSystem(), manager, parent ); } protected ArchSystemCascadeComponent( Namespace name, SystemCascadeComponentManager manager ) { this( name, manager, null ); } @Override public SystemCascadeComponentManager getComponentManager() { return (SystemCascadeComponentManager) super.getComponentManager(); } @Override public Hydrogen getSystem() { return this.mSystem; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchSystemCascadeComponentManager.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.architecture.ArchCascadeComponentManager; public abstract class ArchSystemCascadeComponentManager extends ArchCascadeComponentManager implements SystemCascadeComponentManager { protected Hydrogen mSystem; protected ArchSystemCascadeComponentManager( Hydrogen system ){ super(); this.mSystem = system; } @Override public Hydrogen getSystem() { return this.mSystem; } @Override public SystemCascadeComponent getRootComponentByFullName(String fullName) { return (SystemCascadeComponent)super.getRootComponentByFullName(fullName); } @Override public SystemCascadeComponent getComponentByFullName(String fullName) { return (SystemCascadeComponent)super.getComponentByFullName(fullName); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/BlockSystem.java ================================================ package com.pinecone.hydra.system; 
import com.pinecone.framework.system.prototype.Pinenut; public interface BlockSystem extends Pinenut { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/DistributedSystem.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.prototype.Pinenut; public interface DistributedSystem extends Pinenut { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/FederalSystem.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.prototype.Pinenut; public interface FederalSystem extends Pinenut { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HierarchySystem.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.prototype.Pinenut; public interface HierarchySystem extends Pinenut { HyHierarchy getServiceArch(); boolean isTopmostArchy(); HyHierarchy getTopmostArchy(); boolean isBottommostArchy(); HyHierarchy getBottommostArchy(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HyComponent.java ================================================ package com.pinecone.hydra.system; public interface HyComponent extends SystemCascadeComponent { @Override Hydrogen getSystem(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HyHierarchy.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.prototype.Pinenut; public interface HyHierarchy extends Pinenut { String getName(); boolean isDominantClass(); boolean isWorkerClass(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HySkeleton.java ================================================ package com.pinecone.hydra.system; public interface HySkeleton extends SystemCascadeComponentManager { Hydrogen getSystem(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/Hydrogen.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.Pinecore; import com.pinecone.hydra.system.component.Slf4jTracerScope; import java.nio.file.Path; public interface Hydrogen extends Pinecore, ScopedSystem, MultiComponentSystem { @Override HySkeleton getComponentManager(); String getServiceID(); Path getWorkingPath(); boolean isDebugMode(); Slf4jTracerScope getTracerScope(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/MultiComponentSystem.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.architecture.SystemComponentManager; import com.pinecone.framework.system.prototype.Pinenut; public interface MultiComponentSystem extends Pinenut { SystemComponentManager getComponentManager(); MultiComponentSystem apply( SystemComponentManager manager ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ScopedSystem.java 
================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.MultiScopeMap; public interface ScopedSystem extends Pinenut { MultiScopeMap getGlobalConfigScope(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/SystemCascadeComponent.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.architecture.CascadeComponent; import com.pinecone.framework.system.architecture.SystemComponent; public interface SystemCascadeComponent extends CascadeComponent, SystemComponent { @Override SystemCascadeComponentManager getComponentManager(); default Hydrogen getSystem() { return this.getComponentManager().getSystem(); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/SystemCascadeComponentManager.java ================================================ package com.pinecone.hydra.system; import com.pinecone.framework.system.architecture.CascadeComponentManager; import com.pinecone.framework.system.architecture.SystemComponentManager; public interface SystemCascadeComponentManager extends SystemComponentManager, CascadeComponentManager { Hydrogen getSystem(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/SystemSkeleton.java ================================================ package com.pinecone.hydra.system; public class SystemSkeleton extends ArchSystemCascadeComponentManager implements HySkeleton { public SystemSkeleton( Hydrogen system ){ super( system ); } @Override public Hydrogen getSystem() { return this.mSystem; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/CentralControlSubsystem.java ================================================ package com.pinecone.hydra.system.centrum; import com.pinecone.framework.system.ModularizedSubsystem; import com.pinecone.framework.system.regime.arch.Lord; import com.pinecone.hydra.system.component.Slf4jTraceable; public interface CentralControlSubsystem extends ModularizedSubsystem, Lord, Slf4jTraceable { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/Centrum.java ================================================ package com.pinecone.hydra.system.centrum; /** * Pinecone Ursus For Java, Uniformity Centralized Metasystem * Author: Harald.E (Dragon King) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Uniformity Centralized Metasystem * 统一中央集权元系统 * ***************************************************************************************** * 1). Top-level abstraction and aggregation of large-scale distributed systems, * enabling centralized global control-ability and simplified manipulation of complex systems. * 2). Meta-level aggregation of global resources, tasks, services, data, and intelligence, * aimed at centralized planning and large-scale absolute control. * 3). Core architecture: atomic-level design of meta-information, control scheduling, intelligence, * and auditing, centralized aggregation, and highest-level external abstraction. 
* ***************************************************************************************** * 1). 大型分布式系统的顶级抽象化、汇总化,面向中央全局可控,大型系统简单操纵。 * 2). 面向全局资源、任务、服务、数据、情报等元汇总,全局统筹规划、大规模绝对控制。 * 3). 核心架构:元信息、控制调度、情报、审计原子化,中央汇总,对外最高抽象。 * ***************************************************************************************** */ public interface Centrum extends UniformCentralSystem, Metasystem { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/Metasystem.java ================================================ package com.pinecone.hydra.system.centrum; import com.pinecone.framework.system.prototype.Pinenut; public interface Metasystem extends Pinenut { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/UniformCentralSystem.java ================================================ package com.pinecone.hydra.system.centrum; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.architecture.Component; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.DistributedSystem; import com.pinecone.hydra.system.HierarchySystem; import com.pinecone.hydra.system.imperium.ImperiumPrivy; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.ulf.util.guid.i64.GuidAllocator72; public interface UniformCentralSystem extends HierarchySystem, DistributedSystem { GuidAllocator getSystemGuidAllocator(); GuidAllocator72 getSystemGuidAllocator72(); Component imageLoader(); KernelObjectConfig fundamentalKernelObjectConfig(); ImperiumPrivy imperiumPrivy(); static UniformCentralSystem evalCentralSystem( Processum that ) { if ( that instanceof UniformCentralSystem ) { return (UniformCentralSystem) that; } RuntimeSystem rs = that.parentSystem(); if ( rs instanceof UniformCentralSystem ) { return (UniformCentralSystem) rs; } throw new IllegalArgumentException( "Not in UniformCentralSystem family." 
); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/ComponentInitializationException.java ================================================ package com.pinecone.hydra.system.component; import com.pinecone.framework.system.prototype.Pinenut; public class ComponentInitializationException extends Exception implements Pinenut { public ComponentInitializationException () { super(); } public ComponentInitializationException ( String message ) { super(message); } public ComponentInitializationException ( String message, Throwable cause ) { super(message, cause); } public ComponentInitializationException ( Throwable cause ) { super(cause); } protected ComponentInitializationException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/GenericResourceDispenserCenter.java ================================================ package com.pinecone.hydra.system.component; import com.pinecone.framework.system.construction.StructureInstanceDispenser; import com.pinecone.framework.system.construction.UnifyCentralInstanceDispenser; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.ArchSystemCascadeComponent; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; public class GenericResourceDispenserCenter extends ArchSystemCascadeComponent implements ResourceDispenserCenter { protected StructureInstanceDispenser mInstanceDispenser; public GenericResourceDispenserCenter(Namespace name, Hydrogen system, HyComponent parent ) { super( name, system, system.getComponentManager(), parent ); this.mInstanceDispenser = new UnifyCentralInstanceDispenser(); } public GenericResourceDispenserCenter(Hydrogen system, HyComponent parent ) { this( null, system, parent ); } public GenericResourceDispenserCenter( Hydrogen system ) { this( system, null ); } @Override public StructureInstanceDispenser getInstanceDispenser() { return this.mInstanceDispenser; } @Override public Hydrogen getSystem() { return super.getSystem(); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/GenericTracerScope.java ================================================ package com.pinecone.hydra.system.component; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.ArchSystemCascadeComponent; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class GenericTracerScope extends ArchSystemCascadeComponent implements Slf4jTracerScope { public GenericTracerScope(Namespace name, Hydrogen system, HyComponent parent ) { super( name, system, system.getComponentManager(), parent ); } public GenericTracerScope(Hydrogen system, HyComponent parent ) { this( null, system, parent ); } public GenericTracerScope( Hydrogen system ) { this( system, null ); } @Override public Hydrogen getSystem() { return super.getSystem(); } @Override public String getLoggerName( String name ){ return String.format( "%s<%s>", this.getSystem().className(), name ); } @Override public Logger newLogger( String name ){ return LoggerFactory.getLogger( this.getLoggerName( name ) ); } } 
================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/LogStatuses.java
================================================
package com.pinecone.hydra.system.component;

public final class LogStatuses {
    public static final String StatusStart         = "Start";
    public static final String StatusDone          = "Done";
    public static final String StatusVitalization  = "Vitalization";
    public static final String StatusVitalized     = "Vitalized";
    public static final String StatusTermination   = "Termination";
    public static final String StatusTerminated    = "Terminated";
    public static final String StatusStandby       = "Standby";
    public static final String StatusReady         = "Ready";
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/LoggingConfigurator.java
================================================
package com.pinecone.hydra.system.component;

import java.util.Map;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import org.slf4j.LoggerFactory;

import com.pinecone.framework.system.ConformitySystem;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.config.PatriarchalConfig;

public class LoggingConfigurator implements TracerConfigurator {
    protected org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger( this.getClass() );

    protected ConformitySystem mConformitySystem;

    public LoggingConfigurator( ConformitySystem conformitySystem ) {
        this.mConformitySystem = conformitySystem;
    }

    @Override
    public void apply() {
        PatriarchalConfig tracer = this.mConformitySystem.getSystemConfig().getChild( "Tracer" );
        if ( tracer != null ) {
            PatriarchalConfig logging = tracer.getChild( "Logging" );
            if ( logging != null ) {
                PatriarchalConfig levels = logging.getChild( "Levels" );
                if ( levels instanceof JSONConfig ) {
                    JSONConfig joLevels = (JSONConfig) levels;
                    apply( joLevels );
                    log.info( "[Lifecycle] Set logging levels." );
                }
            }
        }
    }

    public static void apply( Map<String, Object> levelMap ) {
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        for ( Map.Entry<String, Object> entry : levelMap.entrySet() ) {
            String loggerName = entry.getKey();
            String levelStr = entry.getValue().toString();
            Level level = Level.toLevel( levelStr, Level.INFO );
            if ( "root".equalsIgnoreCase( loggerName ) ) {
                context.getLogger( Logger.ROOT_LOGGER_NAME ).setLevel( level );
            } else {
                context.getLogger( loggerName ).setLevel( level );
            }
        }
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/ResourceDispenserCenter.java
================================================
package com.pinecone.hydra.system.component;

import com.pinecone.framework.system.construction.StructureInstanceDispenser;
import com.pinecone.hydra.system.HyComponent;

public interface ResourceDispenserCenter extends HyComponent {
    StructureInstanceDispenser getInstanceDispenser() ;
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/Slf4jTraceable.java
================================================
package com.pinecone.hydra.system.component;

import com.pinecone.framework.system.prototype.Pinenut;
import org.slf4j.Logger;

public interface Slf4jTraceable extends Pinenut {
    Logger getLogger();

    default Slf4jTraceable infoLifecycle( String szWhat, String szStateOrExtra ) {
        this.getLogger().info( "[Lifecycle] [{}] <{}>", szWhat, szStateOrExtra );
        return this;
    }

    default Slf4jTraceable infoLifecycle( String szStateOrExtra ) {
        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
        return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(), szStateOrExtra );
    }

    default Slf4jTraceable infoLifecycleDone( String szWhat ) {
        return this.infoLifecycle( szWhat, LogStatuses.StatusDone );
    }

    default Slf4jTraceable infoLifecycleInitializationDone() {
        return this.infoLifecycle( this.className() + "::Constructor", LogStatuses.StatusDone );
    }

    default Slf4jTraceable infoCriticalOperation( String szWhat, String szStateOrExtra ) {
        this.getLogger().info( "[CriticalOperation] [{}] <{}>", szWhat, szStateOrExtra );
        return this;
    }

    default Slf4jTraceable warnSimple( String szStateOrExtra ) {
        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
        this.getLogger().warn( "[{}] <{}>", stackTraceElements[ 2 ].getMethodName(), szStateOrExtra );
        return this;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/Slf4jTracerScope.java
================================================
package com.pinecone.hydra.system.component;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public interface Slf4jTracerScope extends TracerScope {
    @Override
    default Logger newLogger( String name ){
        return LoggerFactory.getLogger( this.getLoggerName( name ) );
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/TracerConfigurator.java
================================================
package com.pinecone.hydra.system.component;

import com.pinecone.framework.system.prototype.Pinenut;

public interface TracerConfigurator extends Pinenut {
    void apply();
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/TracerScope.java
================================================
package
com.pinecone.hydra.system.component; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; public interface TracerScope extends HyComponent { @Override Hydrogen getSystem(); String getLoggerName( String name ); Object newLogger( String name ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/identifier/KOPathResolver.java ================================================ package com.pinecone.hydra.system.identifier; import com.pinecone.framework.util.name.path.BasicPathResolver; import com.pinecone.hydra.system.ko.KernelObjectConfig; public class KOPathResolver extends BasicPathResolver { public KOPathResolver( KernelObjectConfig config ) { super( config.getPathNameSeparator(), config.getPathNameSepRegex() ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/ImperiumPrivy.java ================================================ package com.pinecone.hydra.system.imperium; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.ko.runtime.CentralizedRuntimeInstrument; /** * Pinecone Ursus For Java Imperium Privy Council * Author: Harald.E (Dragon King), Ken * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Imperium Privy Council * Information and control are separated in this architecture, where the Privy serves as a central intelligence marshaling database, * and in principle, does not have direct control privilege over specific objects. * Its core functions are to centralize the collection of information from all objects, facilitating uniformed orchestration, analysis, and control. * * Imperium Privy Council (枢密院,掌文书、行咨询), * 信息、控制分离架构,枢密是中央情报编组数据库,原则上没有具体对象的实际控制权。 * 其核心职能为:中心化收集所有对象的信息,便于统一统筹决策、分析和控制。 * * e.g. \Device\HarddiskVolume3\Users\dragonking\AppData\Local\ => {name: xxx, handle: 123, typeId: 37} * e.g. 
/proc/137/status => {name: scsi_eh_26, State: S, Pid: 137} * ***************************************************************************************** */ public interface ImperiumPrivy extends HyComponent { CentralizedRuntimeInstrument getExpressInstrument(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/KernelObjectRootMountPoint.java ================================================ package com.pinecone.hydra.system.imperium; public enum KernelObjectRootMountPoint { KernelConfig ( "KernelConfig", KernelRootMountPoint.Config.getConfigSection() + ".Kernel", KernelRootMountPoint.Config.getMountPoint() + "/kernel" ), Registry ( "Registry", KernelRootMountPoint.Config.getConfigSection() + ".Registry", KernelRootMountPoint.Config.getMountPoint() + "/registry" ), TaskMeta ( "TaskMeta", KernelRootMountPoint.Meta.getConfigSection() + ".Task", KernelRootMountPoint.Meta.getMountPoint() + "/task" ), ServiceMeta ( "ServiceMeta", KernelRootMountPoint.Meta.getConfigSection() + ".Service", KernelRootMountPoint.Meta.getMountPoint() + "/service" ), DeployMeta ( "DeployMeta", KernelRootMountPoint.Meta.getConfigSection() + ".Deploy", KernelRootMountPoint.Device.getMountPoint() + "/deploy" ), SysImages ( "SysImages", KernelRootMountPoint.System.getConfigSection() + ".Images", KernelRootMountPoint.System.getMountPoint() + "/public/global/exe/images" ) ; private final String name; private final String configSection; private final String mountPoint; KernelObjectRootMountPoint( String name, String configSection, String mountPoint ) { this.name = name; this.configSection = configSection; this.mountPoint = mountPoint; } public String getName() { return this.name; } public String getConfigSection() { return this.configSection; } public String getMountPoint() { return this.mountPoint; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/KernelPrivyFileSystemConstants.java ================================================ package com.pinecone.hydra.system.imperium; public final class KernelPrivyFileSystemConstants { public static final String Root = "$"; public static final String NomenclatureSeparator = "."; public static final String FileSystemRoot = Root + NomenclatureSeparator + "KPFS"; // $.KPFS public static final String PathNameSeparator = "/"; } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/KernelRootMountPoint.java ================================================ package com.pinecone.hydra.system.imperium; public enum KernelRootMountPoint { Config ( "Config", KernelPrivyFileSystemConstants.FileSystemRoot + ".Config", "conf" ), Device ( "Device", KernelPrivyFileSystemConstants.FileSystemRoot + ".Device", "dev" ), UserHome ( "UserHome", KernelPrivyFileSystemConstants.FileSystemRoot + ".UserHome", "home" ), Mount ( "Mount", KernelPrivyFileSystemConstants.FileSystemRoot + ".Mount", "mnt" ), System ( "System", KernelPrivyFileSystemConstants.FileSystemRoot + ".System", "sys" ), Process ( "Process", KernelPrivyFileSystemConstants.FileSystemRoot + ".Process", "proc" ), Variable ( "Variable", KernelPrivyFileSystemConstants.FileSystemRoot + ".Variable", "var" ), Meta ( "Meta", KernelPrivyFileSystemConstants.FileSystemRoot + ".Meta", "meta" ), ; private final String name; private final String configSection; private final String mountPoint; KernelRootMountPoint( String name, 
String configSection, String mountPoint ) { this.name = name; this.configSection = configSection; this.mountPoint = mountPoint; } public String getName() { return this.name; } public String getConfigSection() { return this.configSection; } public String getMountPoint() { return this.mountPoint; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/ArchKernelObjectConfig.java ================================================ package com.pinecone.hydra.system.ko; import java.util.Map; import com.pinecone.framework.system.Nullable; public abstract class ArchKernelObjectConfig implements KernelObjectConfig { protected String mszPathNameSeparator = KernelObjectConstants.PathNameSeparator; protected String mszFullNameSeparator = KernelObjectConstants.FullNameSeparator; protected String mszPathNameSepRegex = KernelObjectConstants.PathNameSepRegex; protected String mszFullNameSepRegex = KernelObjectConstants.FullNameSepRegex; protected int mnShortPathLength = KernelObjectConstants.ShortPathLength; protected ArchKernelObjectConfig() { } public ArchKernelObjectConfig( @Nullable Map config ){ this(); if ( config == null ) { return; } this.mszPathNameSeparator = (String) config.getOrDefault("PathNameSeparator", KernelObjectConstants.PathNameSeparator); this.mszFullNameSeparator = (String) config.getOrDefault("FullNameSeparator", KernelObjectConstants.FullNameSeparator); this.mszPathNameSepRegex = (String) config.getOrDefault("PathNameSepRegex", KernelObjectConstants.PathNameSepRegex); this.mszFullNameSepRegex = (String) config.getOrDefault("FullNameSepRegex", KernelObjectConstants.FullNameSepRegex); this.mnShortPathLength = ( (Number) config.getOrDefault("ShortPathLength", KernelObjectConstants.ShortPathLength) ).intValue(); } @Override public String getPathNameSeparator() { return this.mszPathNameSeparator; } @Override public String getFullNameSeparator() { return this.mszFullNameSeparator; } @Override public String getPathNameSepRegex() { return this.mszPathNameSepRegex; } @Override public String getFullNameSepRegex() { return this.mszFullNameSepRegex; } @Override public int getShortPathLength() { return this.mnShortPathLength; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/CascadeInstrument.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.framework.system.regime.Instrument; import com.pinecone.framework.system.regimentation.UniformCascadeNodus; import com.pinecone.framework.util.name.Namespace; public interface CascadeInstrument extends UniformCascadeNodus, Instrument { String EmptySuperiorPathScope = ""; @Override CascadeInstrument parent(); void setParent( CascadeInstrument parent ); @Override default boolean isRoot() { return this.parent() == null; } default CascadeInstrument root() { return (CascadeInstrument) UniformCascadeNodus.super.root(); } @Override Namespace getTargetingName(); @Override void setTargetingName( Namespace name ); @Override default void setTargetingName( String name ) { UniformCascadeNodus.super.setTargetingName( name ); } @Override default String getSimpleName() { return this.getTargetingName().getSimpleName(); } @Override default String getFullName() { return this.getTargetingName().getFullName(); } /** * Superior Path Scope (Specialized namespace) * 上级键空间(专门命名空间) * e.g. 
`/proc`/pid/name => Scope : `/proc` * @return ParentPathScope */ String getSuperiorPathScope(); void applySuperiorPathScope( String superiorPathScope ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/CascadeKOTreeInstrument.java ================================================ package com.pinecone.hydra.system.ko; public interface CascadeKOTreeInstrument extends CascadeKernelObjectInstrument, KernelObjectTreeInstrument { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/CascadeKernelObjectInstrument.java ================================================ package com.pinecone.hydra.system.ko; public interface CascadeKernelObjectInstrument extends KernelObjectInstrument, CascadeInstrument { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/InstrumentException.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.framework.system.prototype.Pinenut; public class InstrumentException extends Exception implements Pinenut { public InstrumentException() { super(); } public InstrumentException( String message ) { super(message); } public InstrumentException( String message, Throwable cause ) { super(message, cause); } public InstrumentException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObject.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface KernelObject extends Pinenut { GUID getGuid(); String objectFunctionName(); String objectCategoryName(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectConfig.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.framework.system.prototype.Pinenut; public interface KernelObjectConfig extends Pinenut { String getPathNameSeparator(); String getFullNameSeparator(); String getPathNameSepRegex(); String getFullNameSepRegex(); int getShortPathLength(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectConstants.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.hydra.unit.imperium.ImperialTreeConstants; public final class KernelObjectConstants { public static String PathNameSeparator = "/"; public static String FullNameSeparator = "."; public static String PathNameSepRegex = "/"; public static String FullNameSepRegex = "\\."; public static int ShortPathLength = ImperialTreeConstants.DefaultShortPathLength; } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectInstrument.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.framework.system.regime.Instrument; import com.pinecone.framework.util.id.GuidAllocator; public interface KernelObjectInstrument extends Instrument { GuidAllocator getGuidAllocator(); KernelObjectConfig getConfig(); } 
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectTreeInstrument.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.hydra.unit.imperium.ImperialTree; public interface KernelObjectTreeInstrument extends KernelObjectInstrument { ImperialTree getMasterTrieTree(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/MetaPersistenceException.java ================================================ package com.pinecone.hydra.system.ko; public class MetaPersistenceException extends InstrumentException { public MetaPersistenceException() { super(); } public MetaPersistenceException( String message ) { super(message); } public MetaPersistenceException( String message, Throwable cause ) { super(message, cause); } public MetaPersistenceException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/QueryableInstrument.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.entity.EntityNode; public interface QueryableInstrument extends KernelObjectInstrument { String getPath( GUID objectGuid ); String querySystemKernelObjectPath( GUID objectGuid ) ; GUID queryGUIDByPath( String path ); EntityNode queryNode( String path ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/UOIUtils.java ================================================ package com.pinecone.hydra.system.ko; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.framework.util.uoi.UniformObjectLoaderFactory; public final class UOIUtils { public static UOI createJavaClass( String className, String resourceDetail ) { return UOI.create( String.format( "%s://%s/%s", UniformObjectLoaderFactory.DefaultJavaClassType, resourceDetail, className ) ); } public static UOI createLocalJavaClass( String className ) { return UOIUtils.createJavaClass( className, "" ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/action/ActionObject.java ================================================ package com.pinecone.hydra.system.ko.action; import com.pinecone.hydra.system.ko.KernelObject; public interface ActionObject extends KernelObject { String FunctionName = ActionObject.class.getSimpleName().replace( "Object", "" ); @Override default String objectFunctionName() { return FunctionName; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/action/EventObject.java ================================================ package com.pinecone.hydra.system.ko.action; public interface EventObject extends ActionObject { String FunctionName = EventObject.class.getSimpleName().replace( "Object", "" ); @Override default String objectFunctionName() { return FunctionName; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/control/ControlObject.java ================================================ package com.pinecone.hydra.system.ko.control; import com.pinecone.hydra.system.ko.KernelObject; public interface ControlObject extends 
KernelObject {
    String FunctionName = ControlObject.class.getSimpleName().replace( "Object", "" );

    @Override
    default String objectFunctionName() {
        return FunctionName;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/dao/GUIDNameManipulator.java
================================================
package com.pinecone.hydra.system.ko.dao;

import java.util.List;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

public interface GUIDNameManipulator extends Pinenut {
    List<GUID> getGuidsByName( String name );

    List<GUID> getGuidsByNameID( String name, GUID guid );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOIMappingDriver.java
================================================
package com.pinecone.hydra.system.ko.driver;

import java.util.Map;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.system.homotype.StereotypicInjector;
import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.hydra.system.Hydrogen;

public interface KOIMappingDriver extends Pinenut {
    String getVersionSignature();

    Hydrogen getSystem();

    Processum getSuperiorProcess();

    KOIMasterManipulator getMasterManipulator();

    // Temp, TODO
    StereotypicInjector autoConstruct( Class stereotype, Map config, Object instance );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOIMappingDriverFactory.java
================================================
package com.pinecone.hydra.system.ko.driver;

import com.pinecone.framework.system.prototype.Pinenut;

public interface KOIMappingDriverFactory extends Pinenut {
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOIMasterManipulator.java
================================================
package com.pinecone.hydra.system.ko.driver;

import com.pinecone.framework.system.prototype.Pinenut;

public interface KOIMasterManipulator extends Pinenut {
    KOISkeletonMasterManipulator getSkeletonMasterManipulator();
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOISkeletonMasterManipulator.java
================================================
package com.pinecone.hydra.system.ko.driver;

import com.pinecone.framework.system.prototype.Pinenut;

public interface KOISkeletonMasterManipulator extends Pinenut {
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/entity/ObjectHandle.java
================================================
package com.pinecone.hydra.system.ko.entity;

import com.pinecone.hydra.system.ko.handle.HandleObject;

public interface ObjectHandle extends HandleObject {
    String FunctionName = HandleObject.class.getSimpleName();

    @Override
    default String objectFunctionName() {
        return FunctionName;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/entity/ObjectTable.java
================================================
package com.pinecone.hydra.system.ko.entity;

import com.pinecone.framework.system.prototype.Pinenut;

public interface ObjectTable extends Pinenut {
}

================================================
FILE:
Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/AppliableKHandle.java ================================================ package com.pinecone.hydra.system.ko.handle; import com.pinecone.framework.util.id.GUID; public interface AppliableKHandle extends KHandle { KHandle applyTreeNodeName( String szTreeNodeName ); KHandle applyTreeNodeGuid( GUID treeNodeGuid ) ; } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/ArchKHandle.java ================================================ package com.pinecone.hydra.system.ko.handle; import com.pinecone.framework.util.id.GUID; public abstract class ArchKHandle implements KHandle { protected String mszTreeNodeName; protected GUID mTreeNodeGuid; public ArchKHandle( String treeNodeName, GUID treeNodeGuid ) { this.mszTreeNodeName = treeNodeName; this.mTreeNodeGuid = treeNodeGuid; } protected ArchKHandle() { this( null, null ); } public KHandle applyTreeNodeName( String szTreeNodeName ) { this.mszTreeNodeName = szTreeNodeName; return this; } public KHandle applyTreeNodeGuid( GUID treeNodeGuid ) { this.mTreeNodeGuid = treeNodeGuid; return this; } @Override public String getName() { return this.mszTreeNodeName; } @Override public GUID getGuid() { return this.mTreeNodeGuid; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/HandleObject.java ================================================ package com.pinecone.hydra.system.ko.handle; import com.pinecone.hydra.system.ko.KernelObject; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface HandleObject extends TreeNode, KernelObject { String FunctionName = HandleObject.class.getSimpleName().replace( "Object", "" ); @Override default String objectFunctionName() { return FunctionName; } @Override default String objectCategoryName() { return "Handle"; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/HandleType.java ================================================ package com.pinecone.hydra.system.ko.handle; public enum HandleType { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/KHandle.java ================================================ package com.pinecone.hydra.system.ko.handle; public interface KHandle extends HandleObject { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/KOMMountPointHandle.java ================================================ package com.pinecone.hydra.system.ko.handle; import com.pinecone.hydra.system.ko.kom.KOMInstrument; public interface KOMMountPointHandle extends ObjectTreeAddressingSectionHandle, KOMInstrument { KOMInstrument revealWrapped(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/ObjectTreeAddressingSectionHandle.java ================================================ package com.pinecone.hydra.system.ko.handle; import com.pinecone.hydra.unit.imperium.entity.EntityNode; public interface ObjectTreeAddressingSectionHandle extends KHandle, SectionHandle { EntityNode queryNode( String path ); } ================================================ FILE: 
Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/ObjectTreeGUIDAddressingSectionHandle.java ================================================ package com.pinecone.hydra.system.ko.handle; import com.pinecone.framework.util.id.GUID; public interface ObjectTreeGUIDAddressingSectionHandle extends KHandle, SectionHandle { GUID queryGUIDByPath( String path ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/SectionHandle.java ================================================ package com.pinecone.hydra.system.ko.handle; public interface SectionHandle extends HandleObject { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ArchKOMTree.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import com.pinecone.framework.util.name.Namespace; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.centrum.UniformCentralSystem; import com.pinecone.hydra.system.ko.CascadeInstrument; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.unit.imperium.ArchRegimentObjectModel; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.imperium.operator.OperatorFactory; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.ulf.util.guid.GUIDs; import java.util.ArrayList; import java.util.List; import java.util.Objects; public abstract class ArchKOMTree extends ArchRegimentObjectModel implements KOMInstrument { protected Namespace mThisNamespace; protected KOMInstrument mParentInstrument; protected Hydrogen hydrogen; protected Processum superiorProcess; protected GuidAllocator guidAllocator; protected OperatorFactory operatorFactory; protected PathResolver pathResolver; protected PathSelector pathSelector; protected DynamicFactory dynamicFactory; public ArchKOMTree ( Processum superiorProcess, KOIMasterManipulator masterManipulator, OperatorFactory operatorFactory, KernelObjectConfig kernelObjectConfig, PathSelector pathSelector, KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ){ this( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, superiorPathScope, guidAllocator ); this.pathSelector = pathSelector; this.operatorFactory = operatorFactory; } public ArchKOMTree ( Processum superiorProcess, KOIMasterManipulator masterManipulator, KernelObjectConfig kernelObjectConfig, KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ){ super( masterManipulator, kernelObjectConfig, superiorPathScope ); this.superiorProcess = superiorProcess; if ( this.superiorProcess 
instanceof Hydrogen) {
            this.hydrogen = (Hydrogen) this.superiorProcess;
        }
        else {
            this.hydrogen = (Hydrogen) superiorProcess.parentSystem();
        }
        this.guidAllocator     = guidAllocator;
        this.dynamicFactory    = new GenericDynamicFactory( this.superiorProcess.getTaskManager().getClassLoader() );
        this.mParentInstrument = parent;
        this.setTargetingName( name );
        this.prepare_uniform_skeleton();
    }

    protected void prepare_uniform_skeleton() {
        if ( this.superiorProcess != null ) {
            if ( this.guidAllocator == null && this.hydrogen instanceof UniformCentralSystem ) {
                UniformCentralSystem system = (UniformCentralSystem) this.hydrogen;
                this.guidAllocator = system.getSystemGuidAllocator();
            }
        }
        if ( this.guidAllocator == null ) {
            throw new IllegalArgumentException( "GUIDAllocator is undefined." );
        }
    }

    //************************************** CascadeInstrument **************************************

    @Override
    public KOMInstrument parent() {
        return this.mParentInstrument;
    }

    @Override
    public Processum getSuperiorProcess() {
        return this.superiorProcess;
    }

    @Override
    public void setParent( CascadeInstrument parent ) {
        this.mParentInstrument = (KOMInstrument) parent;
    }

    @Override
    public Namespace getTargetingName() {
        return this.mThisNamespace;
    }

    @Override
    public void setTargetingName( Namespace name ) {
        this.mThisNamespace = name;
    }

    //************************************** CascadeInstrument End **************************************

    @Override
    public void applyGuidAllocator( GuidAllocator guidAllocator ) {
        this.guidAllocator = guidAllocator;
    }

    @Override
    public GUID put( TreeNode treeNode ) {
        TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() );
        return operator.insert( treeNode );
    }

    @Override
    public boolean contains( GUID nodeGuid ) {
        return this.imperialTree.contains( nodeGuid );
    }

    @Override
    public TreeNode get( GUID guid, int depth ) {
        return this.getOperatorByGuid( guid ).get( guid, depth );
    }

    @Override
    public TreeNode getAsRootDepth( GUID guid ) {
        return this.getOperatorByGuid( guid ).getAsRootDepth( guid );
    }

    protected String getNS( GUID guid, String szSeparator ) {
        String path = this.imperialTree.getCachePath( guid );
        if ( path != null ) {
            return path;
        }
        ImperialTreeNode node = this.imperialTree.getNode( guid );
        if ( node == null ) {
            return null;
        }
        GUID owner = this.imperialTree.getOwner( guid );
        if ( owner == null ) {
            String assemblePath = this.getNodeName( node );
            while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ) {
                List<GUID> parentGuids = node.getParentGUIDs();
                for ( int i = 0; i < parentGuids.size(); ++i ) {
                    if ( parentGuids.get( i ) != null ) {
                        node = this.imperialTree.getNode( parentGuids.get( i ) );
                        break;
                    }
                }
                String nodeName = this.getNodeName( node );
                assemblePath = nodeName + szSeparator + assemblePath;
            }
            this.imperialTree.insertCachePath( guid, assemblePath );
            return assemblePath;
        }
        else {
            String assemblePath = this.getNodeName( node );
            while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ) {
                node = this.imperialTree.getNode( owner );
                String nodeName = this.getNodeName( node );
                assemblePath = nodeName + szSeparator + assemblePath;
                owner = this.imperialTree.getOwner( node.getGuid() );
            }
            this.imperialTree.insertCachePath( guid, assemblePath );
            return assemblePath;
        }
    }

    @Override
    public String getPath( GUID guid ) {
        return this.getNS( guid, this.kernelObjectConfig.getPathNameSeparator() );
    }

    @Override
    public String getFullName( GUID guid ) {
        return this.getNS( guid, this.kernelObjectConfig.getFullNameSeparator() );
    }

    protected TreeNodeOperator getOperatorByGuid( GUID guid ) {
        ImperialTreeNode node = this.imperialTree.getNode( guid );
        if ( node == null ) {
            return null;
        }
        TreeNode newInstance = (TreeNode) node.getType().newInstance( new Class[]{ this.getClass() }, this );
        return this.operatorFactory.getOperator( newInstance.getMetaType() );
    }

    @Override
    public TreeNode get( GUID guid ) {
        TreeNodeOperator operator = this.getOperatorByGuid( guid );
        if ( operator == null ) {
            return null;
        }
        return operator.get( guid );
    }

    /** Final Solution 20240929: the node type cannot be obtained here. */
    @Override
    public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {
        if ( szTargetSep != null ) {
            path = path.replace( szBadSep, szTargetSep );
        }
        String[] parts = this.pathResolver.segmentPathParts( path );
        List<String> resolvedParts = this.pathResolver.resolvePath( parts );
        path = this.pathResolver.assemblePath( resolvedParts );

        GUID guid = this.imperialTree.queryGUIDByPath( path );
        if ( guid != null ) {
            return guid;
        }
        guid = this.pathSelector.searchGUID( resolvedParts );
        if ( guid != null ) {
            this.imperialTree.insertCachePath( guid, path );
        }
        return guid;
    }

    @Override
    public GUID queryGUIDByPath( String path ) {
        return this.queryGUIDByNS( path, null, null );
    }

    @Override
    public void remove( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        TreeNode newInstance = (TreeNode) node.getType().newInstance();
        TreeNodeOperator operator = this.operatorFactory.getOperator( newInstance.getMetaType() );
        operator.purge( guid );
    }

    @Override
    public abstract Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep );

    public Object queryEntityHandle( String path ) {
        return this.queryEntityHandleByNS( path, null, null );
    }

    @Override
    public void remove( String path ) {
        Object handle = this.queryEntityHandle( path );
        if ( handle instanceof GUID ) {
            this.remove( (GUID) handle );
        }
    }

    @Override
    public List<TreeNode> getChildren( GUID guid ) {
        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren( guid );
        ArrayList<TreeNode> nodes = new ArrayList<>();
        for ( GUIDImperialTrieNode node : childNodes ) {
            TreeNode treeNode = this.get( node.getGuid() );
            nodes.add( treeNode );
        }
        return nodes;
    }

    @Override
    public List<GUID> fetchChildrenGuids( GUID guid ) {
        return this.imperialTree.fetchChildrenGuids( guid );
    }

    public EntityNode queryNodeByNS( String path, String szBadSep, String szTargetSep ) {
        Object ret = this.queryEntityHandleByNS( path, szBadSep, szTargetSep );
        if ( ret instanceof EntityNode ) {
            return (EntityNode) ret;
        }
        else if ( ret instanceof GUID ) {
            return this.get( (GUID) ret );
        }
        return null;
    }

    public TreeNode queryTreeNodeByNS( String path, String szBadSep, String szTargetSep ) {
        Object ret = this.queryEntityHandleByNS( path, szBadSep, szTargetSep );
        if ( ret instanceof TreeNode ) {
            return (TreeNode) ret;
        }
        else if ( ret instanceof GUID ) {
            return this.get( (GUID) ret );
        }
        return null;
    }

    @Override
    public List<TreeNode> fetchRoot() {
        List<GUID> guids = this.imperialTree.fetchRoot();
        ArrayList<TreeNode> treeNodes = new ArrayList<>();
        for ( GUID guid : guids ) {
            TreeNode treeNode = this.get( guid );
            treeNodes.add( treeNode );
        }
        return treeNodes;
    }

    @Override
    public void rename( GUID guid, String name ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        TreeNode newInstance = (TreeNode) node.getType().newInstance();
        TreeNodeOperator operator = this.operatorFactory.getOperator( newInstance.getMetaType() );
        operator.updateName( guid, name );
        this.imperialTree.removeCachePath( guid );
    }

    @Override
    public EntityNode queryNode( String path ) {
        return this.queryNodeByNS( path, null, null );
    }

    @Override
public TreeNode queryTreeNode( String path ) { return this.queryTreeNodeByNS( path, null, null ); } @Override public GUID queryGUIDByFN( String fullName ) { return this.queryGUIDByNS( fullName, this.kernelObjectConfig.getFullNameSeparator(), this.kernelObjectConfig.getPathNameSeparator() ); } private String getNodeName( ImperialTreeNode node ){ UOI type = node.getType(); TreeNode newInstance = (TreeNode)type.newInstance(); TreeNodeOperator operator = this.operatorFactory.getOperator(newInstance.getMetaType()); TreeNode treeNode = operator.get(node.getGuid()); return treeNode.getName(); } private boolean allNonNull( List list ) { return list.stream().noneMatch( Objects::isNull ); } @Override public GuidAllocator getGuidAllocator() { return this.guidAllocator; } @Override public ImperialTree getMasterTrieTree() { return this.imperialTree; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ArchReparseKOMTree.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.CascadeInstrument; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import com.pinecone.hydra.unit.imperium.operator.OperatorFactory; public abstract class ArchReparseKOMTree extends ArchKOMTree implements ReparseKOMTree { protected ReparseKOMTreeAddition mReparseKOM; public ArchReparseKOMTree( Processum superiorProcess, KOIMasterManipulator masterManipulator , OperatorFactory operatorFactory, KernelObjectConfig kernelObjectConfig, PathSelector pathSelector, KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ){ this( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, superiorPathScope, guidAllocator ); this.pathResolver = new KOPathResolver( kernelObjectConfig ); this.pathSelector = pathSelector; this.operatorFactory = operatorFactory; this.mReparseKOM = new GenericReparseKOMTreeAddition( this ); } public ArchReparseKOMTree ( Processum superiorProcess, KOIMasterManipulator masterManipulator ,KernelObjectConfig kernelObjectConfig, KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ){ super( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, superiorPathScope, guidAllocator ); } public ArchReparseKOMTree ( Processum superiorProcess, KOIMasterManipulator masterManipulator ,KernelObjectConfig kernelObjectConfig, KOMInstrument parent, String name, @Nullable GuidAllocator guidAllocator ){ this( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, CascadeInstrument.EmptySuperiorPathScope, guidAllocator ); } @Override public ReparseLinkNode queryReparseLinkByNS( String path, String szBadSep, String szTargetSep ) { return this.mReparseKOM.queryReparseLinkByNS( path, szBadSep, szTargetSep ); } @Override public ReparseLinkNode queryReparseLink( String path ) { return this.queryReparseLinkByNS( path, null, null ); } @Override public void affirmOwnedNode( GUID parentGuid, GUID childGuid ) { this.mReparseKOM.affirmOwnedNode( parentGuid, childGuid ); } 
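    /*
     * A usage sketch, not authoritative: every reparse-link operation on this tree is
     * delegated to mReparseKOM, which only the path-selector constructor above wires up
     * (to a GenericReparseKOMTreeAddition); subclasses using the other constructors are
     * presumably expected to supply their own addition. Hypothetical example paths:
     *
     *   tree.newLinkTag( "/apps/original", "/links", "shortcut" );      // create a link tag
     *   ReparseLinkNode ln = tree.queryReparseLink( "/links/shortcut" );
     */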
@Override public void newHardLink( GUID sourceGuid, GUID targetGuid ) { this.mReparseKOM.newHardLink( sourceGuid, targetGuid ); } @Override public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) { this.mReparseKOM.newLinkTag( originalGuid, dirGuid, tagName); } @Override public void updateLinkTag( GUID tagGuid, String tagName ) { this.mReparseKOM.updateLinkTag( tagGuid, tagName ); } @Override public void removeReparseLink( GUID guid ) { this.mReparseKOM.removeReparseLink( guid ); } @Override public void newLinkTag( String originalPath, String dirPath, String tagName ) { this.mReparseKOM.newLinkTag( originalPath, dirPath, tagName ); } /** ReparseLinkNode or GUID **/ @Override public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) { return this.mReparseKOM.queryEntityHandleByNS( path, szBadSep, szTargetSep ); } @Override public void remove( String path ) { this.mReparseKOM.remove( path ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ExpressInstrument.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.hydra.system.ko.runtime.CentralizedRuntimeInstrument; import com.pinecone.hydra.system.ko.runtime.DirectMappingTrieRuntimeInstrument; public interface ExpressInstrument extends CentralizedRuntimeInstrument, DirectMappingTrieRuntimeInstrument { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/GenericReparseKOMTreeAddition.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; public class GenericReparseKOMTreeAddition implements ReparseKOMTreeAddition { protected ArchKOMTree mKOMTree; protected ImperialTree mImperialTree; protected ReparsePointSelector mReparsePointSelector; public GenericReparseKOMTreeAddition( ArchKOMTree tree, ReparsePointSelector reparsePointSelector ) { this.mKOMTree = tree; this.mImperialTree = tree.getMasterTrieTree(); this.mReparsePointSelector = reparsePointSelector ; } public GenericReparseKOMTreeAddition( ArchKOMTree tree ) { this.mKOMTree = tree; this.mImperialTree = tree.getMasterTrieTree(); this.mReparsePointSelector = new ReparseLinkSelector( (MultiFolderPathSelector) this.mKOMTree.pathSelector ) ; } @Override public ReparseLinkNode queryReparseLinkByNS(String path, String szBadSep, String szTargetSep ) { if( szTargetSep != null ) { path = path.replace( szBadSep, szTargetSep ); } String[] parts = this.mKOMTree.pathResolver.segmentPathParts( path ); return this.mReparsePointSelector.searchLinkNode( parts ); } @Override public ReparseLinkNode queryReparseLink(String path) { return this.queryReparseLinkByNS( path, null, null ); } @Override public void affirmOwnedNode( GUID parentGuid, GUID childGuid ) { this.mImperialTree.affirmOwnedNode( childGuid, parentGuid ); } @Override public void newHardLink( GUID sourceGuid, GUID targetGuid ) { this.mImperialTree.newHardLink( sourceGuid, targetGuid ); } @Override public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) { this.mImperialTree.newLinkTag( originalGuid, dirGuid, tagName, this.mKOMTree ); } @Override public void updateLinkTag( GUID tagGuid, String tagName ) { this.mImperialTree.updateLinkTagName( tagGuid, tagName ); } 
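    /*
     * Sketch of the path-based tagging flow below (paths are hypothetical): both
     * endpoints are resolved to GUIDs first, and the tag is inserted only when no
     * link of that name already exists under the target directory, so repeating
     * the call is a harmless no-op:
     *
     *   addition.newLinkTag( "/data/source", "/tags", "latest" );   // first call inserts
     *   addition.newLinkTag( "/data/source", "/tags", "latest" );   // duplicate, skipped
     */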
@Override public void removeReparseLink( GUID guid ) { this.mImperialTree.removeReparseLink( guid ); } @Override public void newLinkTag(String originalPath, String dirPath, String tagName) { GUID originalGuid = this.mKOMTree.queryGUIDByPath( originalPath ); GUID dirGuid = this.mKOMTree.queryGUIDByPath( dirPath ); if( this.mImperialTree.getOriginalGuid( tagName, dirGuid ) == null ) { this.mImperialTree.newLinkTag( originalGuid, dirGuid, tagName, this.mKOMTree ); } } /** ReparseLinkNode or GUID **/ @Override public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) { if( szTargetSep != null ) { path = path.replace( szBadSep, szTargetSep ); } String[] parts = this.mKOMTree.pathResolver.segmentPathParts( path ); return this.mReparsePointSelector.search( parts ); } @Override public void remove( String path ) { Object handle = this.mKOMTree.queryEntityHandle( path ); if( handle instanceof GUID ) { this.mKOMTree.remove( (GUID) handle ); } else if( handle instanceof ReparseLinkNode ) { ReparseLinkNode linkNode = (ReparseLinkNode) handle; this.removeReparseLink( linkNode.getTagGuid() ); } } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/KOMInstrument.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.ko.CascadeKOTreeInstrument; import com.pinecone.hydra.system.ko.QueryableInstrument; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.util.Collection; import java.util.List; public interface KOMInstrument extends CascadeKOTreeInstrument, QueryableInstrument { @Override KOMInstrument parent(); void applyGuidAllocator( GuidAllocator guidAllocator ); @Override default void setTargetingName( String name ) { CascadeKOTreeInstrument.super.setTargetingName( name ); } @Override String getPath( GUID objectGuid ); @Override String querySystemKernelObjectPath( GUID objectGuid ) ; String getFullName( GUID objectGuid ); @Override GUID queryGUIDByPath( String path ); GUID queryGUIDByFN ( String fullName ); default GUID assertPath( String path, String pathType ) throws IllegalArgumentException { GUID guid = this.queryGUIDByPath( path ); if( guid == null ) { throw new IllegalArgumentException( "Undefined " + pathType + " '" + path + "'" ); } return guid; } default GUID assertPath( String path ) throws IllegalArgumentException { return this.assertPath( path, "path" ); } boolean contains( GUID nodeGuid ); GUID put( TreeNode treeNode ); TreeNode get( GUID objectGuid ); GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ); TreeNode get( GUID guid, int depth ); TreeNode getAsRootDepth( GUID guid ); void remove( GUID guid ); void remove( String path ); Collection getChildren( GUID guid ); Collection fetchChildrenGuids( GUID guid ); Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ); @Override EntityNode queryNode( String path ); TreeNode queryTreeNode( String path ); List fetchRoot(); void rename( GUID guid, String name ); Processum getSuperiorProcess(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/KOMSelector.java ================================================ package 
com.pinecone.hydra.system.ko.kom;

import java.util.List;

import com.pinecone.framework.system.prototype.Pinenut;

/**
 * Kernel Object Model
 * Analogous to the Document Object Model (DOM).
 */
public interface KOMSelector extends Pinenut {
    // Returns the result serialized as JSON.
    Object querySelectorJ( String szSelector );

    Object querySelector( String szSelector );

    List querySelectorAll( String szSelector );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/MultiFolderPathSelector.java
================================================
package com.pinecone.hydra.system.ko.kom;

import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.JSON;
import com.pinecone.framework.util.name.path.PathResolver;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;
import com.pinecone.hydra.unit.imperium.ImperialTree;

public class MultiFolderPathSelector implements PathSelector {
    protected PathResolver          pathResolver;
    protected ImperialTree          imperialTree;
    protected GUIDNameManipulator[] dirManipulators;
    protected GUIDNameManipulator[] fileManipulators;

    public MultiFolderPathSelector( PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans ) {
        this.pathResolver     = pathResolver;
        this.imperialTree     = trieTree;
        this.dirManipulators  = dirMans;
        this.fileManipulators = fileMans;
    }

    @Override
    public GUID searchGUID( String[] parts ) {
        return this.searchGUID( parts, null );
    }

    @Override
    public GUID searchGUID( String[] parts, @Nullable String[] lpResolvedPath ) {
        List<String> resolvedParts = this.pathResolver.resolvePath( parts );
        if ( lpResolvedPath != null ) {
            lpResolvedPath[ 0 ] = this.pathResolver.assemblePath( resolvedParts );
        }
        return this.searchGUID( resolvedParts );
    }

    @Override
    public GUID searchGUID( List<String> resolvedParts ) {
        //return dfsSearchGUID(fileMan, dirMan, resolvedParts, 0, null);
        return (GUID) this.dfsSearch( resolvedParts );
    }

    @Override
    public GUID searchGUID( GUID parentId, String[] parts ) {
        return this.searchGUID( parentId, parts, null );
    }

    @Override
    public GUID searchGUID( GUID parentId, String[] parts, @Nullable String[] lpResolvedPath ) {
        List<String> resolvedParts = this.pathResolver.resolvePath( parts );
        if ( lpResolvedPath != null ) {
            lpResolvedPath[ 0 ] = this.pathResolver.assemblePath( resolvedParts );
        }
        return this.searchGUID( parentId, resolvedParts );
    }

    @Override
    public GUID searchGUID( GUID parentId, List<String> resolvedParts ) {
        //return dfsSearchGUID(fileMan, dirMan, resolvedParts, 0, null);
        return (GUID) this.dfsSearch( parentId, resolvedParts );
    }

    @Override
    public Object querySelector( String szSelector ) {
        return this.searchGUID( this.pathResolver.resolvePathParts( szSelector ) );
    }

    @Override
    public List querySelectorAll( String szSelector ) {
        return List.of( this.querySelector( szSelector ) );
    }

    @Override
    public Object querySelectorJ( String szSelector ) {
        return JSON.stringify( this.querySelector( szSelector ) );
    }

    protected Object dfsSearch( List<String> parts ) {
        return this.dfsSearch( null, parts );
    }

    /** Iterative DFS **/
    protected Object dfsSearch( GUID parentId, List<String> parts ) {
        Stack<StandardPathSelector.SearchArgs> stack = new Stack<>();
        stack.push( new StandardPathSelector.SearchArgs( parentId, 0 ) );

        while ( !stack.isEmpty() ) {
            StandardPathSelector.SearchArgs currentArgs = stack.pop();
            int  depth      = currentArgs.depth;
            GUID parentGuid = currentArgs.parentGuid;

            // If we've reached the last part, try to match the current part with all file manipulators.
            // For the first part, the number of path parts decides which manipulators to query.
            if ( depth == parts.size() ) {
                continue;
            }

            String currentPart = parts.get( depth );
            List<GUID> guids;
            if ( depth == 0 ) {
                if ( parts.size() > 1 ) {
                    // Case1: If more than one part, first part can only be a directory.
                    guids = this.searchDirAndLinksFirstCase( currentPart );
                }
                else {
                    // Case2: If there's only one part, it could be either file or directory,
                    // so query all manipulators. [And the match must be a root.]
                    guids = this.fetchAllGuidsRootCase( currentPart );
                }
            }
            else {
                // Case3: For middle and last parts, retrieve children GUIDs from the imperial trie tree.
                guids = this.imperialTree.fetchChildrenGuids( parentGuid );
            }

            if ( guids == null || guids.isEmpty() ) {
                continue;
            }

            for ( GUID guid : guids ) {
                Object blocker = this.tryTerminationBlock( currentPart, guid );
                if ( blocker != null ) {
                    return blocker;
                }
                if ( this.isGuidMatchingPartName( guid, currentPart, depth, parts.size() ) ) {
                    if ( depth == parts.size() - 1 ) {
                        return this.beforeDFSTermination( currentPart, guid );
                    }
                    stack.push( new StandardPathSelector.SearchArgs( guid, depth + 1 ) );
                }
            }
        }
        return null;
    }

    /** Recursive DFS, deprecated; kept for archaeology. **/
    @Deprecated
    protected Object dfsSearch( List<String> parts, int depth, GUID parentGuid ) {
        String currentPart = parts.get( depth );
        List<GUID> guids;
        if ( depth == 0 ) {
            if ( parts.size() > 1 ) {
                // Case1: If more than one part, first part can only be a directory.
                guids = this.searchDirAndLinksFirstCase( currentPart );
            }
            else {
                // Case2: If there's only one part, it could be either file or directory,
                // so query all manipulators. [And the match must be a root.]
                guids = /*this.*/fetchAllGuidsRootCase( currentPart );
            }
        }
        else {
            // Case3: For middle and last parts, retrieve children GUIDs from the imperial trie tree.
            guids = this.imperialTree.fetchChildrenGuids( parentGuid );
        }

        if ( guids == null || guids.isEmpty() ) {
            return null;
        }

        // Indexing method: traverse all possible GUIDs and continue to recurse downwards.
        for ( GUID guid : guids ) {
            // Using index to find.
            Object blocker = this.tryTerminationBlock( currentPart, guid );
            if ( blocker != null ) {
                return blocker;
            }
            if ( this.isGuidMatchingPartName( guid, currentPart, depth, parts.size() ) ) {
                if ( depth == parts.size() - 1 ) {
                    return this.beforeDFSTermination( currentPart, guid );
                }
                Object result = this.dfsSearch( parts, depth + 1, guid );
                if ( result != null ) {
                    return result;
                }
            }
        }
        return null;
    }

    protected Object beforeDFSTermination( String currentPart, GUID guid ) {
        return guid;
    }

    protected Object tryTerminationBlock( String currentPart, GUID guid ) {
        return null;
    }

    protected boolean checkPartInAllManipulators( GUID guid, String partName ) {
        for ( GUIDNameManipulator manipulator : this.fileManipulators ) {
            List<GUID> guids = manipulator.getGuidsByNameID( partName, guid );
            if ( guids != null && !guids.isEmpty() ) {
                return true;
            }
        }
        List<GUID> guids = this.searchDirAndLinks( guid, partName );
        return guids != null && !guids.isEmpty();
    }

    protected boolean isGuidMatchingPartName( GUID guid, String partName, int depth, int nParts ) {
        // Middle parts match directories only; the last part matches both files and directories.
        if ( depth == nParts - 1 ) {
            return this.checkPartInAllManipulators( guid, partName );
        }
        else {
            // Middle part: Directory only.
            //List guids = this.dirManipulator.getGuidsByNameID( partName, guid );
            List<GUID> guids = this.searchDirAndLinks( guid, partName );
            return guids != null && !guids.isEmpty();
        }
    }

    protected List<GUID> searchLinks( GUID guid, String partName ) {
        GUID linkGuid = this.imperialTree.getOriginalGuidByNodeGuid( partName, guid );
        if ( linkGuid != null ) {
            return List.of( linkGuid );
        }
        return null;
    }

    protected List<GUID> searchDirAndLinks( GUID guid, String partName ) {
        for ( GUIDNameManipulator dirMans : this.dirManipulators ) {
            List<GUID> guids = dirMans.getGuidsByNameID( partName, guid );
            if ( guids != null && !guids.isEmpty() ) {
                return guids;
            }
        }
        return this.searchLinks( guid, partName );
    }

    protected List<GUID> searchLinksFirstCase( String partName ) {
        return this.imperialTree.fetchOriginalGuidRoot( partName );
    }

    protected List<GUID> searchDirAndLinksFirstCase( String partName ) {
        for ( GUIDNameManipulator dirMans : this.dirManipulators ) {
            List<GUID> guids = dirMans.getGuidsByName( partName );
            if ( guids != null && !guids.isEmpty() ) {
                return guids;
            }
        }
        return this.searchLinksFirstCase( partName );
    }

    protected List<GUID> fetchDirsAllGuids( String partName ) {
        if ( this.dirManipulators.length > 0 ) {
            List<GUID> guids = this.dirManipulators[ 0 ].getGuidsByName( partName );
            for ( int i = 1; i < this.dirManipulators.length; ++i ) {
                guids.addAll( this.dirManipulators[ i ].getGuidsByName( partName ) );
            }
            guids.removeIf( guid -> !this.imperialTree.isRoot( guid ) );
            return guids;
        }
        return new ArrayList<>();
    }

    protected void fetchAllOriginalGuidsRootCase( List<GUID> guids, String partName ) {
        guids.addAll( this.imperialTree.fetchOriginalGuidRoot( partName ) );
    }

    protected List<GUID> fetchAllGuidsRootCase( String partName ) {
        List<GUID> guids = this.fetchDirsAllGuids( partName );
        // Notice: Critical error, querying a root element must check that it is actually the root.
        for ( GUIDNameManipulator manipulator : this.fileManipulators ) {
            List<GUID> gs = manipulator.getGuidsByName( partName );
            for ( GUID guid : gs ) {
                if ( this.imperialTree.isRoot( guid ) ) {
                    guids.add( guid );
                }
            }
        }
        this.fetchAllOriginalGuidsRootCase( guids, partName );
        return guids;
    }

    static class SearchArgs {
        GUID parentGuid;
        int  depth;

        SearchArgs( GUID parentGuid, int depth ) {
            this.parentGuid = parentGuid;
            this.depth      = depth;
        }
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/PathSelector.java
================================================
package com.pinecone.hydra.system.ko.kom;

import java.util.List;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.id.GUID;

public interface PathSelector extends KOMSelector {
    GUID searchGUID( String[] parts );
    GUID searchGUID( String[] parts, @Nullable String[] lpResolvedPath );
    GUID searchGUID( List<String> resolvedParts );
    GUID searchGUID( GUID parentID, String[] parts );
    GUID searchGUID( GUID parentID, String[] parts, @Nullable String[] lpResolvedPath );
    GUID searchGUID( GUID parentID, List<String> resolvedParts );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ProxiedKOMMountPointHandle.java
================================================
package com.pinecone.hydra.system.ko.kom;

import java.util.Collection;
import java.util.List;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.system.ko.CascadeInstrument;
import com.pinecone.hydra.system.ko.KernelObjectConfig;
import com.pinecone.hydra.system.ko.handle.ArchKHandle;
import com.pinecone.hydra.system.ko.handle.KOMMountPointHandle;
import com.pinecone.hydra.unit.imperium.ImperialTree;
import com.pinecone.hydra.unit.imperium.entity.EntityNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public class ProxiedKOMMountPointHandle extends ArchKHandle implements KOMMountPointHandle {
    protected KOMInstrument mWarpedInstrument;

    public ProxiedKOMMountPointHandle( String treeNodeName, GUID treeNodeGuid, KOMInstrument warpedInstrument ) {
        super( treeNodeName, treeNodeGuid );
        this.mWarpedInstrument = warpedInstrument;
    }

    @Override public void applyGuidAllocator( GuidAllocator guidAllocator )  { this.mWarpedInstrument.applyGuidAllocator( guidAllocator ); }
    @Override public KOMInstrument revealWrapped()                           { return this.mWarpedInstrument; }
    @Override public KOMInstrument parent()                                  { return this.mWarpedInstrument.parent(); }
    @Override public void setParent( CascadeInstrument parent )              { this.mWarpedInstrument.setParent( parent ); }
    @Override public Namespace getTargetingName()                            { return this.mWarpedInstrument.getTargetingName(); }
    @Override public void setTargetingName( Namespace name )                 { this.mWarpedInstrument.setTargetingName( name ); }
    @Override public String getSuperiorPathScope()                           { return this.mWarpedInstrument.getSuperiorPathScope(); }
    @Override public void applySuperiorPathScope( String superiorPathScope ) { this.mWarpedInstrument.applySuperiorPathScope( superiorPathScope ); }
    @Override public String getPath( GUID objectGuid )                       { return this.mWarpedInstrument.getPath( objectGuid ); }
    @Override public String querySystemKernelObjectPath( GUID objectGuid )   { return this.mWarpedInstrument.querySystemKernelObjectPath( objectGuid ); }
    @Override public String
getFullName( GUID objectGuid ) { return this.mWarpedInstrument.getFullName( objectGuid ); } @Override public GUID queryGUIDByPath( String path ) { return this.mWarpedInstrument.queryGUIDByPath( path ); } @Override public GUID queryGUIDByFN( String fullName ) { return this.mWarpedInstrument.queryGUIDByFN( fullName ); } @Override public boolean contains( GUID nodeGuid ) { return this.mWarpedInstrument.contains( nodeGuid ); } @Override public GUID put( TreeNode treeNode ) { return this.mWarpedInstrument.put( treeNode ); } @Override public TreeNode get( GUID objectGuid ) { return this.mWarpedInstrument.get( objectGuid ); } @Override public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) { return this.mWarpedInstrument.queryGUIDByNS( path, szBadSep, szTargetSep ); } @Override public TreeNode get( GUID guid, int depth ) { return this.mWarpedInstrument.get( guid, depth ); } @Override public TreeNode getAsRootDepth( GUID guid ) { return this.mWarpedInstrument.getAsRootDepth( guid ); } @Override public void remove( GUID guid ) { this.mWarpedInstrument.remove( guid ); } @Override public void remove( String path ) { this.mWarpedInstrument.remove( path ); } @Override public Collection getChildren( GUID guid ) { return this.mWarpedInstrument.getChildren( guid ); } @Override public Collection fetchChildrenGuids( GUID guid ) { return this.mWarpedInstrument.fetchChildrenGuids( guid ); } @Override public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) { return this.mWarpedInstrument.queryEntityHandleByNS( path, szBadSep, szTargetSep ); } @Override public EntityNode queryNode( String path ) { return this.mWarpedInstrument.queryNode( path ); } @Override public TreeNode queryTreeNode( String path ) { return this.mWarpedInstrument.queryTreeNode( path ); } @Override public List fetchRoot() { return this.mWarpedInstrument.fetchRoot(); } @Override public void rename( GUID guid, String name ) { this.mWarpedInstrument.rename( guid, name ); } @Override public Processum getSuperiorProcess() { return this.mWarpedInstrument.getSuperiorProcess(); } @Override public GuidAllocator getGuidAllocator() { return this.mWarpedInstrument.getGuidAllocator(); } @Override public ImperialTree getMasterTrieTree() { return this.mWarpedInstrument.getMasterTrieTree(); } @Override public KernelObjectConfig getConfig() { return this.mWarpedInstrument.getConfig(); } @Override public String getName() { return this.mszTreeNodeName; } @Override public GUID getGuid() { return this.mTreeNodeGuid; } @Override public String toJSONString() { return this.mWarpedInstrument.toJSONString(); } @Override public String toString() { return this.mWarpedInstrument.toString(); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparseKOMTree.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; public interface ReparseKOMTree extends KOMInstrument { void newLinkTag( String originalPath, String dirPath, String tagName ); void removeReparseLink( GUID guid ); void affirmOwnedNode( GUID parentGuid, GUID childGuid ); void newHardLink( GUID sourceGuid, GUID targetGuid ); void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ); void updateLinkTag( GUID tagGuid, String tagName ); ReparseLinkNode queryReparseLinkByNS(String path, String szBadSep, String szTargetSep ); /** 
ReparseLinkNode or GUID **/ Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ); ReparseLinkNode queryReparseLink( String path ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparseKOMTreeAddition.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; public interface ReparseKOMTreeAddition extends Pinenut { ReparseLinkNode queryReparseLinkByNS( String path, String szBadSep, String szTargetSep ) ; ReparseLinkNode queryReparseLink( String path ); void affirmOwnedNode( GUID parentGuid, GUID childGuid ) ; void newHardLink( GUID sourceGuid, GUID targetGuid ) ; void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) ; void updateLinkTag( GUID tagGuid, String tagName ) ; void removeReparseLink( GUID guid ) ; void newLinkTag( String originalPath, String dirPath, String tagName ) ; void remove( String path ); Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparseLinkSelector.java ================================================ package com.pinecone.hydra.system.ko.kom; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; public class ReparseLinkSelector extends MultiFolderPathSelector implements ReparsePointSelector { public ReparseLinkSelector( MultiFolderPathSelector pathSelector ) { super( pathSelector.pathResolver, pathSelector.imperialTree, pathSelector.dirManipulators, pathSelector.fileManipulators ); } public ReparseLinkSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, new GUIDNameManipulator[]{ dirMan }, fileMans ); } public ReparseLinkSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, dirMans, fileMans ); } @Override public Object search( String[] parts ) { List resolvedParts = this.pathResolver.resolvePath(parts); return this.dfsSearch( resolvedParts ); } @Override public ReparseLinkNode searchLinkNode( String[] parts ) { Object result = this.search( parts ); if( result instanceof ReparseLinkNode ) { return (ReparseLinkNode) result; } return null; } @Override protected Object beforeDFSTermination( String currentPart, GUID guid ) { ReparseLinkNode reparseLinkNode = this.imperialTree.getReparseLinkNodeByNodeGuid( currentPart, guid ); if ( reparseLinkNode != null ) { return reparseLinkNode; } return guid; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparsePointSelector.java ================================================ package com.pinecone.hydra.system.ko.kom; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; public interface ReparsePointSelector extends PathSelector { ReparseLinkNode searchLinkNode( String[] parts ); Object search( String[] parts ); } 
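A small dispatch sketch (not a repository file), assuming a fully wired ReparsePointSelector instance built elsewhere with real manipulators and tree: search(...) may surface either a plain GUID or a ReparseLinkNode (ReparseLinkSelector.beforeDFSTermination above prefers the link node when one exists), so callers branch on the runtime type.

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.system.ko.kom.ReparsePointSelector;
import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;

final class ReparseSearchDispatchSketch {
    // `selector` and `pathParts` are assumptions; any concrete ReparsePointSelector works.
    static void describe( ReparsePointSelector selector, String[] pathParts ) {
        Object hit = selector.search( pathParts );
        if ( hit instanceof ReparseLinkNode ) {
            // Terminated on a link tag; the tag GUID identifies the reparse point.
            System.out.println( "Reparse link tag: " + ((ReparseLinkNode) hit).getTagGuid() );
        }
        else if ( hit instanceof GUID ) {
            System.out.println( "Plain node: " + hit );
        }
        else {
            System.out.println( "No match for the given path parts." );
        }
    }
}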
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/SimpleMultiFolderPathSelector.java ================================================ package com.pinecone.hydra.system.ko.kom; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.unit.imperium.ImperialTree; public class SimpleMultiFolderPathSelector extends MultiFolderPathSelector { public SimpleMultiFolderPathSelector( PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, dirMans, fileMans ); } @Override protected List searchLinks ( GUID guid, String partName ) { return null; } @Override protected List searchLinksFirstCase ( String partName ) { return null; } @Override protected void fetchAllOriginalGuidsRootCase( List guids, String partName ) { } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/SimplePathSelector.java ================================================ package com.pinecone.hydra.system.ko.kom; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.unit.imperium.ImperialTree; public class SimplePathSelector extends StandardPathSelector { public SimplePathSelector( PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, dirMan, fileMans ); } @Override protected List searchLinks ( GUID guid, String partName ) { return null; } @Override protected List searchLinksFirstCase ( String partName ) { return null; } @Override protected void fetchAllOriginalGuidsRootCase( List guids, String partName ) { } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/StandardPathSelector.java ================================================ package com.pinecone.hydra.system.ko.kom; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.unit.imperium.ImperialTree; public class StandardPathSelector extends MultiFolderPathSelector implements PathSelector { public StandardPathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, new GUIDNameManipulator[]{ dirMan }, fileMans ); } public GUIDNameManipulator getDirManipulator() { return this.dirManipulators[ 0 ]; } @Override protected List searchDirAndLinks ( GUID guid, String partName ) { List guids = this.dirManipulators[ 0 ].getGuidsByNameID( partName, guid ); if( guids != null && !guids.isEmpty() ) { return guids; } GUID linkGuid = this.imperialTree.getOriginalGuidByNodeGuid( partName, guid ); if( linkGuid != null ) { return List.of( linkGuid ); } return null; } @Override protected List searchDirAndLinksFirstCase ( String partName ) { List guids = this.dirManipulators[ 0 ].getGuidsByName( partName ); if( guids != null && !guids.isEmpty() ) { return guids; } return this.imperialTree.fetchOriginalGuidRoot( partName ); } @Override 
protected List fetchDirsAllGuids(String partName ) { List guids = this.dirManipulators[ 0 ].getGuidsByName( partName ); guids.removeIf( guid -> !this.imperialTree.isRoot( guid ) ); return guids; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/meta/ElementObject.java ================================================ package com.pinecone.hydra.system.ko.meta; import com.pinecone.hydra.system.ko.KernelObject; import com.pinecone.hydra.unit.imperium.entity.ElementumNode; public interface ElementObject extends ElementumNode, KernelObject { String FunctionName = ElementObject.class.getSimpleName().replace( "Object", "" ); @Override default String objectFunctionName() { return FunctionName; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/ArchDirectMappingTrieRuntimeKOMTree.java ================================================ package com.pinecone.hydra.system.ko.runtime; import java.util.Collection; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.unit.trie.DirectoryNode; import com.pinecone.framework.unit.trie.TrieNode; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public abstract class ArchDirectMappingTrieRuntimeKOMTree extends ArchRuntimeKOMTree implements DirectMappingTrieRuntimeInstrument { public ArchDirectMappingTrieRuntimeKOMTree( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, superiorPathScope, kernelObjectConfig, guidAllocator ); } @Override public boolean hasOwnProperty( Object elm ) { return this.mNodeIndex.hasOwnProperty( elm ); } @Override public boolean containsKey( Object key ) { return this.queryGUIDByPath( key.toString() ) != null; } @Override public TrieNode getOwnProperty( String path ) { return this.mNodeIndex.queryNode( path ); } @Override public DirectoryNode fetchOwnChildren( String path ) { TrieNode self = this.getOwnProperty( path ); if ( self == null ) { return null; } return self.evinceDirectory(); } @Override public Collection fetchOwnMappingPath() { return this.mNodeIndex.keySet(); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/ArchRuntimeKOMTree.java ================================================ package com.pinecone.hydra.system.ko.runtime; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.unit.Units; import com.pinecone.framework.unit.trie.TrieMap; import com.pinecone.framework.unit.trie.UniTrieMaptron; import com.pinecone.framework.util.CollectionUtils; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.centrum.UniformCentralSystem; import com.pinecone.hydra.system.ko.CascadeInstrument; import com.pinecone.hydra.system.ko.KernelObjectConfig; import 
com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle; import com.pinecone.hydra.system.ko.handle.ObjectTreeGUIDAddressingSectionHandle; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.unit.imperium.ArchUniformInstitutionalizedInstrument; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.ulf.util.guid.GUIDs; public abstract class ArchRuntimeKOMTree extends ArchUniformInstitutionalizedInstrument implements RuntimeInstrument { protected Namespace mThisNamespace; protected KOMInstrument mParentInstrument; protected TrieMap mNodeIndex; protected Map<GUID, RuntimeTreeNode> mNodeTable; protected Processum superiorProcess; protected RuntimeSystem superiorSystem; protected GuidAllocator guidAllocator; protected DynamicFactory dynamicFactory; protected KernelObjectConfig kernelObjectConfig; public ArchRuntimeKOMTree( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) { super( superiorPathScope ); this.kernelObjectConfig = kernelObjectConfig; this.mNodeIndex = new UniTrieMaptron<>( ConcurrentHashMap::new ); this.mNodeTable = new ConcurrentHashMap<>(); this.superiorProcess = superiorProcess; this.guidAllocator = guidAllocator; if ( this.superiorProcess != null ) { if ( this.superiorProcess instanceof RuntimeSystem ) { this.superiorSystem = (RuntimeSystem) this.superiorProcess; } else { this.superiorSystem = this.superiorProcess.parentSystem(); } if ( this.guidAllocator == null && this.superiorSystem instanceof UniformCentralSystem ) { UniformCentralSystem system = (UniformCentralSystem) this.superiorSystem; this.guidAllocator = system.getSystemGuidAllocator(); } } if ( this.guidAllocator == null ) { throw new IllegalArgumentException( "GUIDAllocator is undefined."
); } } //************************************** CascadeInstrument **************************************
@Override public KOMInstrument parent() { return this.mParentInstrument; } @Override public Processum getSuperiorProcess() { return this.superiorProcess; } @Override public void setParent( CascadeInstrument parent ) { this.mParentInstrument = (KOMInstrument) parent; } @Override public Namespace getTargetingName() { return this.mThisNamespace; } @Override public void setTargetingName( Namespace name ) { this.mThisNamespace = name; } //************************************** CascadeInstrument End **************************************
@Override public void applyGuidAllocator( GuidAllocator guidAllocator ) { this.guidAllocator = guidAllocator; } @Override public GuidAllocator getGuidAllocator() { return this.guidAllocator; } @Override public String getSuperiorPathScope() { return this.superiorPathScope; } @Override public void applySuperiorPathScope( String superiorPathScope ) { this.superiorPathScope = superiorPathScope; } @Override @SuppressWarnings( "unchecked" ) public Collection fetchTreeNodes() { return (Collection) this.mNodeTable.values(); } @Override public KOMInstrument implicated( GUID objectGuid ) { RuntimeTreeNode treeNode = this.mNodeTable.get( objectGuid ); if ( treeNode == null ) { for( RuntimeTreeNode node : this.mNodeTable.values() ) { if ( node.treeNode instanceof KOMInstrument ) { KOMInstrument instrument = (KOMInstrument) node.treeNode; TreeNode sn = instrument.get( objectGuid ); if ( sn != null ) { return instrument; } } } return null; } if ( treeNode.treeNode instanceof KOMInstrument ) { return (KOMInstrument) treeNode.treeNode; } return null; } @Override public KernelObjectConfig getConfig() { return this.kernelObjectConfig; } @Override public String getPath( GUID guid ) { RuntimeTreeNode treeNode = this.mNodeTable.get( guid ); if ( treeNode == null ) { for( RuntimeTreeNode node : this.mNodeTable.values() ) { if ( node.treeNode instanceof KOMInstrument ) { KOMInstrument instrument = (KOMInstrument) node.treeNode; String path = instrument.getPath( guid ); if ( StringUtils.isNoneEmpty( path ) ) { return path; } } } return null; } return treeNode.getPath(); } @Override public String getFullName( GUID guid ) { return this.getPath( guid ); } @Override public GUID queryGUIDByPath( String path ) { return this.queryGUIDByPathForward( path ); } protected GUID queryGUIDByPathBackward( String path ) { TreeNode treeNode = this.mNodeIndex.get( path ); if ( treeNode != null ) { return treeNode.getGuid(); } String[] split = path.split( this.kernelObjectConfig.getPathNameSepRegex() ); for( int i = split.length - 2; i >= 0; --i ) { TreeNode node = this.mNodeIndex.get( this.concatenateFullPathBySegments( split, 0, i ) ); if( node instanceof RuntimeTreeNode ) { RuntimeTreeNode rtn = (RuntimeTreeNode) node; if ( rtn.treeNode instanceof ObjectTreeGUIDAddressingSectionHandle ) { ObjectTreeGUIDAddressingSectionHandle pointHandle = (ObjectTreeGUIDAddressingSectionHandle) rtn.treeNode; GUID guid = pointHandle.queryGUIDByPath( this.concatenateFullPathBySegments( split, i + 1, split.length - 1 ) ); //this.mNodeIndex.put( path, pointHandle.get(guid) );
return guid; } } } return null; } protected GUID queryGUIDByPathForward( String path ) { TreeNode treeNode = this.mNodeIndex.get( path ); if ( treeNode != null ) { return treeNode.getGuid(); } String[] split = path.split( this.kernelObjectConfig.getPathNameSepRegex() ); for( int i = 0; i < split.length; ++i ) { TreeNode node = this.mNodeIndex.get( this.concatenateFullPathBySegments( split, 0, i ) ); if( node instanceof RuntimeTreeNode ) { RuntimeTreeNode rtn = (RuntimeTreeNode) node; if ( rtn.treeNode instanceof ObjectTreeGUIDAddressingSectionHandle ) { ObjectTreeGUIDAddressingSectionHandle pointHandle = (ObjectTreeGUIDAddressingSectionHandle) rtn.treeNode; GUID guid = pointHandle.queryGUIDByPath( this.concatenateFullPathBySegments( split, i + 1, split.length - 1 ) ); //this.mNodeIndex.put( path, pointHandle.get(guid) );
return guid; } } } return null; }
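/*
 * Resolution sketch (explanatory comment added for clarity; not in the original source):
 * given path = "hydra/express/pkg", the forward scan above probes the prefixes
 * "hydra" and then "hydra/express"; if a prefix is indexed as a RuntimeTreeNode
 * wrapping an ObjectTreeGUIDAddressingSectionHandle, the remaining suffix "pkg"
 * is delegated to that handle's queryGUIDByPath. queryGUIDByPathBackward performs
 * the same probe, but starting from the longest proper prefix and working down.
 */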
protected String concatenateFullPathBySegments( String[] segments, int start, int end ) { StringBuilder stringBuilder = new StringBuilder(); for( int i = start; i <= end; ++i ) { if ( stringBuilder.length() > 0 ) { stringBuilder.append( this.kernelObjectConfig.getPathNameSeparator() ); } stringBuilder.append( segments[ i ] ); } return stringBuilder.toString(); } @Override public GUID queryGUIDByFN( String fullName ) { return this.queryGUIDByPath( fullName ); } @Override public boolean contains( GUID nodeGuid ) { return this.mNodeTable.containsKey( nodeGuid ); } @Override public GUID put( TreeNode treeNode ) throws IllegalArgumentException { RuntimeTreeNode runtimeTreeNode; if ( treeNode instanceof RuntimeTreeNode ) { runtimeTreeNode = (RuntimeTreeNode) treeNode; } else { throw new IllegalArgumentException( "TreeNode being put must be a `RuntimeTreeNode`." ); } this.mNodeTable.put( treeNode.getGuid(), runtimeTreeNode ); return treeNode.getGuid(); } @Override public TreeNode add( String mountPointPath, TreeNode that ) { RuntimeTreeNode runtimeTreeNode; if ( that instanceof RuntimeTreeNode ) { runtimeTreeNode = (RuntimeTreeNode) that; } else { runtimeTreeNode = new RuntimeTreeNode( that, mountPointPath ); } this.mNodeTable.put( that.getGuid(), runtimeTreeNode ); this.mNodeIndex.put( mountPointPath, runtimeTreeNode ); return that; } @Override public TreeNode get( GUID guid ) { return this.mNodeTable.get( guid ); } @Override public TreeNode get( GUID guid, int depth ) { return this.mNodeTable.get( guid ); } @Override public TreeNode getAsRootDepth( GUID guid ) { return this.mNodeTable.get( guid ); } @Override public void remove( GUID guid ) { RuntimeTreeNode treeNode = this.mNodeTable.get( guid ); if ( treeNode != null ) { this.mNodeIndex.remove( treeNode.getPath() ); this.mNodeTable.remove( guid ); } } @Override public void remove( String path ) { GUID guid = this.queryGUIDByPath( path ); if ( guid != null ) { this.remove( guid ); } } @Override public void rename( GUID guid, String name ) { throw new UnsupportedOperationException(); } @Override public Collection getChildren( GUID guid ) { for( RuntimeTreeNode node : this.mNodeTable.values() ) { if ( node.treeNode instanceof KOMInstrument ) { KOMInstrument instrument = (KOMInstrument) node.treeNode; Collection cs = instrument.getChildren( guid ); if ( CollectionUtils.isNoneEmpty( cs ) ) { return cs; } } } return Units.emptyList(); } @Override public Collection fetchChildrenGuids( GUID guid ) { for( RuntimeTreeNode node : this.mNodeTable.values() ) { if ( node.treeNode instanceof KOMInstrument ) { KOMInstrument instrument = (KOMInstrument) node.treeNode; Collection cs = instrument.fetchChildrenGuids( guid ); if ( CollectionUtils.isNoneEmpty( cs ) ) { return cs; } } } return Units.emptyList(); } @Override public List fetchRoot() { throw new UnsupportedOperationException(); } @Override public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) { if( szTargetSep != null ) { path = path.replace( szBadSep,
szTargetSep ); } TreeNode treeNode = this.mNodeIndex.get( path ); if ( treeNode != null ) { if( treeNode instanceof RuntimeTreeNode ) { return ( (RuntimeTreeNode) treeNode ).treeNode; } return treeNode; } String[] split = path.split( this.kernelObjectConfig.getPathNameSepRegex() ); for( int i = 0; i < split.length; ++i ) { TreeNode node = this.mNodeIndex.get( this.concatenateFullPathBySegments( split, 0, i ) ); if( node instanceof RuntimeTreeNode ) { RuntimeTreeNode rtn = (RuntimeTreeNode) node; if ( rtn.treeNode instanceof ObjectTreeAddressingSectionHandle ) { ObjectTreeAddressingSectionHandle pointHandle = (ObjectTreeAddressingSectionHandle) rtn.treeNode; EntityNode entityNode = pointHandle.queryNode( this.concatenateFullPathBySegments( split, i + 1, split.length - 1 ) ); return entityNode; } } } return null; } @Override public EntityNode queryNode( String path ) { Object o = this.queryEntityHandleByNS( path, null, null ); if( o instanceof EntityNode ) { return (EntityNode) o; } return null; } @Override public TreeNode queryTreeNode( String path ) { Object o = this.queryEntityHandleByNS( path, null, null ); if( o instanceof TreeNode ) { return (TreeNode) o; }
// Runtime KOM shouldn't be GUID.
// else if( o instanceof GUID ) {
//     return this.get( (GUID) o );
// }
return null; } @Override public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) { if( szTargetSep != null ) { path = path.replace( szBadSep, szTargetSep ); } return this.queryGUIDByPath( path ); } @Override public String querySystemKernelObjectPath( GUID objectGuid ) { String thisScopePath = this.getPath( objectGuid ); if ( thisScopePath == null ) { return null; } KOMInstrument imp = this.implicated( objectGuid ); if ( imp != null ) { thisScopePath = imp.querySystemKernelObjectPath( objectGuid ); } return this.getSuperiorPathScope() + this.getConfig().getPathNameSeparator() + thisScopePath; } @Override public ImperialTree getMasterTrieTree() { return null; } static class RuntimeTreeNode implements TreeNode { private TreeNode treeNode; private String path; public RuntimeTreeNode( TreeNode treeNode, String path ) { this.treeNode = treeNode; this.path = path; } @Override public String getName() { return this.treeNode.getName(); } @Override public GUID getGuid() { return this.treeNode.getGuid(); } public TreeNode getTreeNode() { return this.treeNode; } public String getPath() { return this.path; } @Override public String toJSONString() { return this.treeNode.toJSONString(); } @Override public String toString() { return this.treeNode.toString(); } } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/CentralizedRuntimeInstrument.java ================================================ package com.pinecone.hydra.system.ko.runtime; import com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle; import com.pinecone.hydra.system.ko.kom.KOMInstrument; public interface CentralizedRuntimeInstrument extends RuntimeInstrument { KOMInstrument mount( String mountPointPath, KOMInstrument that ); KOMInstrument mount( String mountPointPath, String treeNodeName, KOMInstrument that ); ObjectTreeAddressingSectionHandle directMount( String mountPointPath, ObjectTreeAddressingSectionHandle that ); ObjectTreeAddressingSectionHandle directMount( String mountPointPath, String treeNodeName, ObjectTreeAddressingSectionHandle that ); KOMInstrument getMountedInstrument ( String mountPointPath ); } ================================================ FILE:
Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/DirectMappingTrieRuntimeInstrument.java ================================================ package com.pinecone.hydra.system.ko.runtime; import java.util.Collection; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.unit.trie.DirectoryNode; import com.pinecone.framework.unit.trie.TrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface DirectMappingTrieRuntimeInstrument extends RuntimeInstrument, PineUnit { TrieNode getOwnProperty( String path ); DirectoryNode fetchOwnChildren( String path ); Collection fetchOwnMappingPath(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/GenericRuntimeInstrumentConfig.java ================================================ package com.pinecone.hydra.system.ko.runtime; import java.util.Map; import com.pinecone.framework.system.Nullable; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; import com.pinecone.hydra.system.ko.KernelObjectConfig; public class GenericRuntimeInstrumentConfig extends ArchKernelObjectConfig implements KernelObjectConfig { public GenericRuntimeInstrumentConfig() { super(); } public GenericRuntimeInstrumentConfig( @Nullable Map config ){ super( config ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/KernelExpressInstrument.java ================================================ package com.pinecone.hydra.system.ko.runtime; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.handle.AppliableKHandle; import com.pinecone.hydra.system.ko.handle.KOMMountPointHandle; import com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle; import com.pinecone.hydra.system.ko.kom.ExpressInstrument; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.system.ko.kom.ProxiedKOMMountPointHandle; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public class KernelExpressInstrument extends ArchDirectMappingTrieRuntimeKOMTree implements ExpressInstrument { public KernelExpressInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, superiorPathScope, kernelObjectConfig, guidAllocator ); } public KernelExpressInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig ) { this( superiorProcess, superiorPathScope, kernelObjectConfig, null ); } @Override public KOMInstrument mount( String mountPointPath, KOMInstrument that ) { String[] debris = mountPointPath.split( this.getConfig().getPathNameSepRegex() ); if ( debris.length < 1 ) { throw new IllegalArgumentException( "Path given should not be empty." ); } this.mount( mountPointPath, debris[ debris.length - 1 ], that ); that.setParent( this ); that.applySuperiorPathScope( mountPointPath ); return that; } @Override public KOMInstrument mount( String mountPointPath, String treeNodeName, KOMInstrument that ) { KOMMountPointHandle handle = new ProxiedKOMMountPointHandle( treeNodeName, this.guidAllocator.nextGUID(), that ); this.add( mountPointPath, handle ); return that; } @Override public ObjectTreeAddressingSectionHandle directMount( String mountPointPath, ObjectTreeAddressingSectionHandle that ) { if ( that instanceof AppliableKHandle ) { String[] debris = mountPointPath.split( this.getConfig().getPathNameSepRegex() ); if ( debris.length < 1 ) { throw new IllegalArgumentException( "Path given should not be empty." ); } return this.directMount( mountPointPath, debris[ debris.length - 1 ], that ); } this.add( mountPointPath, that ); return that; } @Override public ObjectTreeAddressingSectionHandle directMount( String mountPointPath, String treeNodeName, ObjectTreeAddressingSectionHandle that ) { if ( that instanceof AppliableKHandle ) { AppliableKHandle handle = (AppliableKHandle) that; if ( that.getGuid() == null ) { handle.applyTreeNodeGuid( this.guidAllocator.nextGUID() ); } handle.applyTreeNodeName( treeNodeName ); } this.add( mountPointPath, that ); return that; } @Override public KOMInstrument getMountedInstrument( String mountPointPath ) { TreeNode tn = this.mNodeIndex.get( mountPointPath ); if ( tn instanceof RuntimeTreeNode ) { tn = ((RuntimeTreeNode) tn).getTreeNode(); } if ( tn instanceof KOMMountPointHandle ) { return ((KOMMountPointHandle) tn).revealWrapped(); } else if ( tn instanceof KOMInstrument ) { return (KOMInstrument) tn; } return null; } }
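The mounting flow above can be exercised roughly as follows. This is a minimal illustrative sketch, not repository code: `allocator` and `subTree` are assumed to be an existing GuidAllocator and KOMInstrument, the scope strings are hypothetical, and a "/"-style path separator is assumed for the config; only APIs visible in this file are used.

// Illustrative sketch (assumed objects: allocator, subTree; hypothetical paths).
KernelExpressInstrument express = new KernelExpressInstrument(
        null, "hydra", new GenericRuntimeInstrumentConfig(), allocator
);
// mount() wraps subTree in a ProxiedKOMMountPointHandle keyed by the last path segment.
express.mount( "hydra/express", subTree );
// getMountedInstrument() unwraps the handle and yields subTree again.
KOMInstrument mounted = express.getMountedInstrument( "hydra/express" );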
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/RuntimeInstrument.java ================================================ package com.pinecone.hydra.system.ko.runtime; import java.util.Collection; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface RuntimeInstrument extends KOMInstrument { Collection fetchTreeNodes(); TreeNode add( String mountPointPath, TreeNode that ); KOMInstrument implicated( GUID objectGuid ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/polity/RepublicSystem.java ================================================ package com.pinecone.hydra.system.polity; import com.pinecone.hydra.system.FederalSystem; import com.pinecone.hydra.system.HierarchySystem; public interface RepublicSystem extends HierarchySystem, FederalSystem { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/ArchMicroSystem.java ================================================ package com.pinecone.hydra.system.subsystem; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.hydra.system.Hydrogen; public abstract class ArchMicroSystem implements MicroSystem { protected String mszName; protected Hydrogen mSystem; protected PatriarchalConfig mSubsystemConfig; public ArchMicroSystem( String name, Hydrogen system, PatriarchalConfig config ) { this.mszName = name; this.mSystem = system; this.mSubsystemConfig = config; } @Override public String getName() { return this.mszName; } @Override public Hydrogen getMasterSystem() { return
this.mSystem; } @Override public PatriarchalConfig getSubsystemConfig() { return this.mSubsystemConfig; } protected abstract void traceWelcomeInfo(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/ArchSubsystemDirector.java ================================================ package com.pinecone.hydra.system.subsystem; import java.io.IOException; import java.nio.file.Path; import java.util.Map; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.ClassUtils; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.ArchSystemCascadeComponent; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; public abstract class ArchSubsystemDirector extends ArchSystemCascadeComponent implements SubsystemDirector { protected DynamicFactory mDynamicFactory; protected PatriarchalConfig mSubsystemConfig; protected PatriarchalConfig mSegmentConfig; protected boolean mSegmentEnabled; public ArchSubsystemDirector( Namespace name, Hydrogen system, HyComponent parent ) { super( name, system, system.getComponentManager(), parent ); this.mDynamicFactory = new GenericDynamicFactory( this.getSystem().getTaskManager().getClassLoader() ); this.mSubsystemConfig = this.getSystem().getSystemConfig().getChild( "Subsystem" ); this.prepare_segment(); this.mSegmentEnabled = (boolean) this.mSegmentConfig.get( "Enable" ); } public ArchSubsystemDirector( Hydrogen system, HyComponent parent ) { this( null, system, parent ); } public ArchSubsystemDirector( Hydrogen system ) { this( system, null ); } protected abstract void prepare_segment(); protected abstract void prepare_each_sub( String key, Object dy ); protected abstract Object instantiate( Map config, String name ) throws ClassNotFoundException ; @SuppressWarnings( "unchecked" ) protected void prepare_init_subsystem_config( PatriarchalConfig seg ) { if ( seg instanceof Map ) { Map cms = (Map) seg; for ( Map.Entry kv : cms.entrySet() ) { Object dy = kv.getValue(); if( dy instanceof String ) { try { PatriarchalConfig sysConfig = seg.getChildFromPath( Path.of((String) dy) ); cms.put( kv.getKey(), sysConfig ); dy = sysConfig; } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } } else if( dy.getClass().isPrimitive() || ClassUtils.isPrimitiveWrapper( dy.getClass() ) ) { continue; } if ( dy instanceof Map ) { Map tm = (Map) dy; Boolean lifecycleWithPrimarySystem = (Boolean) tm.get( "LifecycleWithPrimarySystem" ); if ( lifecycleWithPrimarySystem != null && !lifecycleWithPrimarySystem) { continue; } } this.prepare_each_sub( kv.getKey(), dy ); } } } @Override public PatriarchalConfig getSubsystemConfig() { return this.mSubsystemConfig; } @Override public PatriarchalConfig getSegmentConfig() { return this.mSegmentConfig; } @Override public Object instantiate( String fullName ) { try { Object c = this.mSegmentConfig.get( fullName ); if ( c instanceof Map ) { Map tm = (Map) c; return this.instantiate( tm, fullName ); } } catch ( ClassNotFoundException e ) { return null; } return null; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/Cabinet.java ================================================ package 
com.pinecone.hydra.system.subsystem; import com.pinecone.framework.system.regime.arch.Director; public interface Cabinet extends Director { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/CentralKernelLordFederation.java ================================================ package com.pinecone.hydra.system.subsystem; import java.io.IOException; import java.nio.file.Path; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.regime.arch.Lord; import com.pinecone.framework.util.CollectionUtils; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; public class CentralKernelLordFederation extends ArchSubsystemDirector implements KernelLordFederation { protected Logger log = LoggerFactory.getLogger( "CentralKernelLordFederation" ); protected Map<String, Lord> mEmpireLords; // Domain subsystem.
public CentralKernelLordFederation( Namespace name, Hydrogen system, HyComponent parent ) { super( name, system, parent ); this.mEmpireLords = new ConcurrentHashMap<>(); this.prepare_init_subsystem_config( this.mSegmentConfig ); this.log.info( "[Lifecycle] LordFederation prepared, ready to start." ); } public CentralKernelLordFederation( Hydrogen system, HyComponent parent ) { this( null, system, parent ); } public CentralKernelLordFederation( Hydrogen system ) { this( system, null ); } @Override protected void prepare_segment() { this.mSegmentConfig = this.mSubsystemConfig.getChild( "SystemFederation" ); } @Override protected void prepare_each_sub( String key, Object dy ) { if ( !this.mSegmentEnabled ) { return; } if( dy instanceof Map ) { try { Map tm = (Map) dy; String name = (String) tm.get( "Name" ); if( name == null ) { name = key; } Lord lord = this.instantiate( tm, name ); if( lord == null ) { throw new IllegalArgumentException( "Instantiating Lord failed with illegal arguments." ); } this.register( name, lord ); } catch ( ClassNotFoundException e ) { throw new ProxyProvokeHandleException( e ); } } else { throw new IllegalArgumentException( "Lord config must be in map or JSON format." ); } } @Override @SuppressWarnings( "unchecked" ) protected Lord instantiate( Map config, String name ) throws ClassNotFoundException { Class<?> clazz = this.mDynamicFactory.getClassLoader().loadClass( (String) config.get( KernelLordFederation.KeyMainClass ) ); JSONConfig p = null; if ( this.mSegmentConfig instanceof JSONConfig ) { p = (JSONConfig) this.mSegmentConfig; } Object ins = this.mDynamicFactory.optNewInstance( clazz, new Object[] { this.getSystem(), name, new JSONConfig( (Map) config, p ) } ); return (Lord) ins; } @Override public Lord instantiate( String fullName ) { Lord ms = (Lord) super.instantiate( fullName ); if ( ms != null ) { this.register( fullName, ms ); } return ms; } @Override public void register( String name, Lord system ) { this.mEmpireLords.put( name, system ); } @Override public void deregister( String name ) { this.mEmpireLords.remove( name ); } @Override public Lord get( String name ) { return this.mEmpireLords.get( name ); } @Override public void clearLords() { for ( Lord system : this.mEmpireLords.values() ) { system.release(); } this.mEmpireLords.clear(); } @Override public Set<Map.Entry<String, Lord>> entrySet() { return this.mEmpireLords.entrySet(); } @Override public int size() { return this.mEmpireLords.size(); } @Override public Map addConfig( String key, Object dyPathOrObject ) { Map cms = CollectionUtils.genericConvert( (Map) this.mSegmentConfig ); if( dyPathOrObject instanceof String ) { try { PatriarchalConfig sysConfig = this.mSegmentConfig.getChildFromPath( Path.of( (String) dyPathOrObject ) ); cms.put( key, sysConfig ); return CollectionUtils.genericConvert( (Map) sysConfig ); } catch ( IOException e ) { return null; } } else { cms.put( key, dyPathOrObject ); } return CollectionUtils.genericConvert( (Map) dyPathOrObject ); } @Override public Lord instantiate( String fullName, Object confPathOrObject ) { if ( !this.mSegmentConfig.containsKey( fullName ) ) { Map m = this.addConfig( fullName, confPathOrObject ); if ( m == null ) { return null; } } return this.instantiate( fullName ); } @Override public Hydrogen getSystem() { return super.getSystem(); } }
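As a brief illustrative sketch (not repository code), a Lord can also be installed after startup through the two-argument instantiate above, which first registers the supplied config under the SystemFederation segment via addConfig; `federation` and `com.example.MyLord` are hypothetical names, and the key mirrors KeyMainClass as read by instantiate.

// Illustrative late-binding of a Lord (hypothetical names).
Map<String, Object> conf = Map.of(
        "MainClass", "com.example.MyLord"   // loaded through the director's DynamicFactory
);
Lord lord = federation.instantiate( "MyLord", conf );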
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/CentralMicroSystemCabinet.java ================================================ package com.pinecone.hydra.system.subsystem; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; public class CentralMicroSystemCabinet extends ArchSubsystemDirector implements KernelMicroSystemCabinet { protected ConcurrentHashMap<String, MicroSystem> mRegistry; protected PatriarchalConfig mConfMicroSystems; public CentralMicroSystemCabinet( Namespace name, Hydrogen system, HyComponent parent ) { super( name, system, parent ); this.mRegistry = new ConcurrentHashMap<>(); this.prepare_init_subsystem_config( this.mConfMicroSystems ); } public CentralMicroSystemCabinet( Hydrogen system, HyComponent parent ) { this( null, system, parent ); } public CentralMicroSystemCabinet( Hydrogen system ) { this( system, null ); } @Override protected void prepare_segment() { this.mSegmentConfig = this.mSubsystemConfig.getChild( "SystemCabinet" ); this.mConfMicroSystems = this.mSegmentConfig.getChild( "MicroSystems" ); }
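/*
 * Configuration sketch (explanatory comment added for clarity; layout inferred from
 * prepare_segment above and prepare_each_sub below): the cabinet reads the segment
 * Subsystem -> SystemCabinet, whose "Enable" flag gates assembly, and iterates
 * SystemCabinet -> MicroSystems, where each entry maps a key either to a string path
 * (resolved via getChildFromPath) or to a map such as
 * { "Name": ..., "MainClass": ..., "LifecycleWithPrimarySystem": true }.
 */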
@Override protected void prepare_each_sub( String key, Object dy ) { if ( !this.mSegmentEnabled ) { return; } if( dy instanceof Map ) { try { Map tm = (Map) dy; String name = (String) tm.get( "Name" ); if( name == null ) { name = key; } MicroSystem is = this.instantiate( tm, name ); if( is == null ) { throw new IllegalArgumentException( "Instantiating MicroSystem failed with illegal arguments." ); } this.register( name, is ); } catch ( ClassNotFoundException e ) { throw new ProxyProvokeHandleException( e ); } } else { throw new IllegalArgumentException( "MicroSystem config must be in map or JSON format." ); } } @Override @SuppressWarnings( "unchecked" ) protected MicroSystem instantiate( Map config, String name ) throws ClassNotFoundException { Class<?> clazz = this.mDynamicFactory.getClassLoader().loadClass( (String) config.get( KernelMicroSystemCabinet.KeyMainClass ) ); JSONConfig p = null; if ( this.mSegmentConfig instanceof JSONConfig ) { p = (JSONConfig) this.mSegmentConfig; } Object ins = this.mDynamicFactory.optNewInstance( clazz, new Object[] { name, this.getSystem(), new JSONConfig( (Map) config, p ) } ); return (MicroSystem) ins; } @Override public MicroSystem instantiate( String fullName ) { MicroSystem ms = (MicroSystem) super.instantiate( fullName ); if ( ms != null ) { this.register( fullName, ms ); } return ms; } @Override public void register( String name, MicroSystem system ) { this.mRegistry.put( name, system ); } @Override public void deregister( String name ) { this.mRegistry.remove( name ); } @Override public MicroSystem get( String name ) { return this.mRegistry.get( name ); } @Override public void clearCabinet() { for( MicroSystem system : this.mRegistry.values() ) { system.release(); } this.mRegistry.clear(); } @Override public Set<Map.Entry<String, MicroSystem>> entrySet() { return this.mRegistry.entrySet(); } @Override public int size() { return this.mRegistry.size(); } @Override public Hydrogen getSystem() { return super.getSystem(); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/Federation.java ================================================ package com.pinecone.hydra.system.subsystem; import com.pinecone.framework.system.regime.arch.Director; public interface Federation extends Director { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/KernelLordFederation.java ================================================ package com.pinecone.hydra.system.subsystem; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.regime.arch.Lord; public interface KernelLordFederation extends SubsystemDirector, Federation { String KeyMainClass = "MainClass"; void register( String name, Lord system ); void deregister( String name ); Lord get( String name ); void clearLords(); Set<Map.Entry<String, Lord>> entrySet(); int size(); Map addConfig( String key, Object dyPathOrObject ); Lord instantiate( String fullName, Object confPathOrObject ); Lord instantiate( String fullName ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/KernelMicroSystemCabinet.java ================================================ package com.pinecone.hydra.system.subsystem; import java.util.Map; import java.util.Set; public interface KernelMicroSystemCabinet extends SubsystemDirector, Cabinet { String KeyMainClass = "MainClass"; void register( String name, MicroSystem system ); void deregister( String name );
MicroSystem get( String name ); void clearCabinet(); Set<Map.Entry<String, MicroSystem>> entrySet(); int size(); MicroSystem instantiate( String fullName ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/MicroSystem.java ================================================ package com.pinecone.hydra.system.subsystem; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.executum.Systema; import com.pinecone.framework.util.config.PatriarchalConfig; public interface MicroSystem extends Systema { void release(); RuntimeSystem getMasterSystem(); PatriarchalConfig getSubsystemConfig(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/SubsystemDirector.java ================================================ package com.pinecone.hydra.system.subsystem; import com.pinecone.framework.system.regime.arch.Director; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.hydra.system.HyComponent; public interface SubsystemDirector extends Director, HyComponent { PatriarchalConfig getSubsystemConfig(); PatriarchalConfig getSegmentConfig(); Object instantiate( String fullName ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/types/HydraKingdom.java ================================================ package com.pinecone.hydra.system.types; import com.pinecone.hydra.system.HierarchySystem; import com.pinecone.hydra.system.Hydrogen; public interface HydraKingdom extends Hydrogen, HierarchySystem { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ArchRegimentObjectModel.java ================================================ package com.pinecone.hydra.unit.imperium; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; public abstract class ArchRegimentObjectModel extends ArchUniformInstitutionalizedInstrument implements KOMInstrument { protected ImperialTree imperialTree; protected TreeMasterManipulator treeMasterManipulator; protected KernelObjectConfig kernelObjectConfig; public ArchRegimentObjectModel( TreeMasterManipulator masterManipulator, KernelObjectConfig kernelObjectConfig, String superiorPathScope ) { super( superiorPathScope ); this.treeMasterManipulator = masterManipulator; // [1st]
this.kernelObjectConfig = kernelObjectConfig; // [2nd]
this.imperialTree = new RegimentedImperialTree( this ); } public ArchRegimentObjectModel( KOIMasterManipulator masterManipulator, KernelObjectConfig kernelObjectConfig, String superiorPathScope ) { this( (TreeMasterManipulator) masterManipulator.getSkeletonMasterManipulator(), kernelObjectConfig, superiorPathScope ); } public ImperialTree getMasterTrieTree() { return this.imperialTree; } TreeMasterManipulator getTreeMasterManipulator() { return this.treeMasterManipulator; } @Override public KernelObjectConfig getConfig() { return this.kernelObjectConfig; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ArchUniformInstitutionalizedInstrument.java ================================================ package com.pinecone.hydra.unit.imperium; import
com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.kom.KOMInstrument; /** * Pinecone Ursus For Java Uniform Institutionalized Instrument * Author: Harald.E (Dragon King) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Uniform Institutionalized Instrument * Intermediate model for uniform institutionalized meta-information * ***************************************************************************************** */ public abstract class ArchUniformInstitutionalizedInstrument implements KOMInstrument { protected String superiorPathScope; public ArchUniformInstitutionalizedInstrument( String superiorPathScope ) { this.superiorPathScope = superiorPathScope; } @Override public String querySystemKernelObjectPath( GUID objectGuid ) { String thisScopePath = this.getPath( objectGuid ); if ( thisScopePath == null ) { return null; } return this.getSuperiorPathScope() + this.getConfig().getPathNameSeparator() + thisScopePath; } @Override public String getSuperiorPathScope() { return this.superiorPathScope; } @Override public void applySuperiorPathScope( String superiorPathScope ) { this.superiorPathScope = superiorPathScope; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/GUIDImperialTrieNode.java ================================================ package com.pinecone.hydra.unit.imperium; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.uoi.UOI; import java.util.List; /** * Pinecone Ursus For Java GUIDDistributedTrieNode * Author: * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ********************************************************** * Thanks for genius's contribution.
* ********************************************************** */ public class GUIDImperialTrieNode implements ImperialTreeNode { protected long enumId; // Node enum id
protected GUID guid; // Node GUID
protected List parentGUID; // Parent node GUIDs
protected GUID attributesGUID; // Base data (attributes) GUID
protected GUID nodeMetadataGUID; // Node metadata GUID
protected UOI type; // Node type, kept for convenient data retrieval
public GUIDImperialTrieNode() { } public GUIDImperialTrieNode( long enumId, GUID guid, List parentGUID, GUID baseDataGUID, GUID nodeMetadataGUID, UOI type ) { this.enumId = enumId; this.guid = guid; this.parentGUID = parentGUID; this.attributesGUID = baseDataGUID; this.nodeMetadataGUID = nodeMetadataGUID; this.type = type; } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId( long enumId ) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid( GUID guid ) { this.guid = guid; } @Override public List getParentGUIDs() { return this.parentGUID; } @Override public void setParentGUID( List parentGUID ) { this.parentGUID = parentGUID; } @Override public GUID getAttributesGUID() { return this.attributesGUID; } @Override public void setBaseDataGUID( GUID baseDataGUID ) { this.attributesGUID = baseDataGUID; } @Override public GUID getNodeMetadataGUID() { return this.nodeMetadataGUID; } @Override public void setNodeMetadataGUID( GUID nodeMetadataGUID ) { this.nodeMetadataGUID = nodeMetadataGUID; } @Override public UOI getType() { return this.type; } @Override public void setType( UOI type ) { this.type = type; } @Override public String toString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "class", this.className() ), new KeyValue<>( "guid", this.getGuid() ), new KeyValue<>( "type", this.getType() ) } ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ImperialTree.java ================================================ package com.pinecone.hydra.unit.imperium; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.KernelObjectInstrument; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import java.util.List; /** * Pinecone Ursus For Java Imperial Tree * Author: Harald.E (Dragon King), Ken * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Imperium (Imperial Tree) * A distributed, uniform institutionalization system tree, used for the uniform and systematic institutionalization of controlled distributed objects. * Similar to kernel object management in other operating systems, it ensures that kernel objects and target-controlled objects in the system are marshaled * and accessed in a uniform URL style. * This data structure is based on a prefix tree and a GUID identity system; as a general-purpose structure, it can also be utilized for other marshaling or grouping purposes. * * e.g. \Device\HarddiskVolume3\Users\dragonking\AppData\Local\ * e.g. /proc/137/task * ***************************************************************************************** */ public interface ImperialTree extends PineUnit { void insert( ImperialTreeNode distributedConfTreeNode ); void affirmOwnedNode( GUID nodeGUID, GUID parentGUID ); GUIDImperialTrieNode getNode( GUID guid ); void purge( GUID guid ); void removeTreeNodeOnly( GUID guid ); void put( GUID guid, GUIDImperialTrieNode distributedTreeNode ); boolean contains( GUID key ); boolean containsChild( GUID parentGuid, GUID childGuid ); GUID queryGUIDByPath( String path ); List getChildren( GUID guid ); List fetchChildrenGuids( GUID parentGuid ); List fetchParentGuids( GUID guid ); void removeInheritance( GUID childGuid, GUID parentGuid ); String getCachePath( GUID guid ); void removeCachePath( GUID guid ); GUID getOwner( GUID guid ); void setOwner( GUID sourceGuid, GUID targetGuid ); void setGuidLineage( GUID sourceGuid, GUID targetGuid ); List getSubordinates( GUID guid ); void insertCachePath( GUID guid, String path ); List fetchRoot(); boolean isRoot( GUID guid ); /** Link / Reference */ long queryLinkedCount( GUID guid, LinkedType linkedType ); long queryAllLinkedCount( GUID guid ); default long queryStrongLinkedCount( GUID guid ) { return this.queryLinkedCount( guid, LinkedType.Owned ); } default long queryWeakLinkedCount( GUID guid ) { return this.queryLinkedCount( guid, LinkedType.Hard ); } void newHardLink( GUID sourceGuid, GUID targetGuid ); void moveTo( GUID sourceGuid, GUID destinationGuid ); void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, KernelObjectInstrument instrument ); void updateLinkTagName( GUID tagGuid, String tagName ); /** Link Tag */ GUID getOriginalGuid( String tagName, GUID parentDirGUID ); GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID ); List fetchOriginalGuid( String tagName ); List fetchOriginalGuidRoot( String tagName ); ReparseLinkNode getReparseLinkNode( String tagName, GUID parentDirGuid ); ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID ); GUID getOriginalGuid( GUID tagGuid ); void removeReparseLink( GUID guid ); boolean isTagGuid( GUID guid ); }
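As an illustrative aside (not repository code), the contract above composes into a small usage pattern. The sketch assumes an already-constructed ImperialTree `tree`, a populated GUIDImperialTrieNode `node`, and a directory GUID `dirGuid`; only methods declared above are used, and the path value is hypothetical.

// Illustrative use of the ImperialTree contract (assumed: tree, node, dirGuid).
tree.insert( node );                                             // institutionalize the node
tree.insertCachePath( node.getGuid(), "/device/volume3/users" ); // cache its URL-style path
GUID resolved = tree.queryGUIDByPath( "/device/volume3/users" ); // path -> GUID lookup
tree.newHardLink( node.getGuid(), dirGuid );                     // hard-link the node under dirGuid
long refs = tree.queryAllLinkedCount( node.getGuid() );          // count all owned/linked references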
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ImperialTreeConstants.java ================================================ package com.pinecone.hydra.unit.imperium; public final class ImperialTreeConstants { public static final int DefaultShortPathLength = 330; } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ImperialTreeNode.java ================================================ package com.pinecone.hydra.unit.imperium; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import java.util.List; /** * Pinecone Ursus For Java * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.
* ***************************************************************************************** * Uniform Distributed Trie Tree (UDTT) * ***************************************************************************************** */ public interface ImperialTreeNode extends Pinenut { long getEnumId(); void setEnumId( long enumId ); GUID getGuid(); void setGuid( GUID guid ); List getParentGUIDs(); void setParentGUID( List parentGUID ); GUID getAttributesGUID(); void setBaseDataGUID( GUID baseDataGUID ); GUID getNodeMetadataGUID(); void setNodeMetadataGUID( GUID nodeMetadataGUID ); UOI getType(); void setType( UOI type ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/LinkedType.java ================================================ package com.pinecone.hydra.unit.imperium; public enum LinkedType { Owned ( "Owned" ),
// HardLink
Hard ( "Hard" ), Weak ( "Weak" ); private final String value; LinkedType( String value ){ this.value = value; } public String getName(){ return this.value; } public static String queryName( LinkedType type ) { return type.getName(); } public static LinkedType queryLinkedType( String sz ) { return LinkedType.valueOf( sz ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/RegimentedImperialTree.java ================================================ package com.pinecone.hydra.unit.imperium; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.KernelObjectInstrument; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.ulf.util.guid.i64.GUID72; import com.pinecone.framework.util.id.GuidAllocator; import java.util.List; public class RegimentedImperialTree implements UniImperialTree { static TreeMasterManipulator evalTreeMasterManipulator( KOMInstrument komInstrument ) { return ((ArchRegimentObjectModel) komInstrument).getTreeMasterManipulator(); } static KernelObjectConfig evalKernelObjectConfig( KOMInstrument komInstrument ) { return komInstrument.getConfig(); } protected TrieTreeManipulator trieTreeManipulator; protected TireOwnerManipulator tireOwnerManipulator; protected TriePathCacheManipulator triePathCacheManipulator; protected KernelObjectConfig kernelObjectConfig; protected int shortPathLength; public RegimentedImperialTree( TreeMasterManipulator masterManipulator ) { this.trieTreeManipulator = masterManipulator.getTrieTreeManipulator(); this.tireOwnerManipulator = masterManipulator.getTireOwnerManipulator(); this.triePathCacheManipulator = masterManipulator.getTriePathCacheManipulator(); this.shortPathLength = ImperialTreeConstants.DefaultShortPathLength; } public RegimentedImperialTree( TreeMasterManipulator masterManipulator, KernelObjectConfig config ) { this( masterManipulator ); this.kernelObjectConfig = config; this.shortPathLength = config.getShortPathLength(); } public RegimentedImperialTree( KOMInstrument komInstrument ) { this( RegimentedImperialTree.evalTreeMasterManipulator( komInstrument ), RegimentedImperialTree.evalKernelObjectConfig( komInstrument ) ); } @Override
public void insert( ImperialTreeNode node ) { this.trieTreeManipulator.insert( this.tireOwnerManipulator, (GUIDImperialTrieNode) node ); } @Override public void affirmOwnedNode( GUID nodeGUID, GUID parentGUID ) { GUID owner = this.tireOwnerManipulator.getOwner( nodeGUID ); if ( owner != null ) { this.tireOwnerManipulator.remove( nodeGUID, owner ); } this.tireOwnerManipulator.insertOwnedNode( nodeGUID, parentGUID ); } @Override public GUIDImperialTrieNode getNode( GUID guid ) { return this.trieTreeManipulator.getNode( guid ); } @Override public void purge( GUID guid ) { this.trieTreeManipulator.purge( guid ); } @Override public void removeTreeNodeOnly( GUID guid ) { this.trieTreeManipulator.removeTreeNode( guid ); } @Override public void put( GUID guid, GUIDImperialTrieNode distributedTreeNode ) { this.trieTreeManipulator.insertNode( guid, distributedTreeNode ); } @Override public boolean contains( GUID key ) { return this.trieTreeManipulator.contains( key ); } @Override public boolean containsChild( GUID parentGuid, GUID childGuid ) { return this.trieTreeManipulator.countNode( parentGuid, childGuid ) > 0; } @Override public GUID queryGUIDByPath( String path ) { return this.triePathCacheManipulator.queryGUIDByPath( path ); } @Override public List getChildren( GUID guid ) { return this.trieTreeManipulator.getChildren( guid ); } @Override public List fetchChildrenGuids( GUID parentGuid ) { return this.trieTreeManipulator.fetchChildrenGuids( parentGuid ); } @Override public List fetchParentGuids( GUID guid ) { return this.trieTreeManipulator.fetchParentGuids( guid ); } @Override public void removeInheritance( GUID childGuid, GUID parentGuid ) { this.trieTreeManipulator.removeInheritance( childGuid, parentGuid ); } @Override public void setOwner( GUID sourceGuid, GUID targetGuid ) { GUID owner = this.tireOwnerManipulator.getOwner( sourceGuid ); if ( owner == null ){ long exist = this.trieTreeManipulator.countNode( sourceGuid, targetGuid ); if ( exist <= 0 ){ this.tireOwnerManipulator.insertOwnedNode( sourceGuid, targetGuid ); } else { this.tireOwnerManipulator.setOwned( sourceGuid, targetGuid ); } } else { this.tireOwnerManipulator.remove( sourceGuid, owner ); this.tireOwnerManipulator.insertOwnedNode( sourceGuid, targetGuid ); } } @Override public void setGuidLineage( GUID sourceGuid, GUID targetGuid ) { this.tireOwnerManipulator.updateParentGuid( sourceGuid, targetGuid ); } @Override public String getCachePath( GUID guid ) { return this.triePathCacheManipulator.getPath( guid ); } @Override public void removeCachePath( GUID guid ) { this.triePathCacheManipulator.remove( guid ); } @Override public GUID getOwner( GUID guid ) { return this.tireOwnerManipulator.getOwner( guid ); } @Override public List getSubordinates( GUID guid ) { return this.tireOwnerManipulator.getSubordinates( guid ); } @Override public void insertCachePath( GUID guid, String path ) { if ( path.length() > this.shortPathLength ){ String part1 = path.substring( 0, this.shortPathLength ); String part2 = path.substring( this.shortPathLength ); this.triePathCacheManipulator.insertLongPath( guid, part1, part2 ); } else { GUID node = this.triePathCacheManipulator.getNode( path ); if( node == null ){ this.triePathCacheManipulator.insert( guid, path ); } } }
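/*
 * Cache-path scheme (explanatory comment added for clarity): insertCachePath above
 * stores paths longer than shortPathLength (default 330) in two pieces, e.g. a
 * 400-character path is persisted as part1 = chars [0, 330) plus part2 = chars
 * [330, 400) via insertLongPath, while shorter paths are inserted whole after a
 * duplicate-node check.
 */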
@Override public List fetchRoot() { return this.trieTreeManipulator.fetchRoot(); } @Override public boolean isRoot( GUID guid ) { return this.trieTreeManipulator.isRoot( guid ); } @Override public long queryLinkedCount( GUID guid, LinkedType linkedType ) { return this.trieTreeManipulator.queryLinkedCount( guid, linkedType ); } @Override public long queryAllLinkedCount( GUID guid ) { return this.trieTreeManipulator.queryAllLinkedCount( guid ); } @Override public long queryStrongLinkedCount( GUID guid ) { return this.trieTreeManipulator.queryStrongLinkedCount( guid ); } @Override public long queryWeakLinkedCount( GUID guid ) { return this.trieTreeManipulator.queryWeakLinkedCount( guid ); } @Override public void newHardLink( GUID sourceGuid, GUID targetGuid ) { long count = this.trieTreeManipulator.countNode( sourceGuid, targetGuid ); if ( count <= 0 ){ this.tireOwnerManipulator.insertHardLinkedNode( sourceGuid, targetGuid ); } } @Override public void moveTo( GUID sourceGuid, GUID destinationGuid ) { this.removeCachePath( sourceGuid ); this.tireOwnerManipulator.updateParentGuid( sourceGuid, destinationGuid ); } @Override public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, KernelObjectInstrument instrument ) { GuidAllocator guidAllocator = instrument.getGuidAllocator(); GUID tagGuid = guidAllocator.nextGUID(); this.trieTreeManipulator.newLinkTag( originalGuid, dirGuid, tagName, tagGuid ); } @Override public void updateLinkTagName( GUID tagGuid, String tagName ) { this.trieTreeManipulator.updateLinkTagName( tagGuid, tagName ); } @Override public boolean isTagGuid( GUID guid ) { return this.trieTreeManipulator.isTagGuid( guid ); } @Override public GUID getOriginalGuid( String tagName, GUID parentDirGUID ) { return this.trieTreeManipulator.getOriginalGuid( tagName, parentDirGUID ); } @Override public GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID ) { return this.trieTreeManipulator.getOriginalGuidByNodeGuid( tagName, nodeGUID ); } @Override public List fetchOriginalGuid( String tagName ) { return this.trieTreeManipulator.fetchOriginalGuid( tagName ); } @Override public List fetchOriginalGuidRoot( String tagName ) { return this.trieTreeManipulator.fetchOriginalGuidRoot( tagName ); } @Override public ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID ) { return this.trieTreeManipulator.getReparseLinkNodeByNodeGuid( tagName, nodeGUID ); } @Override public ReparseLinkNode getReparseLinkNode( String tagName, GUID parentDirGuid ) { return this.trieTreeManipulator.getReparseLinkNode( tagName, parentDirGuid ); } @Override public GUID getOriginalGuid( GUID tagGuid ) { return this.trieTreeManipulator.getOriginalGuidByTagGuid( tagGuid ); } @Override public void removeReparseLink( GUID guid ) { this.trieTreeManipulator.removeReparseLink( guid ); } @Override public boolean hasOwnProperty( Object key ) { return this.containsKey( key ); } @Override public boolean containsKey( Object key ) { if( key instanceof GUID ) { return this.contains( (GUID) key ); } else if( key instanceof String ) { return this.contains( new GUID72( (String) key ) ); } return false; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/UniImperialTree.java ================================================ package com.pinecone.hydra.unit.imperium; public interface UniImperialTree extends ImperialTree { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/BranchNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; import com.pinecone.framework.util.id.GUID; public interface BranchNode extends
EntityNode { String getName(); GUID getGuid(); default String getMetaType() { return this.className().replace("Generic",""); } default BranchNode evinceBranchNode(){ return this; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/ElementumNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; public interface ElementumNode extends TreeNode, MetadataNode { default ElementumNode evinceElementNode() { return this; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/EntityNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface EntityNode extends Pinenut { String getName(); GUID getGuid(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/MetaEntryNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; public interface MetaEntryNode extends EntityNode, MetadataNode { default MetaEntryNode evinceEntryNode() { return this; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/MetadataNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; public interface MetadataNode extends EntityNode { default MetadataNode evinceMetadataNode() { return this; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/ReparseLinkNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.LinkedType; public interface ReparseLinkNode extends EntityNode { @Override default String getName() { return this.getTagName(); } @Override default GUID getGuid() { return this.getTagGuid(); } String getTagName(); GUID getTagGuid(); LinkedType getLinkedType(); GUID getTargetNodeGuid(); GUID getParentNodeGuid(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/SkeletonNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; public interface SkeletonNode extends EntityNode { default SkeletonNode evinceSkeletonNode(){ return this; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/TreeNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; import com.pinecone.framework.util.id.GUID; public interface TreeNode extends SkeletonNode { String getName(); GUID getGuid(); default String getMetaType() { return this.className().replace("Generic",""); } default TreeNode evinceTreeNode(){ return this; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/TreeReparseLinkNode.java ================================================ package com.pinecone.hydra.unit.imperium.entity; import com.pinecone.framework.util.id.GUID; import 
com.pinecone.hydra.unit.imperium.LinkedType; public class TreeReparseLinkNode implements ReparseLinkNode { protected String tagName; protected GUID tagGuid; protected LinkedType linkedType; protected GUID targetNodeGuid; protected GUID parentNodeGuid; public TreeReparseLinkNode(){ } @Override public String getTagName() { return this.tagName; } public void setTagName( String tagName ) { this.tagName = tagName; } @Override public GUID getTagGuid() { return this.tagGuid; } public void setTagGuid( GUID tagGuid ) { this.tagGuid = tagGuid; } @Override public LinkedType getLinkedType() { return this.linkedType; } public void setLinkedType( LinkedType linkedType ) { this.linkedType = linkedType; } @Override public GUID getTargetNodeGuid() { return this.targetNodeGuid; } public void setTargetNodeGuid( GUID targetNodeGuid ) { this.targetNodeGuid = targetNodeGuid; } @Override public GUID getParentNodeGuid() { return this.parentNodeGuid; } public void setParentNodeGuid( GUID parentNodeGuid ) { this.parentNodeGuid = parentNodeGuid; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/operator/OperatorFactory.java ================================================ package com.pinecone.hydra.unit.imperium.operator; import com.pinecone.framework.system.prototype.Pinenut; public interface OperatorFactory extends Pinenut { void register( String typeName, TreeNodeOperator functionalNodeOperation ); TreeNodeOperator getOperator(String typeName); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/operator/TreeNodeOperator.java ================================================ package com.pinecone.hydra.unit.imperium.operator; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface TreeNodeOperator extends Pinenut { GUID insert( TreeNode treeNode ); void purge( GUID guid ); TreeNode get( GUID guid ) ; TreeNode get( GUID guid, int depth ); TreeNode getAsRootDepth( GUID guid ); void update( TreeNode treeNode ); void updateName( GUID guid ,String name ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TireOwnerManipulator.java ================================================ package com.pinecone.hydra.unit.imperium.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.LinkedType; import java.util.List; public interface TireOwnerManipulator extends Pinenut { void insertRootNode ( GUID guid, LinkedType linkedType ); default void insertRootNode ( GUID guid ) { this.insertRootNode( guid, LinkedType.Owned ); } void insert( GUID targetGuid, GUID parentGUID, LinkedType linkedType ); default void insertOwnedNode( GUID targetGuid, GUID parentGUID ) { this.insert( targetGuid, parentGUID, LinkedType.Owned ); } default void insertHardLinkedNode( GUID targetGuid, GUID parentGUID ) { this.insert( targetGuid, parentGUID, LinkedType.Hard ); } void update( GUID targetGuid, GUID parentGUID, LinkedType linkedType ); void updateParentGuid( GUID targetGuid, GUID parentGUID ); void updateLinkedType( GUID targetGuid, LinkedType linkedType ); void remove( GUID subordinateGuid, GUID ownerGuid ); void removeBySubordinate( GUID subordinateGuid ); void removeByOwner( GUID OwnerGuid ); GUID 
getOwner( GUID subordinateGuid ); List getSubordinates( GUID guid ); void setLinkedType ( GUID sourceGuid, GUID targetGuid, LinkedType linkedType ); default void setOwned ( GUID sourceGuid, GUID targetGuid ) { this.setLinkedType( sourceGuid, targetGuid, LinkedType.Owned ); } default void setHardLink ( GUID sourceGuid, GUID targetGuid ) { this.setLinkedType( sourceGuid, targetGuid, LinkedType.Hard ); } LinkedType getLinkedType ( GUID childGuid,GUID parentGuid ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TreeMasterManipulator.java ================================================ package com.pinecone.hydra.unit.imperium.source; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; public interface TreeMasterManipulator extends KOISkeletonMasterManipulator { TireOwnerManipulator getTireOwnerManipulator(); TrieTreeManipulator getTrieTreeManipulator(); TriePathCacheManipulator getTriePathCacheManipulator(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TriePathCacheManipulator.java ================================================ package com.pinecone.hydra.unit.imperium.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface TriePathCacheManipulator extends Pinenut { void insert ( GUID guid, String path ); void insertLongPath( GUID guid, String path, String longPath ); void remove ( GUID guid ); String getPath ( GUID guid ); GUID getNode ( String path ); GUID queryGUIDByPath( String path ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TrieTreeManipulator.java ================================================ package com.pinecone.hydra.unit.imperium.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import java.util.List; public interface TrieTreeManipulator extends Pinenut { void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ); /** With detail meta data node information. */ GUIDImperialTrieNode getNode(GUID guid ); boolean contains( GUID key ) ; /** Only with tree node index information. 
*/
GUIDImperialTrieNode getTreeNodeOnly( GUID guid, GUID parentGuid );

long countNode( GUID guid, GUID parentGuid );

// TODO void insertNode( GUID guid, GUIDImperialTrieNode distributedTreeNode );
// TODO void updateNode( GUID guid, GUIDImperialTrieNode distributedTreeNode );

/** Purge / Deletion */
void purge( GUID guid );
void removeTreeNode( GUID guid );
void removeTreeNodeByParentGuid( GUID parentGuid );
void removeTreeNodeYoke( GUID guid, GUID parentGuid );
void removeTreeNodeWithLinkedType( GUID guid, LinkedType linkedType );
void removeNodeMeta( GUID guid );

default void removeOwnedTreeNode( GUID guid ) {
    this.removeTreeNodeWithLinkedType( guid, LinkedType.Owned );
}

default void removeHardLinkedTreeNode( GUID guid ) {
    this.removeTreeNodeWithLinkedType( guid, LinkedType.Hard );
}

/** Lineage / Affinity */
List getChildren( GUID guid );
List<GUID> fetchChildrenGuids( GUID parentGuid );
List<GUID> fetchParentGuids( GUID guid );
void removeInheritance( GUID childNode, GUID parentGUID );
void addChild( GUID childGuid, GUID parentGuid );
void updateType( UOI type, GUID guid );
List fetchRoot();
boolean isRoot( GUID guid );

/** Link / Reference */
/**
 * Query the link count, i.e. how many times the node is linked by its owner. [Strong/Weak]
 * (Gets the node's reference count, filtered by the strong/weak link condition.)
 * @return the number of links referencing this node.
 */
long queryLinkedCount( GUID guid, LinkedType linkedType );
long queryAllLinkedCount( GUID guid );

default long queryStrongLinkedCount( GUID guid ) {
    return this.queryLinkedCount( guid, LinkedType.Owned );
}

default long queryWeakLinkedCount( GUID guid ) {
    return this.queryLinkedCount( guid, LinkedType.Hard );
}

void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid, LinkedType linkedType );

default void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid ) {
    this.newLinkTag( originalGuid, dirGuid, tagName, tagGuid, LinkedType.Hard );
}

void updateLinkTagName( GUID tagGuid, String tagName );
GUID getOriginalGuid( String tagName, GUID parentDirGuid );
GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID );
ReparseLinkNode getReparseLinkNode( String tagName, GUID parentDirGuid );
ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID );
List<GUID> fetchOriginalGuid( String tagName );
List<GUID> fetchOriginalGuidRoot( String tagName );
boolean isTagGuid( GUID guid );
GUID getOriginalGuidByTagGuid( GUID tagGuid );
void removeReparseLink( GUID guid );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/ArchQueueTableMeta.java
================================================
package com.pinecone.hydra.unit.iqueue;

public abstract class ArchQueueTableMeta implements QueueMeta {
    protected String mszQueueTableName;

    public ArchQueueTableMeta( String queueTableName ) {
        this.mszQueueTableName = queueTableName;
    }

    public ArchQueueTableMeta() {}

    public void setQueueTableName( String queueTableName ) {
        this.mszQueueTableName = queueTableName;
    }

    @Override
    public String getQueueTable() {
        return this.mszQueueTableName;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/ConfigurableMegaDeflectPriorityQueueMeta.java
================================================
package com.pinecone.hydra.unit.iqueue;

public class ConfigurableMegaDeflectPriorityQueueMeta extends ArchQueueTableMeta implements MegaDeflectPriorityQueueMeta {
}

================================================
FILE: 
Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/ConfigurableMegaStratumQueueMeta.java ================================================ package com.pinecone.hydra.unit.iqueue; public class ConfigurableMegaStratumQueueMeta extends ArchQueueTableMeta implements MegaStratumQueueMeta { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/DPQueueManipulator.java ================================================ package com.pinecone.hydra.unit.iqueue; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import java.util.List; public interface DPQueueManipulator extends Pinenut { void pushBack(QueueElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta); void pushFront( QueueElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); void incrementLinkedPriorities( QueueElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); QueueElement popFront( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); List batchPopFront( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset ); QueueElement popBack( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); List batchPopBack( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset ); long queryQueueSize( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); QueueElement remove( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); QueueElement query( long enumId, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); List fetchElementByPriority( long priority, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset ); List fetchElement( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset ); List fetchElementGuid( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset ); QueueElement getByIndex( long index, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); Long nextPos( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); Long getIndexPriority( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/DPStratumQueueManipulator.java ================================================ package com.pinecone.hydra.unit.iqueue; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement; public interface DPStratumQueueManipulator extends Pinenut { void pushBack(QueueStratumElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta); QueueStratumElement popFront( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); void removeFront( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta ); long isEmpty( String sharedSegmentField, String sharedSegmentName, 
QueueMeta queueMeta ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/DeflectPriorityQueue.java ================================================ package com.pinecone.hydra.unit.iqueue; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import java.util.List; public interface DeflectPriorityQueue extends MegaPriorityQueue, SharedSegmentIQueue { void pushBack( QueueElement queueElement ); void pushFront( QueueElement queueElement ); QueueElement getByIndex( long index ); QueueElement popFront(); List fetchElements( long offset, long limit ); List fetchElementGuids( long offset, long limit ); QueueElement popBack(); long size(); boolean isEmpty(); boolean contains( QueueElement queueElement ); void setCurrentPos( long mnCurrentPos ); void reset(); QueueElement remove( long enumId ); List fetchElementByPriority( long priority, long offset, long limit ); String getSegmentName(); QueueMasterManipulator getMasterManipulator(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MagnitudeDPQueue.java ================================================ package com.pinecone.hydra.unit.iqueue; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import java.util.List; public class MagnitudeDPQueue implements DeflectPriorityQueue, Cloneable { private QueueMasterManipulator mQueueMasterManipulator; private DPQueueManipulator mDPQueueManipulator; private long mnCurrentPos; private String mszSharedSegmentField; private String mszSharedSegmentName; private QueueMeta mQueueMeta; public MagnitudeDPQueue( KOIMappingDriver driver, long currentPos, String shareSegmentField, String sharedSegmentName, QueueMeta queueMeta ) { this.mQueueMasterManipulator = (QueueMasterManipulator) driver.getMasterManipulator(); this.mDPQueueManipulator = this.mQueueMasterManipulator.getDPQueueManipulator(); this.mnCurrentPos = currentPos; this.mszSharedSegmentField = shareSegmentField; this.mszSharedSegmentName = sharedSegmentName; this.mQueueMeta = queueMeta; } @Override public String getSharedSegmentField() { return this.mszSharedSegmentField; } @Override public String getSharedSegmentName() { return this.mszSharedSegmentName; } @Override public void setCurrentPos( long currentPos ) { this.mnCurrentPos = currentPos; } @Override public void reset() { this.setCurrentPos( 0 ); } @Override public MagnitudeDPQueue clone() { try { Object o = super.clone(); MagnitudeDPQueue neo = (MagnitudeDPQueue) o; return neo; } catch ( CloneNotSupportedException ignore ) { return null; } } @Override public void pushBack( QueueElement queueElement ) { this.mDPQueueManipulator.pushBack( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ); } @Override public void pushFront( QueueElement queueElement ) { this.mDPQueueManipulator.pushFront( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ); this.mDPQueueManipulator.incrementLinkedPriorities( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ); } @Override public QueueElement getByIndex( long index ) { return this.mDPQueueManipulator.getByIndex( index, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ); } @Override public QueueElement popFront() { QueueElement peek = 
this.mDPQueueManipulator.popFront( this.mnCurrentPos, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
        Long indexPriority = this.mDPQueueManipulator.getIndexPriority( this.mnCurrentPos, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
        peek.setIndexPriority( indexPriority );
        Long l = this.mDPQueueManipulator.nextPos( this.mnCurrentPos, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
        if( l == null ) {
            this.mnCurrentPos = -1;
        }
        else {
            this.mnCurrentPos = l;
        }
        return peek;
    }

    @Override
    public List<QueueElement> fetchElements( long offset, long limit ) {
        return this.mDPQueueManipulator.fetchElement( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta, limit, offset );
    }

    @Override
    public List<GUID> fetchElementGuids( long offset, long limit ) {
        return this.mDPQueueManipulator.fetchElementGuid( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta, limit, offset );
    }

    @Override
    public QueueElement popBack() {
        return this.mDPQueueManipulator.popBack( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
    }

    @Override
    public long size() {
        return this.mDPQueueManipulator.queryQueueSize( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
    }

    @Override
    public boolean isEmpty() {
        return this.mDPQueueManipulator.queryQueueSize( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ) == 0;
    }

    @Override
    public boolean contains( QueueElement queueElement ) {
        QueueElement query = this.mDPQueueManipulator.query( queueElement.getEnumId(), this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
        return query != null;
    }

    @Override
    public QueueElement remove( long enumId ) {
        // Fixed: forward the caller's argument instead of silently ignoring it
        // (the original always passed this.mnCurrentPos, leaving enumId unused).
        return this.mDPQueueManipulator.remove( enumId, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
    }

    @Override
    public List<QueueElement> fetchElementByPriority( long priority, long offset, long limit ) {
        // Fixed: parameter order now matches DeflectPriorityQueue( priority, offset, limit );
        // the manipulator takes ( ..., limit, offset ) at the tail, as in fetchElements above.
        return this.mDPQueueManipulator.fetchElementByPriority( priority, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta, limit, offset );
    }

    @Override
    public String getSegmentName() {
        return this.mszSharedSegmentName;
    }

    public long currentPosition() {
        return this.mnCurrentPos;
    }

    @Override
    public QueueMasterManipulator getMasterManipulator() {
        return this.mQueueMasterManipulator;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaDPStratumQueue.java
================================================
package com.pinecone.hydra.unit.iqueue;

import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.unit.iqueue.entity.QueueElement;
import com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement;

public class MegaDPStratumQueue implements MegaStratumQueue {
    private QueueMasterManipulator    mQueueMasterManipulator;
    private DPStratumQueueManipulator mDPStratumQueueManipulator;
    private String                    mszSharedSegmentField;
    private String                    mszSharedSegmentName;
    private QueueMeta                 mQueueMeta;

    public MegaDPStratumQueue( KOIMappingDriver driver, String shareSegmentField, String sharedSegmentName, QueueMeta queueMeta ) {
        this.mQueueMasterManipulator    = (QueueMasterManipulator) driver.getMasterManipulator();
        this.mDPStratumQueueManipulator = this.mQueueMasterManipulator.getDPStratumQueueManipulator();
        this.mszSharedSegmentName       = sharedSegmentName;
        this.mszSharedSegmentField      = shareSegmentField;
        this.mQueueMeta                 = queueMeta;
    }

    @Override
    public void pushBack( QueueStratumElement queueElement ) {
        this.mDPStratumQueueManipulator.pushBack( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
    }

    @Override
    public QueueStratumElement popFront() {
        QueueStratumElement queueStratumElement = this.mDPStratumQueueManipulator.popFront( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
        this.mDPStratumQueueManipulator.removeFront( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );
        return queueStratumElement;
    }

    @Override
    public boolean isEmpty() {
        return this.mDPStratumQueueManipulator.isEmpty( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ) == 0;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaDeflectPriorityQueueMeta.java
================================================
package com.pinecone.hydra.unit.iqueue;

public interface MegaDeflectPriorityQueueMeta extends QueueMeta {
    void setQueueTableName( String queueTableName );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaPriorityQueue.java
================================================
package com.pinecone.hydra.unit.iqueue;

import com.pinecone.framework.system.prototype.Pinenut;

public interface MegaPriorityQueue extends Pinenut {
    //List fetchElements( long offset, long pageSize );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaStratumQueue.java
================================================
package com.pinecone.hydra.unit.iqueue;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.hydra.unit.iqueue.entity.QueueElement;
import com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement;

public interface MegaStratumQueue extends Pinenut {
    void pushBack( QueueStratumElement queueElement );
    QueueStratumElement popFront();
    boolean isEmpty();
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaStratumQueueMeta.java
================================================
package com.pinecone.hydra.unit.iqueue;

public interface MegaStratumQueueMeta extends QueueMeta {
    void setQueueTableName( String queueTableName );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/QueueExistManipulator.java
================================================
package com.pinecone.hydra.unit.iqueue;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

public interface QueueExistManipulator extends Pinenut {
    void insertQueueExist( GUID layerGuid );
    void insertQueueNotExist( GUID layerGuid );
    void setQueueExist( GUID layerGuid );
    void setQueueNotExist( GUID layerGuid );
    boolean isExist( GUID layerGuid );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/QueueMasterManipulator.java
================================================
package com.pinecone.hydra.unit.iqueue;

import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;

public interface QueueMasterManipulator extends KOIMasterManipulator {
    DPQueueManipulator getDPQueueManipulator();
    DPStratumQueueManipulator getDPStratumQueueManipulator();
    QueueExistManipulator getQueueExistManipulator();
}
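================================================
EDITOR'S EXAMPLE (illustrative addition, not a repository file)
================================================
A minimal sketch of how the deflect-priority queue family above appears intended to be consumed. The driver, segment names and meta object are hypothetical placeholders, and GenericQueueElement is defined below. Note that popFront() advances the queue's internal cursor via nextPos(...) and parks it at -1 once the tail is passed, so the cursor position, not size() alone, is the natural loop guard.

    // Assumed: a KOIMappingDriver whose master manipulator is a QueueMasterManipulator,
    // plus a QueueMeta instance naming the backing queue table.
    MagnitudeDPQueue queue = new MagnitudeDPQueue( driver, 0L, "segField", "segName", queueMeta );

    GenericQueueElement e = new GenericQueueElement();
    e.setEnumId( 1L );
    e.setPriority( 10L );
    queue.pushBack( e );

    while ( !queue.isEmpty() && queue.currentPosition() != -1 ) {
        QueueElement head = queue.popFront();   // cursor-based pop
        // ... process head ...
    }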
================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/QueueMeta.java
================================================
package com.pinecone.hydra.unit.iqueue;

import com.pinecone.framework.system.prototype.Pinenut;

public interface QueueMeta extends Pinenut {
    String getQueueTable();
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/SharedSegmentIQueue.java
================================================
package com.pinecone.hydra.unit.iqueue;

import com.pinecone.framework.system.prototype.Pinenut;

public interface SharedSegmentIQueue extends Pinenut {
    String getSharedSegmentField();
    String getSharedSegmentName();
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/GenericQueueElement.java
================================================
package com.pinecone.hydra.unit.iqueue.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;

public class GenericQueueElement implements QueueElement {
    protected long   mnEnumId;
    protected GUID   mObjectGuid;
    protected long   mnPriority;
    protected long   mnLinkedPriority;
    protected long   mnIndexPriority;
    protected double mBias;

    @Override
    public long getEnumId() {
        // Fixed: previously returned the constant 0, ignoring the field set by setEnumId.
        return this.mnEnumId;
    }

    public void setEnumId( long enumId ) {
        this.mnEnumId = enumId;
    }

    @Override
    public GUID getObjectGuid() {
        return this.mObjectGuid;
    }

    public void setObjectGuid( GUID objectGuid ) {
        this.mObjectGuid = objectGuid;
    }

    @Override
    public long getPriority() {
        return this.mnPriority;
    }

    public void setPriority( long priority ) {
        this.mnPriority = priority;
    }

    @Override
    public long getLinkedPriority() {
        return this.mnLinkedPriority;
    }

    public void setLinkedPriority( long linkedPriority ) {
        this.mnLinkedPriority = linkedPriority;
    }

    @Override
    public long getIndexPriority() {
        return this.mnIndexPriority;
    }

    public void setIndexPriority( long indexPriority ) {
        this.mnIndexPriority = indexPriority;
    }

    @Override
    public long getActualPriority() {
        // Placeholder: no actual-priority computation in the generic element.
        return 0;
    }

    @Override
    public double getBias() {
        return this.mBias;
    }

    public void setBias( double bias ) {
        this.mBias = bias;
    }

    @Override
    public String toJSONString() {
        return BeanJSONEncoder.BasicEncoder.encode( this );
    }

    @Override
    public String toString() {
        return this.toJSONString();
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/GenericStratumQueueElement.java
================================================
package com.pinecone.hydra.unit.iqueue.entity;

public class GenericStratumQueueElement extends GenericQueueElement implements QueueStratumElement {
    protected short mStratum;

    @Override
    public void setStratum( short stratum ) {
        this.mStratum = stratum;
    }

    @Override
    public short getStratum() {
        return this.mStratum;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/QueueElement.java
================================================
package com.pinecone.hydra.unit.iqueue.entity;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

public interface QueueElement extends Pinenut {
    long getEnumId();
    GUID getObjectGuid();
    long getPriority();
    long getLinkedPriority();
    long getIndexPriority();
    void setIndexPriority( long indexPriority );
    long getActualPriority();
    double getBias();
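    /* Editor's note (illustrative, not in the original source): the element
       carries several related priority figures. A plausible reading, based on
       the fields of GenericQueueElement above: `priority` is the element's own
       rank, `linkedPriority` a rank inherited through linkage, `indexPriority`
       the rank recorded at the element's current queue slot, and `bias` a
       floating-point tie-breaker. getActualPriority() is a stub in the
       generic implementation. */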
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/QueueStratumElement.java
================================================
package com.pinecone.hydra.unit.iqueue.entity;

public interface QueueStratumElement extends QueueElement {
    void setStratum( short stratum );
    short getStratum();
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/ArchAtlasInstrument.java
================================================
package com.pinecone.hydra.unit.vgraph;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.hydra.unit.vgraph.algo.BasicDAGPathResolver;
import com.pinecone.hydra.unit.vgraph.algo.BasicDAGPathSelector;
import com.pinecone.hydra.unit.vgraph.algo.DAGPathResolver;
import com.pinecone.hydra.unit.vgraph.algo.DAGPathSelector;
import com.pinecone.hydra.unit.vgraph.entity.GraphNode;
import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;
import com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;
import com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator;
import com.pinecone.ulf.util.guid.GUIDs;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.Queue;

public abstract class ArchAtlasInstrument implements AtlasInstrument {
    protected AtlasInstrument                 mParentInstrument;
    protected LayerInstrument                 mLayerInstrument;
    protected Hydrogen                        mHydrogen;
    protected Processum                       mSuperiorProcess;
    protected GuidAllocator                   mGuidAllocator;
    protected DAGPathResolver                 mPathResolver;
    protected DAGPathSelector                 mPathSelector;
    protected AtlasMasterManipulator          mAtlasMasterManipulator;
    protected VectorGraphMasterManipulator    mVectorGraphMasterManipulator;
    protected VectorGraphPathCacheManipulator mVectorGraphPathCacheManipulator;
    protected VectorGraphManipulator          mVectorGraphManipulator;
    protected VectorGraphConfig               mVectorGraphConfig;

    public ArchAtlasInstrument( AtlasMappingDriver atlasMappingDriver, VectorGraphConfig vectorGraphConfig, LayerInstrument layerInstrument ) {
        this.mLayerInstrument = layerInstrument;
        this.mVectorGraphConfig = vectorGraphConfig;
        this.mSuperiorProcess = atlasMappingDriver.getSuperiorProcess();
        this.mAtlasMasterManipulator = atlasMappingDriver.getMasterManipulator();
        this.mVectorGraphMasterManipulator = this.mAtlasMasterManipulator.getVectorGraphMasterManipulator();
        this.mVectorGraphManipulator = this.mVectorGraphMasterManipulator.getVectorGraphManipulator();
        this.mVectorGraphPathCacheManipulator = this.mVectorGraphMasterManipulator.getVectorGraphPathCacheManipulator();

        if ( this.mSuperiorProcess instanceof Hydrogen ) {
            this.mHydrogen = (Hydrogen) this.mSuperiorProcess;
        }
        else {
            this.mHydrogen = (Hydrogen) this.mSuperiorProcess.parentSystem();
        }

        this.mGuidAllocator = GUIDs.newGuidAllocator();
        this.mPathResolver = new BasicDAGPathResolver(); // TODO: should later be specified via a configuration class.
        this.mPathSelector = new BasicDAGPathSelector( this.mPathResolver, this.mVectorGraphManipulator );
    }

    public ArchAtlasInstrument( AtlasMappingDriver driver, LayerInstrument layerInstrument ) {
        this( driver, null, layerInstrument );
    }

    @Override
    public LayerInstrument layerInstrument() {
        return this.mLayerInstrument;
    }

    @Override
    public AtlasInstrument parent() {
        return this.mParentInstrument;
    }

    @Override
    public Processum getSuperiorProcess() {
        return this.mSuperiorProcess;
    }

    @Override
    public void setParent( AtlasInstrument atlasInstrument ) {
        this.mParentInstrument = atlasInstrument;
    }

    @Override
    public AtlasMasterManipulator getMasterManipulator() {
        return this.mAtlasMasterManipulator;
    }

    @Override
    public VectorGraphConfig getConfig() {
        return this.mVectorGraphConfig;
    }

    @Override
    public GuidAllocator getGuidAllocator() {
        return this.mGuidAllocator;
    }

    @Override
    public List<String> getPath( GUID guid ) {
        return this.getNS( guid, "/" );
    }

    @Override
    public GUID queryGUIDByPath( String path ) {
        return this.queryGUIDByNS( path, null, null );
    }

    @Override
    public GUID queryParentID( GUID guid ) {
        return null;
    }

    @Override
    public boolean contains( GUID handleNode, GUID nodeGuid ) {
        return this.mPathSelector.contains( handleNode, nodeGuid );
    }

    @Override
    public GUID put( GraphNode graphNode ) {
        GUID guid = this.mGuidAllocator.nextGUID();
        graphNode.setId( guid );
        this.mVectorGraphManipulator.insertGraphNode( graphNode );
        return guid;
    }

    @Override
    public GUID put( GUID parentGuid, GraphNode graphNode ) {
        GUID guid = this.mGuidAllocator.nextGUID();
        graphNode.setId( guid );
        this.mVectorGraphManipulator.insertNodeByEdge( parentGuid, graphNode );
        return guid;
    }

    @Override
    public GraphNode get( GUID guid ) {
        return this.mVectorGraphManipulator.queryNode( guid );
    }

    @Override
    public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {
        if( szTargetSep != null ) {
            path = path.replace( szBadSep, szTargetSep );
        }
        String[] parts = this.mPathResolver.segmentPathParts( path );
        List<String> resolvedParts = this.mPathResolver.resolvePath( parts );
        path = this.mPathResolver.assemblePath( resolvedParts );

        GUID guid = this.mVectorGraphPathCacheManipulator.queryGUIDByPath( path );
        if ( guid != null ){
            return guid;
        }
        guid = this.mPathSelector.searchId( resolvedParts );
        if( guid != null ){
            this.mVectorGraphPathCacheManipulator.insert( path, guid );
        }
        return guid;
    }

    @Override
    public TreeNode get( GUID guid, int depth ) {
        return null;
    }

    @Override
    public void remove( GUID guid ) {
        this.mVectorGraphManipulator.removeNode( guid );
        this.mVectorGraphPathCacheManipulator.remove( guid );
    }

    @Override
    public void remove( String path ) {
        GUID guid = this.queryGUIDByPath( path );
        if( guid != null ) {
            this.remove( guid );
        }
    }

    @Override
    public List<GraphNode> getChildren( GUID guid ) {
        return this.mVectorGraphManipulator.fetchChildNodes( guid );
    }

    @Override
    public List<GUID> fetchChildrenIds( GUID guid ) {
        return this.mVectorGraphManipulator.fetchChildNodeIds( guid );
    }

    @Override
    public void rename( GUID guid, String name ) {
    }

    /** Find all reachable root paths using BFS. */
    protected List<String> getNS( GUID guid, String szSeparator ){
        // Check the path cache first.
        List<String> path = this.mVectorGraphPathCacheManipulator.getPath( guid );
        if ( path != null && !path.isEmpty() ) {
            return path;
        }

        GraphNode startNode = this.get( guid );
        if ( startNode == null ) {
            return Collections.emptyList();
        }

        List<String> allPaths = new ArrayList<>();
        Queue<GraphNodePair> queue = new LinkedList<>();
        queue.offer( new GraphNodePair( startNode, startNode.getName() ) );

        while ( !queue.isEmpty() ) {
            GraphNodePair current = queue.poll();
            GraphNode currentNode = current.getGraphNode();
            String currentPath = current.getCurrentPath();

            List<GUID> parentIds = this.mVectorGraphManipulator.fetchParentIds( currentNode.getId() );
            if ( parentIds.isEmpty() || !this.allNonNull( parentIds ) ) {
                allPaths.add( currentPath );
                continue;
            }

            // Traverse all non-null parent nodes.
            for ( GUID parentId : parentIds ) {
                if ( parentId != null ) {
                    GraphNode parentNode = this.get( parentId );
                    if ( parentNode != null ) {
                        String newPath = parentNode.getName() + szSeparator + currentPath;
                        queue.offer( new GraphNodePair( parentNode, newPath ) );
                    }
                }
            }
        }

        if ( !allPaths.isEmpty() ) {
            for ( String s : allPaths ) {
                this.mVectorGraphPathCacheManipulator.insert( s, guid );
            }
        }
        return allPaths;
    }

    private boolean allNonNull( List<GUID> list ) {
        return list.stream().noneMatch( Objects::isNull );
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/ArchVectorDAG.java
================================================
package com.pinecone.hydra.unit.vgraph;

import com.pinecone.framework.util.id.GUID;

public abstract class ArchVectorDAG implements VectorDAG {
    protected GUID              mGraphGuid;
    protected VectorGraphConfig mVectorGraphConfig;

    // Temporary Graph
    public ArchVectorDAG( GUID graphGuid, VectorGraphConfig vectorGraphConfig ) {
        this.mVectorGraphConfig = vectorGraphConfig;
        this.mGraphGuid = graphGuid;
    }

    @Override
    public VectorGraphConfig getConfig() {
        return this.mVectorGraphConfig;
    }

    @Override
    public GUID getGraphGuid() {
        return this.mGraphGuid;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/AtlasInstrument.java
================================================
package com.pinecone.hydra.unit.vgraph;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.system.regime.Instrument;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.hydra.unit.vgraph.entity.GraphNode;
import com.pinecone.hydra.unit.vgraph.layer.Layer;
import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;
import com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator;

import java.util.List;

public interface AtlasInstrument extends Instrument {
    AtlasInstrument parent();
    LayerInstrument layerInstrument();
    Processum getSuperiorProcess();
    AtlasMasterManipulator getMasterManipulator();
    VectorGraphConfig getConfig();
    void setParent( AtlasInstrument atlasInstrument );
    GuidAllocator getGuidAllocator();

    List<String> getPath( GUID guid );
    GUID queryGUIDByPath( String path );
    GUID queryParentID( GUID guid );

    default GUID assertPath( String path, String pathType ) throws IllegalArgumentException {
        GUID guid = this.queryGUIDByPath( path );
        if( guid == null ) {
            throw new IllegalArgumentException( "Undefined " + pathType + " '" + path + "'" );
        }
        return guid;
    }

    default GUID assertPath( String path ) throws IllegalArgumentException {
        return this.assertPath( path, "path" );
    }

    boolean contains( GUID handleNode, GUID nodeGuid );
    GUID put( GraphNode graphNode );
    GUID put( GUID parentGuid, GraphNode graphNode );
    GraphNode get( GUID guid );
    GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep );
    TreeNode get( GUID guid, int depth );
    void remove( GUID guid );
    void remove( String path );
    List<GraphNode> getChildren( GUID guid );
    List<GUID> fetchChildrenIds( GUID guid );
    void rename( GUID guid, String name );
    VectorDAG toVectorDAG( Layer layer );
    void addChild( GUID parentGuid, GUID childGuid );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/GraphNodePair.java
================================================ package com.pinecone.hydra.unit.vgraph; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; public class GraphNodePair implements Pinenut { private GraphNode mGraphNode; private String mszCurrentPath; public GraphNodePair( GraphNode graphNode, String currentPath){ this.mGraphNode = graphNode; this.mszCurrentPath = currentPath; } public GraphNodePair(){} public GraphNode getGraphNode() { return mGraphNode; } public void setGraphNode(GraphNode graphNode) { this.mGraphNode = graphNode; } public String getCurrentPath() { return mszCurrentPath; } public void setCurrentPath(String sCurrentPath) { this.mszCurrentPath = sCurrentPath; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/MagnitudeVectorDAG.java ================================================ package com.pinecone.hydra.unit.vgraph; import java.util.List; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator; import com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator; import com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator; public class MagnitudeVectorDAG extends ArchVectorDAG implements VectorDAG { protected Layer mGraphLayer; protected VectorGraphMasterManipulator mMasterManipulator; protected VectorGraphManipulator mVectorGraphManipulator; protected VectorGraphPathCacheManipulator mVectorGraphPathCacheManipulator; public MagnitudeVectorDAG( Layer affliatedLayer, VectorGraphMasterManipulator masterManipulator, VectorGraphConfig vectorGraphConfig ) { super( affliatedLayer.getGuid(), vectorGraphConfig ); this.mGraphLayer = affliatedLayer; this.mMasterManipulator = masterManipulator; this.mVectorGraphManipulator = this.mMasterManipulator.getVectorGraphManipulator(); this.mVectorGraphPathCacheManipulator = this.mMasterManipulator.getVectorGraphPathCacheManipulator(); } @Override public GUID getAffiliateLayerGuid() { return this.mGraphLayer.getGuid(); } @Override public Layer getAffiliateLayer() { return this.mGraphLayer; } @Override public boolean isPersistenceGraph() { return true; } @Override public List fetchSourceGuids( long offset, long limit ) { return this.mVectorGraphManipulator.fetchHandleGuids( offset, limit ); } @Override public List fetchSourceGuidsByTaskPriority( long offset, long limit ) { return this.mVectorGraphManipulator.fetchHandleGuidsByTaskPriority(offset, limit); } @Override public long countSourceNodes() { return this.mVectorGraphManipulator.countSourceNodes(); } @Override public List fetchDownstreamNodeGuid( GUID nodeGuid, long offset, long limit ) { return this.mVectorGraphManipulator.fetchDownstreamNodeGuid(nodeGuid,offset,limit); } @Override public List fetchUpstreamNodeGuid( GUID nodeGuid, long offset, long limit ) { return this.mVectorGraphManipulator.fetchUpstreamNodeGuid(nodeGuid,offset,limit); } @Override public long queryInDegree( GUID nodeGuid ) { return this.mVectorGraphManipulator.queryInDegree(nodeGuid); } @Override public long queryOutDegree( GUID nodeGuid ) { return this.mVectorGraphManipulator.queryOutDegree(nodeGuid); } @Override public GraphNode get( GUID guid ) { return this.mVectorGraphManipulator.queryNode( guid ); } @Override public void removeNode( GUID guid ) { 
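/* Editor's note: the two calls below remove the node from the graph store and
   then evict its cached paths, keeping the path cache consistent with the graph. */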
this.mVectorGraphManipulator.removeNode( guid ); this.mVectorGraphPathCacheManipulator.remove( guid ); } @Override public List fetchChildNodes( GUID guid ) { return this.mVectorGraphManipulator.fetchChildNodes( guid ); } @Override public List fetchChildNodeGuids( GUID guid ) { return this.mVectorGraphManipulator.fetchChildNodeGuids( guid ); } @Override public List fetchChildNodeGuids( long offset, long limit, GUID guid ) { return this.mVectorGraphManipulator.limitFetchChildNodeGuids( offset, limit, guid ); } @Override public long countChildNodeNum( GUID guid ) { return this.mVectorGraphManipulator.countChildNodeNums( guid ); } @Override public long getPriorityByInDegree( GUID guid ) { return this.mVectorGraphManipulator.getPriorityByInDegree( guid ); } @Override public void addChild( GUID parentGuid, GUID childGuid ) { this.mVectorGraphManipulator.addChild( parentGuid,childGuid ); } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/VectorDAG.java ================================================ package com.pinecone.hydra.unit.vgraph; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import java.util.List; public interface VectorDAG extends Pinenut { GUID getGraphGuid(); GUID getAffiliateLayerGuid(); Layer getAffiliateLayer(); boolean isPersistenceGraph(); List fetchSourceGuids( long offset, long limit ); List fetchSourceGuidsByTaskPriority( long offset, long limit ); long countSourceNodes(); List fetchDownstreamNodeGuid( GUID nodeGuid, long offset, long limit ); List fetchUpstreamNodeGuid( GUID nodeGuid, long offset, long limit ); long queryInDegree( GUID nodeGuid ); long queryOutDegree( GUID nodeGuid ); VectorGraphConfig getConfig(); List fetchChildNodes( GUID guid ); List fetchChildNodeGuids( GUID guid ); List fetchChildNodeGuids( long offset, long limit, GUID guid ); long countChildNodeNum( GUID guid ); GraphNode get( GUID guid ); void removeNode( GUID guid ); long getPriorityByInDegree( GUID guid ); void addChild( GUID parentGuid, GUID childGuid ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/VectorGraphConfig.java ================================================ package com.pinecone.hydra.unit.vgraph; import com.pinecone.framework.system.prototype.Pinenut; public interface VectorGraphConfig extends Pinenut { String getPathNameSeparator(); String getFullNameSeparator(); String getPathNameSepRegex(); String getFullNameSepRegex(); int getShortPathLength(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/VectorGraphConstants.java ================================================ package com.pinecone.hydra.unit.vgraph; public class VectorGraphConstants { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/BasicDAGPathResolver.java ================================================ package com.pinecone.hydra.unit.vgraph.algo; import java.util.ArrayList; import java.util.List; public class BasicDAGPathResolver implements DAGPathResolver { protected String mszSepRegex; protected String mszSeparator; public BasicDAGPathResolver(String szSeparator, String szSepRegex ){ 
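/* Editor's example (illustrative, not in the original source): with the
   defaults ("/" as both separator and split regex), resolution below is
   expected to behave like
       new BasicDAGPathResolver().resolvePath( "a/./b/../c" )  ->  "a/c"
   since "." and empty segments are dropped and ".." pops the previous segment. */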
this.mszSeparator = szSeparator;
        this.mszSepRegex = szSepRegex;
    }

    public BasicDAGPathResolver(){
        this( "/", "/" );
    }

    @Override
    public List<String> resolvePath( String[] parts ) {
        ArrayList<String> resolvedParts = new ArrayList<>();
        for ( String part : parts ) {
            if ( part.equals(".") || part.isEmpty() ) {
                continue;
            }
            if ( part.equals("..") ) {
                if ( !resolvedParts.isEmpty() ) {
                    resolvedParts.remove( resolvedParts.size() - 1 );
                }
            }
            else {
                resolvedParts.add( part );
            }
        }
        return resolvedParts;
    }

    @Override
    public String resolvePath( String path ) {
        String[] parts = this.processPath( path ).split( this.mszSepRegex );
        return this.assemblePath( this.resolvePath( parts ) );
    }

    @Override
    public List<String> resolvePathParts( String path ) {
        return this.resolvePath( this.segmentPathParts( path ) );
    }

    @Override
    public String[] segmentPathParts( String path ) {
        return this.processPath( path ).split( this.mszSepRegex );
    }

    @Override
    public String assemblePath( List<String> parts ) {
        if ( parts == null || parts.isEmpty() ) {
            return "";
        }
        StringBuilder path = new StringBuilder();
        for ( int i = 0; i < parts.size(); ++i ) {
            if ( i > 0 ) {
                path.append( this.mszSeparator );
            }
            path.append( parts.get( i ) );
        }
        return path.toString();
    }

    protected String processPath( String path ) {
        return path;
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/BasicDAGPathSelector.java
================================================
package com.pinecone.hydra.unit.vgraph.algo;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.vgraph.AtlasInstrument;
import com.pinecone.hydra.unit.vgraph.entity.GraphNode;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;

import java.util.List;
import java.util.Stack;

public class BasicDAGPathSelector implements DAGPathSelector {
    protected DAGPathResolver        mPathResolver;
    protected VectorGraphManipulator mVectorGraphManipulator;

    public BasicDAGPathSelector( DAGPathResolver pathResolver, VectorGraphManipulator vectorGraphManipulator ){
        this.mPathResolver = pathResolver;
        this.mVectorGraphManipulator = vectorGraphManipulator;
    }

    @Override
    public GUID searchId( String[] parts ) {
        return this.searchId( parts, null );
    }

    @Override
    public GUID searchId( String[] parts, @Nullable String[] lpResolvedPath ) {
        List<String> resolvedParts = this.mPathResolver.resolvePath( parts );
        if( lpResolvedPath != null ) {
            lpResolvedPath[ 0 ] = this.mPathResolver.assemblePath( resolvedParts );
        }
        return this.searchId( resolvedParts );
    }

    @Override
    public GUID searchId( List<String> resolvedParts ) {
        return this.dfsSearch( resolvedParts );
    }

    @Override
    public GUID searchId( GUID parentId, String[] parts ) {
        return this.searchId( parentId, parts, null );
    }

    @Override
    public GUID searchId( GUID parentId, String[] parts, @Nullable String[] lpResolvedPath ) {
        List<String> resolvedParts = this.mPathResolver.resolvePath( parts );
        if( lpResolvedPath != null ) {
            lpResolvedPath[ 0 ] = this.mPathResolver.assemblePath( resolvedParts );
        }
        return this.searchId( parentId, resolvedParts );
    }

    @Override
    public GUID searchId( GUID parentId, List<String> resolvedParts ) {
        return this.dfsSearch( parentId, resolvedParts );
    }

    @Override
    public boolean contains( GUID handleNode, GUID nodeGuid ) {
        if( handleNode.equals( nodeGuid ) ){
            return true;
        }
        List<GraphNode> nodes = this.mVectorGraphManipulator.fetchChildNodes( handleNode );
        for( GraphNode node : nodes ){
            // Fixed: the recursive result was previously discarded, so this method always returned false.
            if ( this.contains( node.getId(), nodeGuid ) ) {
                return true;
            }
        }
        return false;
    }

    protected GUID dfsSearch( List<String> parts ) {
        return this.dfsSearch( null, parts );
    }

    /** Recursive DFS traversal of the graph (legacy implementation, kept for reference). */
    protected Object dfsSearch( GUID parentID, List<String> parts, int depth ) {
        if ( depth == parts.size() - 1 ) {
            List<GraphNode> nodes = this.mVectorGraphManipulator.fetchNodesByName( parts.get( depth ) );
            for ( GraphNode graphNode : nodes ) {
                // Fixed: fetchParentIds returns a list, so membership must be tested with contains, not equals.
                if ( parentID == null || this.mVectorGraphManipulator.fetchParentIds( graphNode.getId() ).contains( parentID ) ) {
                    return graphNode;
                }
            }
            return null;
        }
        List<GraphNode> nodes = this.mVectorGraphManipulator.fetchNodesByName( parts.get( depth ) );
        for ( GraphNode graphNode : nodes ) {
            if ( parentID == null || this.mVectorGraphManipulator.fetchParentIds( graphNode.getId() ).contains( parentID ) ) {
                Object result = this.dfsSearch( graphNode.getId(), parts, depth + 1 );
                if ( result != null ) {
                    return result;
                }
            }
        }
        return null;
    }

    /** Iterative (non-recursive) DFS traversal. */
    protected GUID dfsSearch( GUID parentID, List<String> parts ) {
        if ( parts.isEmpty() ) {
            return null; // Boundary condition: empty path.
        }
        // The stack holds the current state: parent id and current depth.
        Stack<Object[]> stack = new Stack<>();
        stack.push( new Object[]{ parentID, 0 } ); // Initial state: parentID, depth = 0.

        while ( !stack.isEmpty() ) {
            Object[] state = stack.pop();
            GUID currentParentID = (GUID) state[ 0 ];
            int currentDepth = (int) state[ 1 ];

            // Termination: the end of the path has been reached.
            if ( currentDepth == parts.size() - 1 ) {
                List<GraphNode> nodes = mVectorGraphManipulator.fetchNodesByName( parts.get( currentDepth ) );
                for ( GraphNode node : nodes ) {
                    if ( currentParentID == null || mVectorGraphManipulator.fetchParentIds( node.getId() ).contains( currentParentID ) ) {
                        return node.getId(); // Target node found.
                    }
                }
                continue; // Nothing at this depth; keep backtracking.
            }

            // Not terminal: keep searching downwards.
            List<GraphNode> nodes = mVectorGraphManipulator.fetchNodesByName( parts.get( currentDepth ) );
            // The stack is LIFO; iterate the nodes in reverse to preserve their order.
            for ( int i = nodes.size() - 1; i >= 0; i-- ) {
                GraphNode node = nodes.get( i );
                // Fixed: use contains on the parent-id list (same defect as in the recursive variant).
                if ( currentParentID == null || mVectorGraphManipulator.fetchParentIds( node.getId() ).contains( currentParentID ) ) {
                    // Push the child id together with the next depth.
                    stack.push( new Object[]{ node.getId(), currentDepth + 1 } );
                }
            }
        }
        return null; // Stack exhausted without a match.
    }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/DAGPathResolver.java
================================================
package com.pinecone.hydra.unit.vgraph.algo;

import com.pinecone.framework.system.prototype.Pinenut;

import java.util.List;

public interface DAGPathResolver extends Pinenut {
    List<String> resolvePath( String[] parts );
    String resolvePath( String path );
    List<String> resolvePathParts( String path );
    String[] segmentPathParts( String path );
    String assemblePath( List<String> parts );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/DAGPathSelector.java
================================================
package com.pinecone.hydra.unit.vgraph.algo;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

import java.util.List;

public interface DAGPathSelector extends Pinenut {
    GUID searchId( String[] parts );
    GUID searchId( String[] parts, @Nullable String[] lpResolvedPath );
    GUID searchId( List<String> resolvedParts );
    GUID searchId( GUID parentId, String[] parts );
    GUID searchId( GUID parentId, String[] parts, @Nullable String[] lpResolvedPath );
    GUID searchId( GUID parentId, List<String> resolvedParts );
    boolean contains( GUID handleNode, GUID nodeGuid );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/entity/GraphNode.java
================================================
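/* Editor's sketch (illustrative only, names assumed): combining the resolver
   and selector above to locate a node by path, then loading the GraphNode
   defined below. `manipulator` stands for any available VectorGraphManipulator.

       DAGPathResolver resolver = new BasicDAGPathResolver();
       DAGPathSelector selector = new BasicDAGPathSelector( resolver, manipulator );
       GUID id = selector.searchId( resolver.resolvePathParts( "root/layer/node" ) );
       GraphNode node = ( id != null ) ? manipulator.queryNode( id ) : null;
*/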
package com.pinecone.hydra.unit.vgraph.entity;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

import java.util.List;

public interface GraphNode extends Pinenut {
    long getEnumId();
    void setEnumId( long enumId );
    String getName();
    GUID getId();
    void setId( GUID guid );
    List<GUID> getParentIds();
    void setParentIds( List<GUID> parentIds );
    String getDescription();
    void setDescription( String description );
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/AtlasLayer.java
================================================
package com.pinecone.hydra.unit.vgraph.layer;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;

public class AtlasLayer implements Layer {
    private String mszName;
    private GUID parentGuid;
    private GUID mGuid;
    private List<GUID> mLstHandleGuids;
    private List<GUID> mLstEndGuids;
    private LocalDateTime mUpdateTime;
    private LocalDateTime mCreateTime;

    public AtlasLayer() {
        this.mLstHandleGuids = new ArrayList<>();
        // Fixed: mLstEndGuids was never initialized, so getSinkGuids() could
        // return null before setSinkGuids was ever called.
        this.mLstEndGuids = new ArrayList<>();
        this.mUpdateTime = LocalDateTime.now();
        this.mCreateTime = LocalDateTime.now();
    }

    @Override
    public String getName() { return this.mszName; }

    @Override
    public GUID getGuid() { return this.mGuid; }

    @Override
    public void setName( String name ) { this.mszName = name; }

    @Override
    public void setGuid( GUID guid ) { this.mGuid = guid; }

    @Override
    public void setParentGuid( GUID parentGuid ) { this.parentGuid = parentGuid; }

    @Override
    public GUID getParentGuid() { return this.parentGuid; }

    @Override
    public List<GUID> getSourceGuids() { return this.mLstHandleGuids; }

    @Override
    public void setSourceGuids( List<GUID> handleGuids ) { this.mLstHandleGuids = handleGuids; }

    @Override
    public List<GUID> getSinkGuids() { return this.mLstEndGuids; }

    @Override
    public void setSinkGuids( List<GUID> endGuids ) { this.mLstEndGuids = endGuids; }

    @Override
    public GUID addSourceeGuid( GUID handleGuid ) {
        this.mLstHandleGuids.add( handleGuid );
        return handleGuid;
    }

    @Override
    public LocalDateTime getUpdateTime() { return this.mUpdateTime; }

    @Override
    public void setUpdateTime( LocalDateTime updateTime ) { this.mUpdateTime = updateTime; }

    @Override
    public LocalDateTime getCreateTime() { return this.mCreateTime; }

    @Override
    public void setCreateTime( LocalDateTime createTime ) { this.mCreateTime = createTime; }
}

================================================
FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/AtlasLayerNamespace.java
================================================
package com.pinecone.hydra.unit.vgraph.layer;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public class AtlasLayerNamespace implements LayerNamespace {
    protected GUID mGuid;
    protected String mszName;
    protected LocalDateTime mCreateTime;
    protected LocalDateTime mUpdateTime;

    public AtlasLayerNamespace() {
        this.mCreateTime = LocalDateTime.now();
        this.mUpdateTime = LocalDateTime.now();
    }

    @Override
    public GUID getGuid() { return this.mGuid; }

    @Override
    public void setGuid( GUID guid ) { this.mGuid = guid; }

    @Override
    public String getName() { return this.mszName; }

    @Override
    public void setName( String name ) { this.mszName = name; }

    @Override
    public LocalDateTime getUpdateTime() { return this.mUpdateTime; }

    @Override
    public void setUpdateTime( LocalDateTime updateTime ) { this.mUpdateTime = updateTime; }

    @Override
    public LocalDateTime getCreateTime() { return this.mCreateTime; }

    @Override
    public void setCreateTime( LocalDateTime
createTime) { this.mCreateTime = createTime; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/Layer.java ================================================ package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; import java.util.List; public interface Layer extends LayerTreeNode { void setName( String name ); void setGuid( GUID guid ); void setParentGuid( GUID parentGuid ); GUID getParentGuid(); List getSourceGuids(); void setSourceGuids( List handleGuids ); List getSinkGuids(); void setSinkGuids( List endGuids ); GUID addSourceeGuid( GUID handleGuid ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime startTime ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerConfig.java ================================================ package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface LayerConfig extends KernelObjectConfig { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerGraphHandle.java ================================================ package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public class LayerGraphHandle implements Pinenut { private String mszName; private GUID mGraphGuid; private GUID mGuid; private GUID mHandleNodeGuid; private GUID mEndNodeGuid; private LocalDateTime mUpdateTime; private LocalDateTime mCreateTime; public String getName() { return this.mszName; } public GUID getGuid() { return this.mGuid; } public void setName( String name ) { this.mszName = name; } public void setGuid( GUID guid ) { this.mGuid = guid; } public void setGraphGuid( GUID graphGuid ) { this.mGraphGuid = graphGuid; } public GUID getGraphGuid() { return this.mGraphGuid; } public GUID getHandleNodeGuid() { return this.mHandleNodeGuid; } public void setHandleNodeGuid(GUID handleNode) { this.mHandleNodeGuid = handleNode; } public GUID getEndNodeGuid() { return this.mEndNodeGuid; } public void setEndNodeGuid(GUID endNodeGuid) { this.mEndNodeGuid = endNodeGuid; } public LocalDateTime getUpdateTime() { return this.mUpdateTime; } public void setUpdateTime(LocalDateTime updateTime) { this.mUpdateTime = updateTime; } public LocalDateTime getCreateTime() { return this.mCreateTime; } public void setCreateTime(LocalDateTime createTime) { this.mCreateTime = createTime; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerInstrument.java ================================================ package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.slime.meta.TableIndexMeta; import java.util.List; public interface LayerInstrument extends KOMInstrument { LayerConfig LayerConfig = new 
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerInstrument.java ================================================
package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.slime.meta.TableIndexMeta; import java.util.List; public interface LayerInstrument extends KOMInstrument { LayerConfig LayerConfig = new VLayerConfig(); LayerConfig getConfig(); Hydrogen getSystem(); void addChild( GUID parentGuid, GUID childGuid ); void update( TreeNode treeNode ); List<Layer> splitGraphLayer( VectorDAG vectorDAG ); long countSourceNode( GUID layerGuid ); List<GUID> fetchSourceGuidsByTaskPriority( GUID layerGuid, long offset, long limit ); @Unsafe( "TestOnly" ) List<Layer> fetchLayersAll(); List<Layer> fetchLayers( long offset, long limit, boolean anyNode, @Nullable GUID parentGuid ); default List<Layer> fetchLayers( long offset, long limit ) { return this.fetchLayers( offset, limit, true, null ); } List<Layer> fetchLayersById( long idStart, long idEnd, boolean anyNode, @Nullable GUID parentGuid ); default List<Layer> fetchLayersById( long idStart, long idEnd ) { return this.fetchLayersById( idStart, idEnd, true, null ); } TableIndexMeta getLayerIndexMeta( boolean anyNode, @Nullable GUID parentGuid ); default TableIndexMeta getLayerIndexMeta() { return this.getLayerIndexMeta( true, null ); } long queryMaxLayerPage( long limit, boolean anyNode, @Nullable GUID parentGuid ); default long queryMaxLayerPage( long limit ) { return this.queryMaxLayerPage( limit, true, null ); } }
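A paging sketch over the instrument contract above (hedged: the page size and the walker class are illustrative; the default overloads select the anyNode = true, parent-agnostic view):

    import com.pinecone.hydra.unit.vgraph.layer.Layer;
    import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;
    import java.util.List;

    public class LayerPagingSketch {
        // Page through every layer: queryMaxLayerPage gives ceil(countLayer / limit).
        public static void walk( LayerInstrument instrument ) {
            final long limit = 100; // hypothetical page size
            long pages = instrument.queryMaxLayerPage( limit );
            for ( long i = 0; i < pages; ++i ) {
                List<Layer> page = instrument.fetchLayers( i * limit, limit );
                // ... process page
            }
        }
    }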
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerNamespace.java ================================================
package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface LayerNamespace extends LayerTreeNode { GUID getGuid(); void setGuid( GUID guid ); String getName(); void setName( String name ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerTreeNode.java ================================================
package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.time.LocalDateTime; public interface LayerTreeNode extends TreeNode { GUID getGuid(); void setUpdateTime( LocalDateTime updateTime ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/VLayerConfig.java ================================================
package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; public class VLayerConfig extends ArchKernelObjectConfig implements LayerConfig { }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/VLayerInstrument.java ================================================
package com.pinecone.hydra.unit.vgraph.layer; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.CascadeInstrument; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.kom.ArchKOMTree; import com.pinecone.hydra.system.ko.kom.SimpleMultiFolderPathSelector; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.hydra.unit.vgraph.layer.operator.AtlasLayerComponentOperatorFactory; import com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator; import com.pinecone.slime.meta.TableIndexMeta; import com.pinecone.ulf.util.guid.i128.GuidAllocator128V7; import java.time.LocalDateTime; import java.util.List; import java.util.stream.Collectors; public class VLayerInstrument extends ArchKOMTree implements LayerInstrument { protected LayerMasterManipulator mLayerMasterManipulator; protected LayerManipulator mLayerManipulator; protected NamespaceManipulator mNamespaceManipulator; protected LayerHandleManipulator mLayerHandleManipulator; public VLayerInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, LayerInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, masterManipulator, LayerInstrument.LayerConfig, parent, name, superiorPathScope, guidAllocator ); this.mLayerMasterManipulator = (LayerMasterManipulator) masterManipulator; this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); this.operatorFactory = new AtlasLayerComponentOperatorFactory( this, this.mLayerMasterManipulator ); this.mLayerManipulator = this.mLayerMasterManipulator.getLayerManipulator(); this.mNamespaceManipulator = this.mLayerMasterManipulator.getNamespaceManipulator(); this.mLayerHandleManipulator = this.mLayerMasterManipulator.getLayerHandleManipulator(); this.pathSelector = new SimpleMultiFolderPathSelector( this.pathResolver, this.imperialTree, new GUIDNameManipulator[]{ this.mNamespaceManipulator, this.mLayerManipulator }, new GUIDNameManipulator[]{ this.mLayerManipulator } ); } public VLayerInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, LayerInstrument parent, String name ) { this( superiorProcess, masterManipulator, parent, name, CascadeInstrument.EmptySuperiorPathScope, new GuidAllocator128V7() ); } public VLayerInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator ) { this( superiorProcess, masterManipulator, null, LayerConfig.class.getSimpleName() ); } public VLayerInstrument( KOIMappingDriver driver ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator() ); } @Override public String getFullName() { return super.getFullName(); } @Override public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) { if( szTargetSep != null ) { path = path.replace( szBadSep, szTargetSep ); } String[] parts = this.pathResolver.segmentPathParts( path ); return this.pathSelector.searchGUID( parts ); } @Override public Hydrogen getSystem() { return this.hydrogen; } @Override public LayerConfig getConfig() { return (LayerConfig) this.kernelObjectConfig; } @Override public GUID put( TreeNode treeNode ) { TreeNodeOperator operator = this.operatorFactory.getOperator( this.getLayerMetaType( treeNode ) ); return operator.insert( treeNode ); } @Override public void remove( GUID guid ) { GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); TreeNode newInstance = (TreeNode)node.getType().newInstance(); TreeNodeOperator operator = this.operatorFactory.getOperator( this.getLayerMetaType( newInstance ) ); operator.purge( guid ); } protected TreeNodeOperator getOperatorByGuid( GUID guid ) { ImperialTreeNode node = this.imperialTree.getNode( guid ); if ( node == null ){ return null; } TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class[]{this.getClass()}, null ); return this.operatorFactory.getOperator( this.getLayerMetaType( newInstance ) ); } @Override public Layer get( GUID guid ) { TreeNodeOperator operator = this.getOperatorByGuid( guid ); if( operator == null ) { return null; } return (Layer) operator.get( guid ); } @Override public void addChild( GUID parentGuid, GUID childGuid ) { this.imperialTree.affirmOwnedNode(childGuid, parentGuid); } @Override public void update( TreeNode treeNode ) { } @Override public List<Layer> splitGraphLayer( VectorDAG vectorDAG ) { Layer layer = this.mLayerManipulator.queryLayer( vectorDAG.getAffiliateLayerGuid() ); List<TreeNode> children = this.getChildren( layer.getGuid() ); List<GUID> collect = children.stream().map(TreeNode::getGuid).collect(Collectors.toList()); List<Layer> layers = this.mLayerManipulator.fetchLayer(collect); return layers; } @Override public long countSourceNode( GUID layerGuid ) { return this.mLayerHandleManipulator.countSourceNode( layerGuid ); } @Override public List<GUID> fetchSourceGuidsByTaskPriority( GUID layerGuid, long offset, long limit ) { return this.mLayerHandleManipulator.fetchSourceGuidsByTaskPriority( layerGuid, offset, limit ); } @Override public List<Layer> fetchLayers( long offset, long limit, boolean anyNode, @Nullable GUID parentGuid ) { return this.mLayerManipulator.fetchLayerPage( offset, limit, anyNode, parentGuid ); } @Unsafe( "TestOnly" ) @Override public List<Layer> fetchLayersAll() { TableIndexMeta meta = this.getLayerIndexMeta(); return this.fetchLayersById( meta.getMinId(), meta.getMaxId() ); } @Override public List<Layer> fetchLayersById( long idStart, long idEnd, boolean anyNode, @Nullable GUID parentGuid ) { return this.mLayerManipulator.fetchLayerPageById( idStart, idEnd, anyNode, parentGuid ); } @Override public TableIndexMeta getLayerIndexMeta( boolean anyNode, @Nullable GUID parentGuid ) { return this.mLayerManipulator.selectLayerIndexMeta( anyNode, parentGuid ); } @Override public long queryMaxLayerPage( long limit, boolean anyNode, @Nullable GUID parentGuid ) { if ( limit <= 0 ) { throw new IllegalArgumentException( "Limit must be greater than zero." ); } long nTotal = this.mLayerManipulator.countLayer( anyNode, parentGuid ); if ( nTotal == 0 ) { return 0; } long nPage = nTotal / limit; if ( nTotal % limit != 0 ) { ++nPage; } return nPage; } protected ImperialTreeNode affirmPreinsertionInitialize(AtlasLayer atlasLayer) { GUID guid = atlasLayer.getGuid(); atlasLayer.setUpdateTime(LocalDateTime.now()); GUIDImperialTrieNode imperialTrieNode = new GUIDImperialTrieNode(); imperialTrieNode.setGuid(guid); return imperialTrieNode; } private String getLayerMetaType( TreeNode treeNode ) { return treeNode.className().replace( "Atlas", "" ); } }
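A construction sketch for the class above (hedged: obtaining a configured KOIMappingDriver is deployment-specific and not shown; the snippet class is invented):

    import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
    import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;
    import com.pinecone.hydra.unit.vgraph.layer.VLayerInstrument;

    public class InstrumentBootSketch {
        // The single-argument constructor delegates to (superiorProcess, masterManipulator),
        // which in turn uses a null parent, the default name "LayerConfig", an empty superior
        // path scope and a GuidAllocator128V7 allocator.
        public static LayerInstrument boot( KOIMappingDriver driver ) {
            return new VLayerInstrument( driver );
        }
    }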
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/ArchLayerComponentOperator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import com.pinecone.hydra.unit.vgraph.layer.LayerTreeNode; import com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator; import java.time.LocalDateTime; public abstract class ArchLayerComponentOperator implements LayerComponentOperator { protected LayerInstrument mLayerInstrument; protected LayerComponentOperatorFactory mFactory; protected ImperialTree mImperialTree; protected LayerMasterManipulator mLayerMasterManipulator; protected GuidAllocator mGuidAllocator; public ArchLayerComponentOperator( LayerMasterManipulator layerMasterManipulator, LayerInstrument layerInstrument) { this.mImperialTree = layerInstrument.getMasterTrieTree(); this.mLayerInstrument = layerInstrument; this.mLayerMasterManipulator = layerMasterManipulator; this.mGuidAllocator = layerInstrument.getGuidAllocator(); } protected ImperialTreeNode affirmPreinsertionInitialize( LayerTreeNode treeNode ) { GUID guid = treeNode.getGuid(); treeNode.setUpdateTime( LocalDateTime.now() ); GUIDImperialTrieNode imperialTrieNode = new GUIDImperialTrieNode(); imperialTrieNode.setGuid( guid ); imperialTrieNode.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) ); return imperialTrieNode; } public LayerComponentOperatorFactory getLayerComponentOperatorFactory() { return this.mFactory; } protected String getLayerNodeMetaType( TreeNode treeNode ) { return treeNode.className().replace("Atlas",""); } }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/AtlasLayerComponentOperatorFactory.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.operator; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.hydra.unit.vgraph.layer.AtlasLayer; import com.pinecone.hydra.unit.vgraph.layer.AtlasLayerNamespace; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; public class AtlasLayerComponentOperatorFactory implements LayerComponentOperatorFactory { protected LayerMasterManipulator mLayerMasterManipulator; protected LayerInstrument mLayerInstrument; protected Map<String, TreeNodeOperator> registerer = new HashMap<>(); protected Map<String, String> metaTypeMap = new TreeMap<>(); protected void registerDefaultMetaType( Class<?> genericType ) { this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace( "Atlas","" )); } protected void registerDefaultMetaTypes() { this.registerDefaultMetaType( AtlasLayer.class ); this.registerDefaultMetaType( AtlasLayerNamespace.class ); } public AtlasLayerComponentOperatorFactory(LayerInstrument layerInstrument, LayerMasterManipulator layerMasterManipulator ) { this.mLayerInstrument = layerInstrument; this.mLayerMasterManipulator = layerMasterManipulator; this.registerer.put( DefaultLayer, new LayerOperator( this ) ); this.registerer.put( DefaultNamespace, new LayerNamespaceOperator( this ) ); } @Override public void register(String typeName, TreeNodeOperator functionalNodeOperation) { this.registerer.put( typeName, functionalNodeOperation ); } @Override public void registerMetaType(Class<?> clazz, String metaType) { this.registerMetaType( clazz.getName(), metaType ); } @Override public void registerMetaType(String classFullName, String metaType) { this.metaTypeMap.put( classFullName, metaType ); } @Override public String getMetaType(String classFullName) { return this.metaTypeMap.get( classFullName ); } @Override public LayerComponentOperator getOperator(String typeName) { return (LayerComponentOperator) this.registerer.get( typeName ); } @Override public LayerInstrument getLayerManager() { return this.mLayerInstrument; } @Override public LayerMasterManipulator getMasterManipulator() { return this.mLayerMasterManipulator; } }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerComponentOperator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.operator; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface LayerComponentOperator extends TreeNodeOperator { }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerComponentOperatorFactory.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.operator; import com.pinecone.hydra.unit.imperium.operator.OperatorFactory; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import com.pinecone.hydra.unit.vgraph.layer.LayerNamespace; import com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator; public interface LayerComponentOperatorFactory extends OperatorFactory { String DefaultLayer = Layer.class.getSimpleName(); String DefaultNamespace = LayerNamespace.class.getSimpleName(); void registerMetaType( Class<?> clazz, String metaType ); void registerMetaType( String classFullName, String metaType ); String getMetaType( String classFullName ); LayerComponentOperator getOperator( String typeName ); LayerInstrument getLayerManager(); LayerMasterManipulator getMasterManipulator(); }
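A registration sketch against the factory contract above (hedged: the "AuditLayer" meta-type key and the snippet class are invented; only register/registerMetaType come from the interfaces shown):

    import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;
    import com.pinecone.hydra.unit.vgraph.layer.operator.LayerComponentOperatorFactory;

    public class OperatorRegistrationSketch {
        // Bind a custom node class to a meta-type key, then bind that key to an operator,
        // so the instrument can route put()/remove() for the new node type.
        public static void install( LayerComponentOperatorFactory factory,
                                    Class<?> nodeClass, TreeNodeOperator operator ) {
            String metaType = "AuditLayer";                  // hypothetical meta-type key
            factory.registerMetaType( nodeClass, metaType ); // class full-name -> meta-type
            factory.register( metaType, operator );          // meta-type -> operator
        }
    }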
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerNamespaceOperator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import com.pinecone.hydra.unit.vgraph.layer.LayerNamespace; import com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator; import java.util.List; public class LayerNamespaceOperator extends ArchLayerComponentOperator implements LayerComponentOperator { protected NamespaceManipulator mNamespaceManipulator; public LayerNamespaceOperator( LayerComponentOperatorFactory factory ) { this( factory.getMasterManipulator(), factory.getLayerManager() ); this.mFactory = factory; } public LayerNamespaceOperator(LayerMasterManipulator layerMasterManipulator, LayerInstrument layerInstrument) { super(layerMasterManipulator, layerInstrument); this.mNamespaceManipulator = layerMasterManipulator.getNamespaceManipulator(); } @Override public GUID insert(TreeNode treeNode) { GUID guid = this.mGuidAllocator.nextGUID(); LayerNamespace layerNamespace = (LayerNamespace) treeNode; layerNamespace.setGuid( guid ); ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(layerNamespace); this.mImperialTree.insert(imperialTreeNode); this.mNamespaceManipulator.insert(layerNamespace); return guid; } @Override public void purge(GUID guid) { List<GUIDImperialTrieNode> children = this.mImperialTree.getChildren(guid); for( GUIDImperialTrieNode node : children ) { TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class[]{this.getClass()}, this ); LayerComponentOperator operator = this.mFactory.getOperator(this.getLayerNodeMetaType(newInstance)); operator.purge( node.getGuid() ); } this.removeNode( guid ); } @Override public TreeNode get(GUID guid) { return this.mNamespaceManipulator.query(guid); } @Override public TreeNode get(GUID guid, int depth) { return null; } @Override public TreeNode getAsRootDepth(GUID guid) { return null; } @Override public void update(TreeNode treeNode) { } @Override public void updateName(GUID guid, String name) { } private void removeNode( GUID guid ) { this.mImperialTree.purge( guid ); this.mImperialTree.removeCachePath( guid ); this.mNamespaceManipulator.remove( guid ); } }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerOperator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerGraphHandle; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import com.pinecone.hydra.unit.vgraph.layer.AtlasLayer; import com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator; import java.util.List; public class LayerOperator extends ArchLayerComponentOperator implements LayerComponentOperator { protected LayerManipulator mLayerManipulator; protected LayerHandleManipulator mLayerHandleManipulator; public LayerOperator( LayerComponentOperatorFactory factory ) { this( factory.getMasterManipulator(), factory.getLayerManager() ); this.mFactory = factory; } public LayerOperator(LayerMasterManipulator layerMasterManipulator, LayerInstrument layerInstrument) { super(layerMasterManipulator, layerInstrument); this.mLayerManipulator = this.mLayerMasterManipulator.getLayerManipulator(); this.mLayerHandleManipulator = this.mLayerMasterManipulator.getLayerHandleManipulator(); } @Override public GUID insert(TreeNode treeNode) { GUID guid = this.mGuidAllocator.nextGUID(); AtlasLayer atlasLayer = (AtlasLayer) treeNode; atlasLayer.setGuid( guid ); ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(atlasLayer); this.mImperialTree.insert(imperialTreeNode); LayerGraphHandle layerGraphHandle = new LayerGraphHandle(); layerGraphHandle.setGuid(atlasLayer.getGuid()); layerGraphHandle.setName(atlasLayer.getName()); layerGraphHandle.setUpdateTime(atlasLayer.getUpdateTime()); layerGraphHandle.setCreateTime(atlasLayer.getCreateTime()); this.mLayerManipulator.insertLayer( layerGraphHandle ); if( atlasLayer.getSourceGuids() != null ) { this.mLayerHandleManipulator.batchInsertSourceNodes( layerGraphHandle.getGuid(), atlasLayer.getSourceGuids() ); } if( atlasLayer.getSinkGuids() != null ) { this.mLayerHandleManipulator.batchInsertSinkNodes( layerGraphHandle.getGuid(), atlasLayer.getSinkGuids() ); } return guid; } @Override public void purge(GUID guid) { List<GUIDImperialTrieNode> children = this.mImperialTree.getChildren(guid); for( GUIDImperialTrieNode node : children ) { TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class[]{this.getClass()}, this ); LayerComponentOperator operator = this.mFactory.getOperator(this.getLayerNodeMetaType(newInstance)); operator.purge( node.getGuid() ); } this.removeNode( guid ); } @Override public TreeNode get(GUID guid) { Layer layer = this.mLayerManipulator.queryLayer(guid); List<GUID> sourceNodeGuids = this.mLayerHandleManipulator.fetchSourceNodes(layer.getGuid()); List<GUID> sinkNodeGuids = this.mLayerHandleManipulator.fetchSinkNodes(layer.getGuid()); layer.setSourceGuids( sourceNodeGuids ); layer.setSinkGuids( sinkNodeGuids ); return layer; } @Override public TreeNode get(GUID guid, int depth) { return null; } @Override public TreeNode getAsRootDepth(GUID guid) { return null; } @Override public void update(TreeNode treeNode) { } @Override public void updateName(GUID guid, String name) { } private void removeNode( GUID guid ) { this.mImperialTree.purge( guid ); this.mImperialTree.removeCachePath( guid ); this.mLayerManipulator.remove( guid ); } }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerHandleManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.util.List; public interface LayerHandleManipulator extends Pinenut { void insertSourceNode( GUID layerGuid, GUID handleGuid ); void insertSinkNode( GUID layerGuid, GUID handleGuid ); void batchInsertSourceNodes( GUID layerGuid, List<GUID> handleGuids ); void batchInsertSinkNodes( GUID layerGuid, List<GUID> handleGuids ); List<GUID> fetchSourceNodes( GUID layerGuid ); List<GUID> fetchSinkNodes( GUID layerGuid ); long countSourceNode( GUID layerGuid ); List<GUID> fetchSourceGuidsByTaskPriority( GUID layerGuid, long offset, long limit ); }
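A wiring sketch for the handle table above (hedged: fetching the full source set in one page and the snippet class are assumed usage, not stated by the interface):

    import com.pinecone.framework.util.id.GUID;
    import com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator;
    import java.util.List;

    public class HandleTableSketch {
        // Attach sources/sinks to a layer row, then read the sources back by task priority.
        public static List<GUID> attach( LayerHandleManipulator handles, GUID layerGuid,
                                         List<GUID> sources, List<GUID> sinks ) {
            handles.batchInsertSourceNodes( layerGuid, sources );
            handles.batchInsertSinkNodes( layerGuid, sinks );
            long n = handles.countSourceNode( layerGuid );
            return handles.fetchSourceGuidsByTaskPriority( layerGuid, 0, n ); // one page = all
        }
    }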
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerGraphHandle; import com.pinecone.slime.meta.TableIndexMeta; import java.util.List; public interface LayerManipulator extends GUIDNameManipulator { void insertLayer( LayerGraphHandle layer ); void remove( GUID guid ); Layer queryLayer( GUID guid ); List<Layer> fetchLayer( List<GUID> guids ); List<GUID> getGuidsByName( String name ); List<GUID> getGuidsByNameID( String name, GUID guid ); void batchInsertLayer( List<LayerGraphHandle> list ); /** anyNode = true: ignore parentGuid; anyNode = false: filter by parentGuid. */ List<Layer> fetchLayerPage( long offset, long limit, boolean anyNode, @Nullable GUID parentGuid ); List<Layer> fetchLayerPageById( long idStart, long idEnd, boolean anyNode, @Nullable GUID parentGuid ); TableIndexMeta selectLayerIndexMeta( boolean anyNode, @Nullable GUID parentGuid ); long countLayer( boolean anyNode, @Nullable GUID parentGuid ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerMasterManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; public interface LayerMasterManipulator extends KOIMasterManipulator { LayerManipulator getLayerManipulator(); NamespaceManipulator getNamespaceManipulator(); LayerHandleManipulator getLayerHandleManipulator(); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerMasterTreeManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; public interface LayerMasterTreeManipulator extends TreeMasterManipulator { }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerOwnerManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import java.util.List; public interface LayerOwnerManipulator extends TireOwnerManipulator { void insertRootNode( GUID guid, LinkedType linkedType ); default void insertRootNode( GUID guid ) { this.insertRootNode( guid, LinkedType.Owned ); } void insert( GUID targetGuid, GUID parentGUID, LinkedType linkedType ); default void insertOwnedNode( GUID targetGuid, GUID parentGUID ) { this.insert( targetGuid, parentGUID, LinkedType.Owned ); } default void insertHardLinkedNode( GUID targetGuid, GUID parentGUID ) { this.insert( targetGuid, parentGUID, LinkedType.Hard ); } void update( GUID targetGuid, GUID parentGUID, LinkedType linkedType ); void updateParentGuid( GUID targetGuid, GUID parentGUID ); void updateLinkedType( GUID targetGuid, LinkedType linkedType ); void remove( GUID subordinateGuid, GUID ownerGuid ); void removeBySubordinate( GUID subordinateGuid ); void removeByOwner( GUID ownerGuid ); GUID getOwner( GUID subordinateGuid ); List<GUID> getSubordinates( GUID guid ); void setLinkedType( GUID sourceGuid, GUID targetGuid, LinkedType linkedType ); default void setOwned( GUID sourceGuid, GUID targetGuid ) { this.setLinkedType( sourceGuid, targetGuid, LinkedType.Owned ); } default void setHardLink( GUID sourceGuid, GUID targetGuid ) { this.setLinkedType( sourceGuid, targetGuid, LinkedType.Hard ); } LinkedType getLinkedType( GUID childGuid, GUID parentGuid ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerPathCacheManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; public interface LayerPathCacheManipulator extends TriePathCacheManipulator { void insert( GUID guid, String path ); void insertLongPath( GUID guid, String path, String longPath ); void remove( GUID guid ); String getPath( GUID guid ); GUID getNode( String path ); GUID queryGUIDByPath( String path ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerTreeManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import java.util.List; public interface LayerTreeManipulator extends TrieTreeManipulator { void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ); /** With detail meta data node information. */ GUIDImperialTrieNode getNode( GUID guid ); boolean contains( GUID key ); /** Only with tree node index information. */ GUIDImperialTrieNode getTreeNodeOnly( GUID guid, GUID parentGuid ); long countNode( GUID guid, GUID parentGuid ); // TODO void insertNode( GUID guid, GUIDImperialTrieNode distributedTreeNode ); // TODO void updateNode( GUID guid, GUIDImperialTrieNode distributedTreeNode ); /** Purge / Deletion */ void purge( GUID guid ); void removeTreeNode( GUID guid ); void removeTreeNodeByParentGuid( GUID parentGuid ); void removeTreeNodeYoke( GUID guid, GUID parentGuid ); void removeTreeNodeWithLinkedType( GUID guid, LinkedType linkedType ); void removeNodeMeta( GUID guid ); default void removeOwnedTreeNode( GUID guid ) { this.removeTreeNodeWithLinkedType( guid, LinkedType.Owned ); } default void removeHardLinkedTreeNode( GUID guid ) { this.removeTreeNodeWithLinkedType( guid, LinkedType.Hard ); } /** Lineage / Affinity */ List<GUIDImperialTrieNode> getChildren( GUID guid ); List<GUID> fetchChildrenGuids( GUID parentGuid ); List<GUID> fetchParentGuids( GUID guid ); void removeInheritance( GUID childNode, GUID parentGUID ); void addChild( GUID childGuid, GUID parentGuid ); void updateType( UOI type, GUID guid ); List fetchRoot(); boolean isRoot( GUID guid ); /** Link / Reference */ /** * Query the link count, i.e. how many times the node is linked by its owners, filtered by strong/weak link type. * @return the link count of the given node. */ long queryLinkedCount( GUID guid, LinkedType linkedType ); long queryAllLinkedCount( GUID guid ); default long queryStrongLinkedCount( GUID guid ) { return this.queryLinkedCount( guid, LinkedType.Owned ); } default long queryWeakLinkedCount( GUID guid ) { return this.queryLinkedCount( guid, LinkedType.Hard ); } void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid, LinkedType linkedType ); default void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid ) { this.newLinkTag( originalGuid, dirGuid, tagName, tagGuid, LinkedType.Hard ); } void updateLinkTagName( GUID tagGuid, String tagName ); GUID getOriginalGuid( String tagName, GUID parentDirGuid ); GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID ); ReparseLinkNode getReparseLinkNode( String tagName, GUID parentDirGuid ); ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID ); List<GUID> fetchOriginalGuid( String tagName ); List<GUID> fetchOriginalGuidRoot( String tagName ); boolean isTagGuid( GUID guid ); GUID getOriginalGuidByTagGuid( GUID tagGuid ); void removeReparseLink( GUID guid ); }
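A reference-count sketch over the link queries above (hedged: the deletion policy of only purging when no strong (Owned) links remain is an assumed usage pattern, not a rule stated by the interface):

    import com.pinecone.framework.util.id.GUID;
    import com.pinecone.hydra.unit.vgraph.layer.source.LayerTreeManipulator;

    public class LinkCountSketch {
        // Purge a node only when nothing owns it any longer (strong = Owned, weak = Hard).
        public static void purgeIfUnowned( LayerTreeManipulator tree, GUID guid ) {
            if ( tree.queryStrongLinkedCount( guid ) == 0 ) {
                tree.purge( guid );
            }
        }
    }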
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/NamespaceManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.layer.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.unit.vgraph.layer.LayerNamespace; public interface NamespaceManipulator extends GUIDNameManipulator { void insert( LayerNamespace layerNamespace ); void remove( GUID guid ); LayerNamespace query( GUID guid ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/AtlasMappingDriver.java ================================================
package com.pinecone.hydra.unit.vgraph.source; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.homotype.StereotypicInjector; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.system.Hydrogen; import java.util.Map; public interface AtlasMappingDriver extends Pinenut { String getVersionSignature(); Hydrogen getSystem(); Processum getSuperiorProcess(); AtlasMasterManipulator getMasterManipulator(); // Temp, TODO StereotypicInjector autoConstruct(Class stereotype, Map config, Object instance ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/AtlasMasterManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.source; import com.pinecone.framework.system.prototype.Pinenut; public interface AtlasMasterManipulator extends Pinenut { VectorGraphMasterManipulator getVectorGraphMasterManipulator(); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/VectorGraphManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.pinecone.slime.meta.TableIndex64Meta; import java.util.List; public interface VectorGraphManipulator extends Pinenut { void insertHandleNode( GraphNode graphNode ); void insertGraphNode( GraphNode graphNode ); void insertNodeByEdge( GUID parentGuid, GraphNode graphNode ); void removeNode( GUID guid ); GraphNode queryNode( GUID guid ); List<GUID> fetchParentIds( GUID guid ); List<GraphNode> fetchChildNodes( GUID guid ); List<GUID> fetchChildNodeGuids( GUID guid ); List<GUID> limitFetchChildNodeGuids( long offset, long limit, GUID guid ); List<GraphNode> fetchRootNodes(); long countChildNodeNums( GUID guid ); List fetchChildNodeIds( GUID guid ); List<GraphNode> fetchNodesByName( String name ); void updateNode( GraphNode graphNode ); List<GUID> fetchHandleGuids( long offset, long limit ); List<GUID> fetchHandleGuidsByTaskPriority( long offset, long limit ); long countSourceNodes(); List<GUID> fetchDownstreamNodeGuid( GUID nodeGuid, long offset, long limit ); List<GUID> fetchUpstreamNodeGuid( GUID nodeGuid, long offset, long limit ); long queryInDegree( GUID nodeGuid ); long queryOutDegree( GUID nodeGuid ); long getPriorityByInDegree( GUID guid ); void addChild( GUID parentGuid, GUID childGuid ); List<GraphNode> fetchIsolatedNodes( long offset, long limit ); List<GraphNode> fetchIsolatedNodesById( long idStart, long idEnd ); long countIsolatedNodes(); TableIndex64Meta selectIsolatedNodeIndexMeta(); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/VectorGraphMasterManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.source; import com.pinecone.framework.system.prototype.Pinenut; public interface VectorGraphMasterManipulator extends Pinenut { VectorGraphManipulator getVectorGraphManipulator(); VectorGraphPathCacheManipulator getVectorGraphPathCacheManipulator(); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/VectorGraphPathCacheManipulator.java ================================================
package com.pinecone.hydra.unit.vgraph.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.util.List; public interface VectorGraphPathCacheManipulator extends Pinenut { void insert( String path, GUID guid ); void insertLongPath( GUID guid, String path, String longPath ); void remove( GUID guid ); void removeByPath( String path ); List<String> getPath( GUID guid ); GUID getNode( String path ); GUID queryGUIDByPath( String path ); }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/traversal/AtlasGraphIterator.java ================================================
package com.pinecone.hydra.unit.vgraph.traversal; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.VectorDAG; public class AtlasGraphIterator implements GraphIterator { @Override public boolean containNode(VectorDAG vectorDAG, GUID targetNodeGuid) { return false; } }
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/traversal/GraphIterator.java ================================================
package com.pinecone.hydra.unit.vgraph.traversal; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.VectorDAG; public interface GraphIterator extends Pinenut { boolean containNode( VectorDAG vectorDAG, GUID targetNodeGuid ); }
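A degree/traversal sketch against VectorGraphManipulator (hedged: the page size and the inspection class are illustrative only):

    import com.pinecone.framework.util.id.GUID;
    import com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;
    import java.util.List;

    public class DagQuerySketch {
        // Report a node's fan-in/fan-out, then take one paging step downstream.
        public static void inspect( VectorGraphManipulator graph, GUID node ) {
            long in  = graph.queryInDegree( node );   // incoming edge count
            long out = graph.queryOutDegree( node );  // outgoing edge count
            List<GUID> next = graph.fetchDownstreamNodeGuid( node, 0, 64 ); // first 64 children
            // ... recurse, or schedule by graph.getPriorityByInDegree( node )
        }
    }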
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/DataWare.java ================================================ package com.pinecone.hydra.ware; public interface DataWare extends Ware { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/MessageWare.java ================================================ package com.pinecone.hydra.ware; public interface MessageWare extends Middleware { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/Middleware.java ================================================ package com.pinecone.hydra.ware; public interface Middleware extends Ware { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/MiddlewareDirector.java ================================================ package com.pinecone.hydra.ware; public interface MiddlewareDirector extends WareDirector { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/MiddlewareManager.java ================================================ package com.pinecone.hydra.ware; public interface MiddlewareManager extends WareManager { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/OLAPWare.java ================================================ package com.pinecone.hydra.ware; public interface OLAPWare { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/OLTPWare.java ================================================ package com.pinecone.hydra.ware; public interface OLTPWare { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/RDBWare.java ================================================ package com.pinecone.hydra.ware; public interface RDBWare extends Ware { } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/Ware.java ================================================ package com.pinecone.hydra.ware; import com.pinecone.framework.system.prototype.Pinenut; public interface Ware extends Pinenut { String typeName(); String domainTypeName(); WareDomain wareDomain(); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/WareDirector.java ================================================ package com.pinecone.hydra.ware; import com.pinecone.framework.system.regime.arch.Director; import com.pinecone.framework.util.config.Config; public interface WareDirector extends Director { Config getSectionConfig(); WareManager getManager( String name ); } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/WareDomain.java ================================================ package com.pinecone.hydra.ware; public enum WareDomain { Undefined ( "Undefined" , 0x0000 ), Data ( "Data" , 0x0001 ), Storage ( "Storage" , 0x0002 ), Message ( "Message" , 0x0003 ), Config ( "Config" , 0x0004 ), Log ( "Log" , 0x0005 ), Compute ( "Compute" , 0x0006 ), Monitor ( "Monitor" , 0x0007 ), Security ( "Security" , 0x0008 ), Network ( "Network" , 0x0009 ), Business ( "Business" , 0x000A ), User ( "User" , 0x000B ), Device ( "Device" , 
0x000C ), Other ( "Other" , 0xFFFF ); private final String value; private final short code; WareDomain( String value, int code ){ this.value = value; this.code = (short) code; } public String getName(){ return this.value; } public short getCode() { return this.code; } } ================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/WareManager.java ================================================ package com.pinecone.hydra.ware; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.framework.util.config.Config; public interface WareManager extends Manager { Config getManagedWaresConfig(); Ware getWare( String name ); } ================================================ FILE: Hydra/hydra-architecture-conduct/pom.xml ================================================ hydra com.pinecone.hydra 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.pinecone.hydra.kernel hydra-architecture-conduct 2.1.0 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 compile com.pinecone.ulf ulfhedinn 1.2.1 compile com.pinecone.hydra.kernel hydra-architecture 2.1.0 compile ================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/CascadeMarshal.java ================================================ package com.pinecone.hydra.system.conduct; public interface CascadeMarshal extends Marshal, Unit { } ================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/CascadeUnit.java ================================================ package com.pinecone.hydra.system.conduct; public interface CascadeUnit extends Unit { } ================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Marshal.java ================================================ package com.pinecone.hydra.system.conduct; import com.pinecone.framework.system.regime.Instrument; public interface Marshal extends Instrument { } ================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Unionem.java ================================================ package com.pinecone.hydra.system.conduct; import com.pinecone.framework.system.regime.Orchestrator; public interface Unionem extends Orchestrator { } ================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Unit.java ================================================ package com.pinecone.hydra.system.conduct; import com.pinecone.framework.system.prototype.Pinenut; public interface Unit extends Pinenut { } ================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/CascadeFlow.java ================================================ package com.pinecone.hydra.system.flow; public interface CascadeFlow extends Flow, Stage { } ================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/Flow.java ================================================ package com.pinecone.hydra.system.flow; import com.pinecone.framework.system.regime.Orchestrator; public interface Flow extends Orchestrator { } ================================================ FILE: 
================================================ FILE: Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/WareManager.java ================================================
package com.pinecone.hydra.ware; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.framework.util.config.Config; public interface WareManager extends Manager { Config getManagedWaresConfig(); Ware getWare( String name ); }
================================================ FILE: Hydra/hydra-architecture-conduct/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent> <artifactId>hydra</artifactId> <groupId>com.pinecone.hydra</groupId> <version>2.5.1</version> </parent>
    <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>11</source> <target>11</target> </configuration> </plugin> </plugins> </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture-conduct</artifactId> <version>2.1.0</version> <packaging>jar</packaging>
    <properties> <maven.compiler.source>11</maven.compiler.source> <maven.compiler.target>11</maven.compiler.target> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
    </dependencies>
</project>
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/CascadeMarshal.java ================================================
package com.pinecone.hydra.system.conduct; public interface CascadeMarshal extends Marshal, Unit { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/CascadeUnit.java ================================================
package com.pinecone.hydra.system.conduct; public interface CascadeUnit extends Unit { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Marshal.java ================================================
package com.pinecone.hydra.system.conduct; import com.pinecone.framework.system.regime.Instrument; public interface Marshal extends Instrument { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Unionem.java ================================================
package com.pinecone.hydra.system.conduct; import com.pinecone.framework.system.regime.Orchestrator; public interface Unionem extends Orchestrator { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Unit.java ================================================
package com.pinecone.hydra.system.conduct; import com.pinecone.framework.system.prototype.Pinenut; public interface Unit extends Pinenut { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/CascadeFlow.java ================================================
package com.pinecone.hydra.system.flow; public interface CascadeFlow extends Flow, Stage { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/Flow.java ================================================
package com.pinecone.hydra.system.flow; import com.pinecone.framework.system.regime.Orchestrator; public interface Flow extends Orchestrator { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/SequentialFlow.java ================================================
package com.pinecone.hydra.system.flow; public interface SequentialFlow extends Flow { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/Stage.java ================================================
package com.pinecone.hydra.system.flow; import com.pinecone.framework.system.prototype.Pinenut; public interface Stage extends Pinenut { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/ups/UniformPyramidTask.java ================================================
package com.pinecone.hydra.system.ups; import com.pinecone.framework.system.prototype.Pinenut; /** * Pinecone Ursus For Java - Uniform Pyramid Task Scheduling * Author: Harald.E (Dragon King) * Copyright © 2008 - 2028 Bean Nuts Foundation. All rights reserved. * ***************************************************************************************** * Uniform Pyramid Task Scheduling * A Centrally Controlled Architecture for Systematic Task Orchestration and Scheduling * Pyramid-model unified scheduling system: a centrally governed architecture for systematic task planning and scheduling. * ***************************************************************************************** */ public interface UniformPyramidTask extends Pinenut { }
================================================ FILE: Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/ups/UniformPyramidTaskInstrument.java ================================================
package com.pinecone.hydra.system.ups; import com.pinecone.framework.system.regime.Instrument; public interface UniformPyramidTaskInstrument extends Instrument { }
================================================ FILE: Hydra/hydra-architecture-message/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent> <artifactId>hydra</artifactId> <groupId>com.pinecone.hydra</groupId> <version>2.5.1</version> </parent>
    <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>11</source> <target>11</target> </configuration> </plugin> </plugins> </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture-message</artifactId> <version>2.1.0</version> <packaging>jar</packaging>
    <properties> <maven.compiler.source>11</maven.compiler.source> <maven.compiler.target>11</maven.compiler.target> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> <scope>compile</scope> </dependency>
    </dependencies>
</project>
================================================ FILE: Hydra/hydra-architecture-message/src/main/java/com/pinecone/message/ArchResponse.java ================================================
package com.pinecone.message; import org.springframework.http.HttpStatus; import com.pinecone.framework.system.prototype.Pinenut; public abstract class ArchResponse implements Pinenut { private Boolean success; private Integer code = HttpStatus.OK.value(); private String message; private String requestId; private String errorCode; }
================================================ FILE: Hydra/hydra-architecture-message/src/main/java/com/pinecone/message/StringResponse.java ================================================
package com.pinecone.message; public class StringResponse extends ArchResponse { }
================================================ FILE: Hydra/hydra-architecture-storage/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent> <artifactId>hydra</artifactId> <groupId>com.pinecone.hydra</groupId> <version>2.5.1</version> </parent>
    <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>11</source> <target>11</target> </configuration> </plugin> </plugins> </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture-storage</artifactId> <version>2.1.0</version> <packaging>jar</packaging>
    <properties> <maven.compiler.source>11</maven.compiler.source> <maven.compiler.target>11</maven.compiler.target> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
    </dependencies>
</project>
================================================ FILE: Hydra/hydra-architecture-storage/src/main/java/com/pinecone/hydra/storage/UFile.java ================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.system.prototype.Pinenut; public interface UFile extends Pinenut { String getName(); Number size(); //String getPath(); }
================================================ FILE: Hydra/hydra-framework-config/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent> <artifactId>hydra</artifactId> <groupId>com.pinecone.hydra</groupId> <version>2.5.1</version> </parent>
    <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>11</source> <target>11</target> </configuration> </plugin> </plugins> </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-framework-config</artifactId> <version>2.1.0</version> <packaging>jar</packaging>
    <properties> <maven.compiler.source>11</maven.compiler.source> <maven.compiler.target>11</maven.compiler.target> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-framework-runtime</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>8.0.26</version> </dependency>
        <dependency> <groupId>org.jsoup</groupId> <artifactId>jsoup</artifactId> <version>1.15.4</version> </dependency>
    </dependencies>
</project>
================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/AccountConfig.java ================================================
package com.pinecone.hydra.account; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface AccountConfig extends KernelObjectConfig { }
================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/AccountManager.java ================================================
package com.pinecone.hydra.account; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.account.entity.ACNodeAllotment; import com.pinecone.hydra.account.entity.Account; import com.pinecone.hydra.account.entity.Credential; import com.pinecone.hydra.account.entity.Domain; import com.pinecone.hydra.account.entity.ElementNode; import com.pinecone.hydra.account.entity.GenericAccount; import com.pinecone.hydra.account.entity.GenericAuthorization; import com.pinecone.hydra.account.entity.GenericDomain; import com.pinecone.hydra.account.entity.GenericPrivilege; import com.pinecone.hydra.account.entity.GenericRole; import com.pinecone.hydra.account.entity.Group; import com.pinecone.hydra.account.entity.Privilege; import com.pinecone.hydra.account.entity.Role; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.ulf.util.guid.i64.GUID72; import java.util.List; public interface AccountManager extends KOMInstrument { ACNodeAllotment getAllotment(); AccountConfig KernelAccountConfig = new KernelAccountConfig(); Account affirmAccount( String path ); Group affirmGroup( String path ); Domain affirmDomain( String path ); void insertCredential( Credential credential ); void insertRole(Role role); ElementNode queryElement( String path ); void addChildren(GUID parentGuid, GUID childrenGuid); boolean containsChild( GUID parentGuid, String childName ); List<GUID> queryAccountGuidByName(String userName); boolean queryAccountByGuid(GUID userGuid, String kernelCredential); void insertPrivilege(GenericPrivilege privilege); void removePrivilege(GUID privilegeGuid); Object queryPrivilege(GUID72 guid72); List queryPrivilegeByName(String name); List<GenericPrivilege> queryAllPrivileges(); void updateRole(GenericRole role); GUID queryUserCredentialByGuid(GUID userGuid); boolean hasPermission(GUID userGuid, String requiredPrivilegeCode); void insertAuthorization(GenericAuthorization authorization); void removeAuthorizationByGuid(GUID userGuid); void removeAuthorizationByUserGuid(GUID userGuid); List<GenericAccount> queryAllAccount(); List<GenericDomain> queryAllDomain(); Group queryGroupByGroupGuid(GUID groupGuid); String queryDomainNameByGuid(GUID domainGuid); List<GenericAuthorization> queryAllAuthorization(); List queryAllRoles(); Account queryAccountByName(String userName); void updateAccount(Account account); Account queryAccountByUserGuid(GUID userGuid); Privilege queryPrivilegeByGuid(GUID guid); void updatePrivilege(Privilege privilege); void updateAuthorization(GUID guid72); void removeRole(int id); List<GenericAuthorization> queryAuthorizationByUserGuid(GUID userGuid); Domain queryDomainByGuid(GUID domainGuid); void updateDomain(Domain domain); void updateGroup(Group group); }
================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/KernelAccountConfig.java ================================================
package com.pinecone.hydra.account; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; public class KernelAccountConfig extends ArchKernelObjectConfig implements AccountConfig { }
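An access-check sketch over the contract above (hedged: the account path and privilege code are invented; that affirmAccount creates missing nodes along the path, and that Account exposes getGuid(), are inferred from UniformAccountManager below):

    import com.pinecone.framework.util.id.GUID;
    import com.pinecone.hydra.account.AccountManager;
    import com.pinecone.hydra.account.entity.Account;

    public class AccountSketch {
        // Ensure an account node exists, then gate an operation on a privilege code.
        public static boolean canDeploy( AccountManager accounts ) {
            Account account = accounts.affirmAccount( "walnut/ops/harald" ); // hypothetical path
            GUID userGuid = account.getGuid();                               // assumed accessor
            return accounts.hasPermission( userGuid, "hydra.deploy" );       // hypothetical code
        }
    }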
java.util.Objects; public class UniformAccountManager extends ArchKOMTree implements AccountManager { protected UserMasterManipulator userMasterManipulator; protected GroupNodeManipulator groupNodeManipulator; protected UserNodeManipulator userNodeManipulator; protected DomainNodeManipulator domainNodeManipulator; protected CredentialManipulator credentialManipulator; protected AuthorizationManipulator authorizationManipulator; protected PrivilegeManipulator privilegeManipulator; protected RoleManipulator roleManipulator; protected List folderManipulators; protected List fileManipulators; protected ACNodeAllotment acNodeAllotment; public UniformAccountManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, AccountManager parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, masterManipulator, KernelAccountConfig, parent, name, superiorPathScope, guidAllocator ); this.userMasterManipulator = (UserMasterManipulator) masterManipulator; this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); this.operatorFactory = new GenericAccountOperatorFactory( this, this.userMasterManipulator ); this.groupNodeManipulator = this.userMasterManipulator.getGroupNodeManipulator(); this.userNodeManipulator = this.userMasterManipulator.getUserNodeManipulator(); this.domainNodeManipulator = this.userMasterManipulator.getDomainNodeManipulator(); this.credentialManipulator = this.userMasterManipulator.getCredentialManipulator(); this.authorizationManipulator = this.userMasterManipulator.getAuthorizationManipulator(); this.privilegeManipulator = this.userMasterManipulator.getPrivilegeManipulator(); this.roleManipulator = this.userMasterManipulator.getRoleManipulator(); this.folderManipulators = new ArrayList<>(List.of(this.domainNodeManipulator, this.groupNodeManipulator)); this.fileManipulators = new ArrayList<>(List.of(this.userNodeManipulator)); this.pathSelector = new MultiFolderPathSelector( this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} ) ); this.acNodeAllotment = new GenericACNodeAllotment( this ); } public UniformAccountManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, AccountManager parent, String name ) { this( superiorProcess, masterManipulator, parent, name, CascadeInstrument.EmptySuperiorPathScope, null ); } public UniformAccountManager( Processum superiorProcess, KOIMasterManipulator masterManipulator ) { this( superiorProcess, masterManipulator, null, AccountManager.class.getSimpleName() ); } public UniformAccountManager( KOIMappingDriver driver, AccountManager parent, String name ){ this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name ); } public UniformAccountManager( KOIMappingDriver driver ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator() ); } @Override public ACNodeAllotment getAllotment() { return this.acNodeAllotment; } @Override public Object queryEntityHandleByNS(String path, String szBadSep, String szTargetSep) { return null; } @Override public String getPath( GUID guid ) { return this.getNS( guid, this.kernelObjectConfig.getPathNameSeparator() ); } @Override public String getFullName( GUID guid ) { return this.getNS( guid, this.kernelObjectConfig.getFullNameSeparator() ); } @Override public ElementNode queryElement(String path) { GUID guid = this.queryGUIDByPath(path); if( guid != null ) { return (ElementNode) this.get( 
    protected ElementNode affirmTreeNodeByPath( String path, Class<?> cnSup, Class<?> nsSup ) {
        String[] parts       = this.pathResolver.segmentPathParts( path );
        String   currentPath = "";
        GUID     parentGuid  = GUIDs.Dummy128();

        ElementNode node = this.queryElement( path );
        if ( node != null ) {
            return node;
        }

        ElementNode ret = null;
        for ( int i = 0; i < parts.length; ++i ) {
            currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : "" ) + parts[ i ];
            node = this.queryElement( currentPath );
            if ( node == null ) {
                if ( i == parts.length - 1 && cnSup != null ) {
                    // Last segment: create the concrete leaf node (e.g. an account or a group).
                    Account account = (Account) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } );
                    account.setName( parts[ i ] );
                    this.put( account );
                    return account;
                }
                else {
                    // Intermediate segment: create a namespace (folder) node and link it to its parent.
                    ElementNode element = (ElementNode) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } );
                    element.setName( parts[ i ] );
                    GUID guid = this.put( element );
                    if ( i != 0 ) {
                        this.treeMasterManipulator.getTrieTreeManipulator().addChild( guid, parentGuid );
                    }
                    parentGuid = guid;
                    ret = element;
                }
            }
            else {
                parentGuid = node.getGuid();
            }
        }
        return ret;
    }

    @Override public Account affirmAccount( String path ) { return (Account) this.affirmTreeNodeByPath( path, GenericAccount.class, GenericDomain.class ); }

    @Override public Group affirmGroup( String path ) { return (Group) this.affirmTreeNodeByPath( path, GenericGroup.class, GenericDomain.class ); }

    @Override public Domain affirmDomain( String path ) { return (Domain) this.affirmTreeNodeByPath( path, null, GenericDomain.class ); }

    @Override public void insertCredential( Credential credential ) { this.credentialManipulator.insert( credential ); }

    @Override public void insertRole( Role role ) { this.roleManipulator.insert( role ); }

    @Override public void addChildren( GUID parentGuid, GUID childrenGuid ) { this.treeMasterManipulator.getTrieTreeManipulator().addChild( childrenGuid, parentGuid ); }

    @Override
    public boolean containsChild( GUID parentGuid, String childName ) {
        for ( GUIDNameManipulator manipulator : this.fileManipulators ) {
            if ( this.containsChild( manipulator, parentGuid, childName ) ) {
                return true;
            }
        }
        for ( GUIDNameManipulator manipulator : this.folderManipulators ) {
            if ( this.containsChild( manipulator, parentGuid, childName ) ) {
                return true;
            }
        }
        return false;
    }

    @Override public List<GUID> queryAccountGuidByName( String userName ) { return this.userNodeManipulator.getGuidsByName( userName ); }

    @Override
    public boolean queryAccountByGuid( GUID userGuid, String kernelCredential ) {
        Account account = this.userNodeManipulator.queryUser( userGuid );
        return account.getKernelCredential().equals( kernelCredential );
    }

    @Override public void insertPrivilege( GenericPrivilege privilege ) { this.privilegeManipulator.insert( privilege ); }

    @Override public void removePrivilege( GUID privilegeGuid ) { this.privilegeManipulator.remove( privilegeGuid ); }

    @Override public Object queryPrivilege( GUID72 guid72 ) { return null; }

    @Override public List queryPrivilegeByName( String name ) { return null; }

    @Override
    public List<Privilege> queryAllPrivileges() {
        List<Privilege> privileges = new ArrayList<>();
        for ( GenericPrivilege privilege : this.privilegeManipulator.queryAllPrivileges() ) {
            privileges.add( privilege );
        }
        return privileges;
    }

    @Override public void updateRole( GenericRole role ) { this.roleManipulator.updateRole( role ); }

    @Override public GUID queryUserCredentialByGuid( GUID userGuid ) { return this.credentialManipulator.queryCredential( userGuid ).getGuid(); }
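    /*
     * Behavior sketch (illustrative): the affirm* methods above are
     * get-or-create operations. Missing intermediate segments are created as
     * GenericDomain folder nodes; only the last segment takes the concrete
     * leaf type. Path values are hypothetical and assume '/' as the
     * configured path separator:
     *
     *     Domain  dept  = manager.affirmDomain( "corp/engineering" );
     *     Account alice = manager.affirmAccount( "corp/engineering/alice" );
     */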
    @Override
    public boolean hasPermission( GUID userGuid, String requiredPrivilegeCode ) {
        List<GenericAuthorization> authorizations = this.authorizationManipulator.queryAuthorizationByUserGuid( userGuid );
        for ( GenericAuthorization authorization : authorizations ) {
            if ( authorization.getPrivilegeToken().contains( requiredPrivilegeCode ) ) {
                return true;
            }
        }
        return false;
    }

    @Override public void insertAuthorization( GenericAuthorization authorization ) { this.authorizationManipulator.insert( authorization ); }

    @Override public void removeAuthorizationByGuid( GUID userGuid ) { this.authorizationManipulator.remove( userGuid ); }

    @Override public void removeAuthorizationByUserGuid( GUID userGuid ) { this.authorizationManipulator.removeAuthorizationByUserGuid( userGuid ); }

    @Override public List queryAllAccount() { return this.userNodeManipulator.queryAllAccount(); }

    @Override public List queryAllDomain() { return this.domainNodeManipulator.queryAllDomain(); }

    @Override public Group queryGroupByGroupGuid( GUID groupGuid ) { return this.groupNodeManipulator.queryGroup( groupGuid ); }

    @Override public String queryDomainNameByGuid( GUID domainGuid ) { return this.domainNodeManipulator.queryDomainNameByGuid( domainGuid ); }

    @Override public List queryAllAuthorization() { return this.authorizationManipulator.queryAllAuthorization(); }

    @Override public List queryAllRoles() { return this.roleManipulator.queryAllRoles(); }

    @Override public Account queryAccountByName( String userName ) { return this.userNodeManipulator.queryAccountByName( userName ); }

    @Override public void updateAccount( Account account ) { this.userNodeManipulator.update( account ); }

    @Override public Account queryAccountByUserGuid( GUID userGuid ) { return this.userNodeManipulator.queryAccountByUserGuid( userGuid ); }

    @Override public Privilege queryPrivilegeByGuid( GUID guid ) { return this.privilegeManipulator.queryPrivilege( guid ); }

    @Override public void updatePrivilege( Privilege privilege ) { this.privilegeManipulator.update( privilege ); }

    @Override public void updateAuthorization( GUID guid72 ) { this.authorizationManipulator.update( guid72 ); }

    @Override public void removeRole( int id ) { /* this.roleManipulator.removeRole( id ); */ }

    @Override public List<GenericAuthorization> queryAuthorizationByUserGuid( GUID userGuid ) { return this.authorizationManipulator.queryAuthorizationByUserGuid( userGuid ); }

    @Override public Domain queryDomainByGuid( GUID domainGuid ) { return this.domainNodeManipulator.queryDomain( domainGuid ); }

    @Override public void updateDomain( Domain domain ) { this.domainNodeManipulator.update( domain ); }

    @Override public void updateGroup( Group group ) { this.groupNodeManipulator.update( group ); }

    protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) {
        List<GUID> guids = manipulator.getGuidsByName( childName );
        for ( GUID guid : guids ) {
            List<GUID> ps = this.imperialTree.fetchParentGuids( guid );
            if ( ps.contains( parentGuid ) ) {
                return true;
            }
        }
        return false;
    }
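    /*
     * Permission-check sketch (illustrative, hypothetical values): as
     * implemented in hasPermission(..) above, a privilege code matches by
     * plain substring containment within the stored privilege token.
     *
     *     GenericAuthorization auth = new GenericAuthorization();
     *     auth.setUserGuid( userGuid );
     *     auth.setPrivilegeToken( "account.read;account.write" );
     *     manager.insertAuthorization( auth );
     *     boolean ok = manager.hasPermission( userGuid, "account.read" ); // true
     */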
    protected String getNS( GUID guid, String szSeparator ) {
        String path = this.imperialTree.getCachePath( guid );
        if ( path != null ) {
            return path;
        }

        // Walk up the tree, assembling the path from node names, then cache the result.
        ImperialTreeNode node = this.imperialTree.getNode( guid );
        String assemblePath = this.getNodeName( node );
        while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ) {
            List<GUID> parentGuids = node.getParentGUIDs();
            for ( int i = 0; i < parentGuids.size(); ++i ) {
                if ( parentGuids.get( i ) != null ) {
                    node = this.imperialTree.getNode( parentGuids.get( i ) );
                    break;
                }
            }
            String nodeName = this.getNodeName( node );
            assemblePath = nodeName + szSeparator + assemblePath;
        }
        this.imperialTree.insertCachePath( guid, assemblePath );
        return assemblePath;
    }

    private String getNodeName( ImperialTreeNode node ) {
        UOI type = node.getType();
        TreeNode newInstance = (TreeNode) type.newInstance();
        TreeNodeOperator operator = this.operatorFactory.getOperator( newInstance.getMetaType() );
        TreeNode treeNode = operator.get( node.getGuid() );
        return treeNode.getName();
    }

    private boolean allNonNull( List<GUID> list ) {
        return list.stream().noneMatch( Objects::isNull );
    }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ACNodeAllotment.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.system.prototype.Pinenut;

public interface ACNodeAllotment extends Pinenut {
    Domain newDomain();
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Account.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public interface Account extends FileElement {
    String getNickName();
    void setNickName( String nickName );

    String getKernelCredential();
    void setKernelCredential( String kernelCredential );

    GUID getCredentialGuid();
    void setCredentialGuid( GUID credentialGuid );

    String getKernelGroupType();
    void setKernelGroupType( String kernelGroupType );

    LocalDateTime getCreateTime();
    void setCreateTime( LocalDateTime createTime );

    LocalDateTime getUpdateTime();
    void setUpdateTime( LocalDateTime updateTime );

    String getRole();
    void setRole( String role );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ArchElementNode.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.hydra.account.AccountManager;
import com.pinecone.ulf.util.guid.GUIDs;

public class ArchElementNode implements ElementNode {
    protected long           enumId;
    protected String         name;
    protected GUID           guid;
    protected AccountManager accountManager;
    protected GuidAllocator  guidAllocator = GUIDs.newGuidAllocator();

    public ArchElementNode() {
        this.guid = guidAllocator.nextGUID();
    }

    public ArchElementNode( AccountManager accountManager ) {
        this.guid = this.guidAllocator.nextGUID();
        this.accountManager = accountManager;
    }

    @Override public long getEnumId() { return this.enumId; }
    @Override public void setEnumId( long enumId ) { this.enumId = enumId; }

    @Override public String getName() { return this.name; }
    @Override public void setName( String name ) { this.name = name; }

    @Override public GUID getGuid() { return this.guid; }
    @Override public void setGuid( GUID guid ) { this.guid = guid; }
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ArchFolderElementNode.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.AccountManager;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public class ArchFolderElementNode extends ArchElementNode implements FolderElement {
    public ArchFolderElementNode() {
        super();
    }

    public ArchFolderElementNode( AccountManager accountManager ) {
        super( accountManager );
    }

    @Override
    public List<ElementNode> fetchChildren() {
        List<ElementNode> elementNodes = new ArrayList<>();
        Collection<GUID> guids = this.fetchChildrenGuids();
        for ( GUID elementGuid : guids ) {
            ElementNode node = (ElementNode) this.accountManager.get( elementGuid );
            elementNodes.add( node );
        }
        return elementNodes;
    }

    @Override
    public Collection<GUID> fetchChildrenGuids() {
        return this.accountManager.fetchChildrenGuids( this.getGuid() );
    }

    @Override
    public void addChild( ElementNode child ) {
        // Adding a child whose name already exists under this node is a no-op.
        if ( this.containsChild( child.getName() ) ) {
            return;
        }
        GUID childId = this.accountManager.put( child );
        this.accountManager.addChildren( this.guid, childId );
    }

    @Override
    public boolean containsChild( String childName ) {
        return this.accountManager.containsChild( this.guid, childName );
    }
}
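/*
 * Usage sketch (illustrative): addChild(..) above is idempotent per child
 * name -- adding a name that already exists under this folder is a no-op.
 * `accountManager` and `domainNodeManipulator` are assumed to be initialized.
 *
 *     GenericDomain folder = new GenericDomain( accountManager, domainNodeManipulator );
 *     GenericAccount alice = new GenericAccount( accountManager );
 *     alice.setName( "alice" );
 *     folder.addChild( alice );
 *     boolean present = folder.containsChild( "alice" ); // true
 */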

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Authorization.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public interface Authorization extends Pinenut {
    int getEnumId();

    GUID getGuid();
    void setGuid( GUID guid );

    String getUserName();
    void setUserName( String userName );

    GUID getUserGuid();
    void setUserGuid( GUID userGuid );

    GUID getCredentialGuid();
    void setCredentialGuid( GUID credentialGuid );

    String getPrivilegeToken();
    void setPrivilegeToken( String privilegeToken );

    GUID getPrivilegeGuid();
    void setPrivilegeGuid( GUID privilegeGuid );

    LocalDateTime getCreateTime();
    void setCreateTime( LocalDateTime createTime );

    LocalDateTime getUpdateTime();
    void setUpdateTime( LocalDateTime updateTime );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Credential.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public interface Credential extends Pinenut {
    int getEnumId();

    GUID getGuid();
    void setGuid( GUID guid );

    String getName();
    void setName( String name );

    String getCredential();
    void setCredential( String credential );

    LocalDateTime getCreateTime();
    void setCreateTime( LocalDateTime createTime );

    LocalDateTime getUpdateTime();
    void setUpdateTime( LocalDateTime updateTime );

    String getType();
    void setType( String type );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Domain.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.hydra.account.source.DomainNodeManipulator;

public interface Domain extends FolderElement {
    String getDomainName();
    void setDomainName( String domainName );

    void save();
    void delete();

    void setDomainNodeManipulator( DomainNodeManipulator domainNodeManipulator );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ElementNode.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.system.ko.meta.ElementObject;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public interface ElementNode extends TreeNode, ElementObject {
    long getEnumId();
    void setEnumId( long enumId );

    String getName();
    void setName( String name );

    GUID getGuid();
    void setGuid( GUID guid );

    @Override
    default String objectCategoryName() {
        return "Account";
    }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/FileElement.java
================================================
package com.pinecone.hydra.account.entity;

public interface FileElement extends ElementNode {
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/FolderElement.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;

import java.util.Collection;

public interface FolderElement extends ElementNode {
    Collection<ElementNode> fetchChildren();
    Collection<GUID> fetchChildrenGuids();
    void addChild( ElementNode child );
    boolean containsChild( String childName );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericACNodeAllotment.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.source.UserMasterManipulator;

public class GenericACNodeAllotment implements ACNodeAllotment {
    protected AccountManager        accountManager;
    protected UserMasterManipulator userMasterManipulator;

    public GenericACNodeAllotment( AccountManager accountManager ) {
        this.accountManager        = accountManager;
        this.userMasterManipulator = (UserMasterManipulator) accountManager.getMasterTrieTree();
    }

    @Override
    public Domain newDomain() {
        return new GenericDomain( accountManager, userMasterManipulator.getDomainNodeManipulator() );
    }
}
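/*
 * Usage sketch (illustrative): the allotment is a small factory whose nodes
 * come pre-wired to their owning AccountManager, so the returned Domain can
 * be persisted directly:
 *
 *     Domain domain = manager.getAllotment().newDomain();
 *     domain.setName( "corp" );
 *     domain.save();
 */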

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericAccount.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;
import com.pinecone.hydra.account.AccountManager;

import java.time.LocalDateTime;

public class GenericAccount extends ArchElementNode implements Account {
    // Note: enumId, name and guid shadow the same-named fields declared in ArchElementNode.
    protected long          enumId;
    protected String        name;
    protected GUID          guid;
    protected String        nickName;
    protected String        kernelCredential;
    protected GUID          credentialGuid;
    protected String        kernelGroupType;
    protected LocalDateTime createTime;
    protected LocalDateTime updateTime;
    protected String        role;

    public GenericAccount() {
        super();
    }

    public GenericAccount( AccountManager accountManager ) {
        super( accountManager );
    }

    @Override public String getNickName() { return this.nickName; }
    @Override public void setNickName( String nickName ) { this.nickName = nickName; }

    @Override public String getKernelCredential() { return this.kernelCredential; }
    @Override public void setKernelCredential( String kernelCredential ) { this.kernelCredential = kernelCredential; }

    @Override public GUID getCredentialGuid() { return this.credentialGuid; }
    @Override public void setCredentialGuid( GUID credentialGuid ) { this.credentialGuid = credentialGuid; }

    @Override public String getKernelGroupType() { return this.kernelGroupType; }
    @Override public void setKernelGroupType( String kernelGroupType ) { this.kernelGroupType = kernelGroupType; }

    @Override public LocalDateTime getCreateTime() { return this.createTime; }
    @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; }

    @Override public LocalDateTime getUpdateTime() { return this.updateTime; }
    @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; }

    @Override public String getRole() { return this.role; }
    @Override public void setRole( String role ) { this.role = role; }

    @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); }

    @Override public String toString() { return this.toJSONString(); }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericAuthorization.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public class GenericAuthorization implements Authorization {
    private int           enumId;
    private GUID          guid;
    private String        userName;
    private GUID          userGuid;
    private GUID          credentialGuid;
    private String        privilegeToken;
    private GUID          privilegeGuid;
    private LocalDateTime createTime;
    private LocalDateTime updateTime;

    public GenericAuthorization() {
    }

    public GenericAuthorization( GUID userGuid, String userName, GUID credential, String privilegeToken, LocalDateTime creationTime, LocalDateTime expirationTime ) {
        this.userGuid       = userGuid;
        this.userName       = userName;
        this.credentialGuid = credential;
        this.privilegeToken = privilegeToken;
        this.createTime     = creationTime;
        this.updateTime     = expirationTime;
    }

    @Override public int getEnumId() { return this.enumId; }

    @Override public GUID getGuid() { return this.guid; }
    @Override public void setGuid( GUID guid ) { this.guid = guid; }

    @Override public String getUserName() { return this.userName; }
    @Override public void setUserName( String userName ) { this.userName = userName; }

    @Override public GUID getUserGuid() { return this.userGuid; }
    @Override public void setUserGuid( GUID userGuid ) { this.userGuid = userGuid; }

    @Override public GUID getCredentialGuid() { return this.credentialGuid; }
    @Override public void setCredentialGuid( GUID credentialGuid ) { this.credentialGuid = credentialGuid; }

    @Override public String getPrivilegeToken() { return this.privilegeToken; }
    @Override public void setPrivilegeToken( String privilegeToken ) { this.privilegeToken = privilegeToken; }

    @Override public GUID getPrivilegeGuid() { return this.privilegeGuid; }
    @Override public void setPrivilegeGuid( GUID privilegeGuid ) { this.privilegeGuid = privilegeGuid; }

    @Override public LocalDateTime getCreateTime() { return this.createTime; }
    @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; }

    @Override public LocalDateTime getUpdateTime() { return this.updateTime; }
    @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; }
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericCredential.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public class GenericCredential implements Credential {
    private int           enumId;
    private GUID          guid;
    private String        name;
    private String        credential;
    private LocalDateTime createTime;
    private LocalDateTime updateTime;
    private String        type;

    public GenericCredential() {
    }

    public GenericCredential( GUID guid, String name, String credential, LocalDateTime createTime, LocalDateTime updateTime, String type ) {
        this.guid       = guid;
        this.name       = name;
        this.credential = credential;
        this.createTime = createTime;
        this.updateTime = updateTime;
        this.type       = type;
    }

    @Override public int getEnumId() { return this.enumId; }

    @Override public GUID getGuid() { return this.guid; }
    @Override public void setGuid( GUID guid ) { this.guid = guid; }

    @Override public String getName() { return this.name; }
    @Override public void setName( String name ) { this.name = name; }

    @Override public String getCredential() { return this.credential; }
    @Override public void setCredential( String credential ) { this.credential = credential; }

    @Override public LocalDateTime getCreateTime() { return this.createTime; }
    @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; }

    @Override public LocalDateTime getUpdateTime() { return this.updateTime; }
    @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; }

    @Override public String getType() { return this.type; }
    @Override public void setType( String type ) { this.type = type; }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericDomain.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;
import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.source.DomainNodeManipulator;

public class GenericDomain extends ArchFolderElementNode implements Domain {
    protected String                domainName;
    protected DomainNodeManipulator domainNodeManipulator;

    public GenericDomain() {
        super();
    }

    public GenericDomain( AccountManager accountManager, DomainNodeManipulator domainNodeManipulator ) {
        super( accountManager );
        this.accountManager        = accountManager;
        this.domainNodeManipulator = domainNodeManipulator;
    }

    @Override public String getDomainName() { return this.domainName; }
    @Override public void setDomainName( String domainName ) { this.domainName = domainName; }

    @Override public void save() { this.accountManager.put( this ); }

    @Override public void delete() { this.accountManager.remove( this.guid ); }

    @Override public void setDomainNodeManipulator( DomainNodeManipulator domainNodeManipulator ) { this.domainNodeManipulator = domainNodeManipulator; }

    @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); }

    @Override public String toString() { return this.toJSONString(); }
}
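/*
 * Lifecycle note (illustrative): GenericDomain delegates persistence to its
 * owning AccountManager, so save() and delete() are thin wrappers:
 *
 *     domain.save();    // forwards to accountManager.put( domain )
 *     domain.delete();  // forwards to accountManager.remove( domain.getGuid() )
 */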

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericGroup.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;
import com.pinecone.hydra.account.AccountManager;

public class GenericGroup extends ArchFolderElementNode implements Group {
    protected GUID defaultPrivilegePolicyGuid;

    public GenericGroup() {
        super();
    }

    public GenericGroup( AccountManager accountManager ) {
        super( accountManager );
    }

    @Override public GUID getDefaultPrivilegePolicyGuid() { return this.defaultPrivilegePolicyGuid; }
    @Override public void setDefaultPrivilegePolicyGuid( GUID defaultPrivilegePolicyGuid ) { this.defaultPrivilegePolicyGuid = defaultPrivilegePolicyGuid; }

    @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); }

    @Override public String toString() { return this.toJSONString(); }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericPrivilege.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public class GenericPrivilege implements Privilege {
    private int           id;
    private GUID          guid;
    private String        token;
    private String        name;
    private String        privilegeCode;
    private LocalDateTime createTime;
    private LocalDateTime updateTime;
    private String        type;
    private GUID          parentPrivGuid;

    // No-args constructor
    public GenericPrivilege() {
    }

    // All-args constructor
    public GenericPrivilege( GUID guid, String token, String name, String privilegeCode, LocalDateTime createTime, LocalDateTime updateTime, String type ) {
        this.guid          = guid;
        this.token         = token;
        this.name          = name;
        this.privilegeCode = privilegeCode;
        this.createTime    = createTime;
        this.updateTime    = updateTime;
        this.type          = type;
    }

    @Override public int getId() { return id; }

    @Override public GUID getGuid() { return guid; }
    @Override public void setGuid( GUID guid ) { this.guid = guid; }

    @Override public String getToken() { return token; }
    @Override public void setToken( String token ) { this.token = token; }

    @Override public String getName() { return name; }
    @Override public void setName( String name ) { this.name = name; }

    @Override public String getPrivilegeCode() { return privilegeCode; }
    @Override public void setPrivilegeCode( String privilegeCode ) { this.privilegeCode = privilegeCode; }

    @Override public LocalDateTime getCreateTime() { return createTime; }
    @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; }

    @Override public LocalDateTime getUpdateTime() { return updateTime; }
    @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; }

    @Override public String getType() { return type; }
    @Override public void setType( String type ) { this.type = type; }

    @Override public GUID getParentPrivGuid() { return parentPrivGuid; }
    @Override public void setParentPrivGuid( GUID parentPrivGuid ) { this.parentPrivGuid = parentPrivGuid; }
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericRole.java
================================================
package com.pinecone.hydra.account.entity;

import java.time.LocalDateTime;

public class GenericRole implements Role {
    private int           id;
    private String        name;
    private String        privilegeGuids;
    private LocalDateTime createTime;
    private LocalDateTime updateTime;
    private String        type;

    // No-args constructor
    public GenericRole() {
        super();
    }

    // All-args constructor
    public GenericRole( String name, String privilegeGuids, LocalDateTime createTime, LocalDateTime updateTime, String type ) {
        this.name           = name;
        this.privilegeGuids = privilegeGuids;
        this.createTime     = createTime;
        this.updateTime     = updateTime;
        this.type           = type;
    }

    @Override public int getId() { return id; }

    @Override public String getName() { return name; }
    @Override public void setName( String name ) { this.name = name; }

    @Override public String getPrivilegeGuids() { return privilegeGuids; }
    @Override public void setPrivilegeGuids( String privilegeGuids ) { this.privilegeGuids = privilegeGuids; }

    @Override public LocalDateTime getCreateTime() { return createTime; }
    @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; }

    @Override public LocalDateTime getUpdateTime() { return updateTime; }
    @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; }

    @Override public String getType() { return type; }
    @Override public void setType( String type ) { this.type = type; }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Group.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.util.id.GUID;

public interface Group extends FolderElement {
    GUID getDefaultPrivilegePolicyGuid();
    void setDefaultPrivilegePolicyGuid( GUID defaultPrivilegePolicyGuid );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Privilege.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public interface Privilege extends Pinenut {
    int getId();

    String getPrivilegeCode();
    void setPrivilegeCode( String privilegeCode );

    String getToken();
    void setToken( String token );

    GUID getParentPrivGuid();
    void setParentPrivGuid( GUID parentPrivGuid );

    GUID getGuid();
    void setGuid( GUID guid );

    String getName();
    void setName( String name );

    LocalDateTime getCreateTime();
    void setCreateTime( LocalDateTime createTime );

    LocalDateTime getUpdateTime();
    void setUpdateTime( LocalDateTime updateTime );

    String getType();
    void setType( String type );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Role.java
================================================
package com.pinecone.hydra.account.entity;

import com.pinecone.framework.system.prototype.Pinenut;

import java.time.LocalDateTime;

public interface Role extends Pinenut {
    int getId();

    String getName();
    void setName( String name );

    String getPrivilegeGuids();
    void setPrivilegeGuids( String privilegeGuids );

    LocalDateTime getCreateTime();
    void setCreateTime( LocalDateTime createTime );

    LocalDateTime getUpdateTime();
    void setUpdateTime( LocalDateTime updateTime );

    String getType();
    void setType( String type );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/AccountServiceOperator.java
================================================
package com.pinecone.hydra.account.operator;

import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public interface AccountServiceOperator extends TreeNodeOperator {
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/AccountServiceOperatorFactory.java
================================================
package com.pinecone.hydra.account.operator;

import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.entity.Account;
import com.pinecone.hydra.account.entity.Domain;
import com.pinecone.hydra.account.entity.Group;
import com.pinecone.hydra.account.source.UserMasterManipulator;
import com.pinecone.hydra.unit.imperium.operator.OperatorFactory;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public interface AccountServiceOperatorFactory extends OperatorFactory {
    String DefaultUser   = Account.class.getSimpleName();
    String DefaultGroup  = Group.class.getSimpleName();
    String DefaultDomain = Domain.class.getSimpleName();

    void register( String typeName, TreeNodeOperator functionalNodeOperation );

    void registerMetaType( Class<?> clazz, String metaType );
    void registerMetaType( String classFullName, String metaType );
    String getMetaType( String classFullName );

    AccountServiceOperator getOperator( String typeName );

    AccountManager getUserManager();

    UserMasterManipulator getMasterManipulator();
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/ArchAccountServiceOperator.java
================================================
package com.pinecone.hydra.account.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.system.ko.UOIUtils;
import com.pinecone.hydra.unit.imperium.ImperialTreeNode;
import com.pinecone.hydra.unit.imperium.ImperialTree;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.source.UserMasterManipulator;

public abstract class ArchAccountServiceOperator implements AccountServiceOperator {
    protected AccountManager                accountManager;
    protected AccountServiceOperatorFactory factory;
    protected ImperialTree                  imperialTree;
    protected UserMasterManipulator         userMasterManipulator;

    public ArchAccountServiceOperator( UserMasterManipulator masterManipulator, AccountManager accountManager ) {
        this.accountManager        = accountManager;
        this.userMasterManipulator = masterManipulator;
        this.imperialTree          = this.accountManager.getMasterTrieTree();
    }

    protected ImperialTreeNode affirmPreinsertionInitialize( TreeNode node ) {
        GUID guid = node.getGuid();
        ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode();
        imperialTreeNode.setGuid( guid );
        imperialTreeNode.setType( UOIUtils.createLocalJavaClass( node.getClass().getName() ) );
        return imperialTreeNode;
    }

    public AccountServiceOperatorFactory getUserOperatorFactory() {
        return this.factory;
    }

    protected String getUserMetaType( TreeNode treeNode ) {
        return treeNode.className().replace( "Generic", "" );
    }
}
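/*
 * Design note: ArchAccountServiceOperator is the shared base for the concrete
 * node operators that follow (account, domain, group). Each subclass binds its
 * own node manipulator; the base contributes the tree wiring and the meta-type
 * naming convention, which simply strips the "Generic" prefix from the entity
 * class name (e.g. GenericAccount -> "Account").
 */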

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericAccountOperator.java
================================================
package com.pinecone.hydra.account.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.ImperialTreeNode;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.entity.Account;
import com.pinecone.hydra.account.source.UserMasterManipulator;
import com.pinecone.hydra.account.source.UserNodeManipulator;

import java.time.LocalDateTime;
import java.util.List;

public class GenericAccountOperator extends ArchAccountServiceOperator implements AccountServiceOperator {
    protected UserNodeManipulator userNodeManipulator;

    public GenericAccountOperator( AccountServiceOperatorFactory factory ) {
        this( factory.getMasterManipulator(), factory.getUserManager() );
        this.factory = factory; // retain the owning factory: purge(..) resolves child operators through it
        this.userNodeManipulator = this.userMasterManipulator.getUserNodeManipulator();
    }

    public GenericAccountOperator( UserMasterManipulator masterManipulator, AccountManager accountManager ) {
        super( masterManipulator, accountManager );
        this.userNodeManipulator = this.userMasterManipulator.getUserNodeManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        Account account = (Account) treeNode;
        account.setCreateTime( LocalDateTime.now() );
        account.setUpdateTime( LocalDateTime.now() );
        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( account );
        GUID guid = account.getGuid();
        this.imperialTree.insert( imperialTreeNode );
        this.userNodeManipulator.insert( account );
        return guid;
    }

    @Override
    public void purge( GUID guid ) {
        // Recursively purge children through their own meta-type operators first.
        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren( guid );
        for ( GUIDImperialTrieNode node : children ) {
            TreeNode newInstance = (TreeNode) node.getType().newInstance( new Class[]{ this.getClass() }, this );
            AccountServiceOperator operator = this.factory.getOperator( this.getUserMetaType( newInstance ) );
            operator.purge( node.getGuid() );
        }
        this.removeNode( guid );
    }

    @Override public TreeNode get( GUID guid ) { return this.userNodeManipulator.queryUser( guid ); }

    @Override public TreeNode get( GUID guid, int depth ) { return null; }

    @Override public TreeNode getAsRootDepth( GUID guid ) { return null; }

    @Override public void update( TreeNode treeNode ) { }

    @Override public void updateName( GUID guid, String name ) { }

    private void removeNode( GUID guid ) {
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.userNodeManipulator.remove( guid );
    }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericAccountOperatorFactory.java
================================================
package com.pinecone.hydra.account.operator;

import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;
import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.source.UserMasterManipulator;

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class GenericAccountOperatorFactory implements AccountServiceOperatorFactory {
    protected UserMasterManipulator         userMasterManipulator;
    protected AccountManager                accountManager;
    protected Map<String, TreeNodeOperator> registerer  = new HashMap<>();
    protected Map<String, String>           metaTypeMap = new TreeMap<>();

    public GenericAccountOperatorFactory( AccountManager accountManager, UserMasterManipulator userMasterManipulator ) {
        this.accountManager        = accountManager;
        this.userMasterManipulator = userMasterManipulator;

        this.registerer.put( DefaultUser,   new GenericAccountOperator( this ) );
        this.registerer.put( DefaultGroup,  new GenericGroupOperator( this ) );
        this.registerer.put( DefaultDomain, new GenericDomainOperator( this ) );
    }

    @Override public void register( String typeName, TreeNodeOperator functionalNodeOperation ) { this.registerer.put( typeName, functionalNodeOperation ); }

    @Override public void registerMetaType( Class<?> clazz, String metaType ) { this.registerMetaType( clazz.getName(), metaType ); }

    @Override public void registerMetaType( String classFullName, String metaType ) { this.metaTypeMap.put( classFullName, metaType ); }

    @Override public String getMetaType( String classFullName ) { return this.metaTypeMap.get( classFullName ); }

    @Override public AccountServiceOperator getOperator( String typeName ) { return (AccountServiceOperator) this.registerer.get( typeName ); }

    @Override public AccountManager getUserManager() { return this.accountManager; }

    @Override public UserMasterManipulator getMasterManipulator() { return this.userMasterManipulator; }
}
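/*
 * Extension sketch (illustrative): the factory registry is keyed by the simple
 * meta-type name, so additional node kinds can be attached after construction.
 * `ServiceOperator` and `GenericService` are hypothetical types implementing
 * TreeNodeOperator and TreeNode respectively.
 *
 *     GenericAccountOperatorFactory factory =
 *             new GenericAccountOperatorFactory( accountManager, masterManipulator );
 *     factory.register( "Service", new ServiceOperator( factory ) );
 *     factory.registerMetaType( GenericService.class, "Service" );
 *     AccountServiceOperator op = factory.getOperator( "Service" );
 */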

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericDomainOperator.java
================================================
package com.pinecone.hydra.account.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.ImperialTreeNode;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.entity.Domain;
import com.pinecone.hydra.account.source.DomainNodeManipulator;
import com.pinecone.hydra.account.source.UserMasterManipulator;

import java.util.List;

public class GenericDomainOperator extends ArchAccountServiceOperator implements AccountServiceOperator {
    protected DomainNodeManipulator domainNodeManipulator;

    public GenericDomainOperator( AccountServiceOperatorFactory factory ) {
        this( factory.getMasterManipulator(), factory.getUserManager() );
        this.factory = factory; // retain the owning factory: purge(..) resolves child operators through it
        this.domainNodeManipulator = this.userMasterManipulator.getDomainNodeManipulator();
    }

    public GenericDomainOperator( UserMasterManipulator masterManipulator, AccountManager accountManager ) {
        super( masterManipulator, accountManager );
        this.domainNodeManipulator = this.userMasterManipulator.getDomainNodeManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        Domain domain = (Domain) treeNode;
        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( domain );
        GUID guid = domain.getGuid();
        this.imperialTree.insert( imperialTreeNode );
        this.domainNodeManipulator.insert( domain );
        return guid;
    }

    @Override
    public void purge( GUID guid ) {
        // Recursively purge children through their own meta-type operators first.
        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren( guid );
        for ( GUIDImperialTrieNode node : children ) {
            TreeNode newInstance = (TreeNode) node.getType().newInstance( new Class[]{ this.getClass() }, this );
            AccountServiceOperator operator = this.factory.getOperator( this.getUserMetaType( newInstance ) );
            operator.purge( node.getGuid() );
        }
        this.removeNode( guid );
    }

    @Override public TreeNode get( GUID guid ) { return this.domainNodeManipulator.queryDomain( guid ); }

    @Override public TreeNode get( GUID guid, int depth ) { return null; }

    @Override public TreeNode getAsRootDepth( GUID guid ) { return null; }

    @Override public void update( TreeNode treeNode ) { }

    @Override public void updateName( GUID guid, String name ) { }

    private void removeNode( GUID guid ) {
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.domainNodeManipulator.remove( guid );
    }
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericGroupOperator.java
================================================
package com.pinecone.hydra.account.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.ImperialTreeNode;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.hydra.account.AccountManager;
import com.pinecone.hydra.account.entity.Group;
import com.pinecone.hydra.account.source.GroupNodeManipulator;
import com.pinecone.hydra.account.source.UserMasterManipulator;

import java.util.List;

public class GenericGroupOperator extends ArchAccountServiceOperator implements AccountServiceOperator {
    protected GroupNodeManipulator groupNodeManipulator;

    public GenericGroupOperator( AccountServiceOperatorFactory factory ) {
        this( factory.getMasterManipulator(), factory.getUserManager() );
        this.factory = factory; // retain the owning factory: purge(..) resolves child operators through it
        this.groupNodeManipulator = this.userMasterManipulator.getGroupNodeManipulator();
    }

    public GenericGroupOperator( UserMasterManipulator masterManipulator, AccountManager accountManager ) {
        super( masterManipulator, accountManager );
        this.groupNodeManipulator = this.userMasterManipulator.getGroupNodeManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        Group group = (Group) treeNode;
        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( group );
        GUID guid = group.getGuid();
        this.imperialTree.insert( imperialTreeNode );
        this.groupNodeManipulator.insert( group );
        return guid;
    }

    @Override
    public void purge( GUID guid ) {
        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren( guid );
        for ( GUIDImperialTrieNode node : children ) {
            TreeNode newInstance = (TreeNode) node.getType().newInstance( new Class[]{ this.getClass() }, this );
            AccountServiceOperator operator = this.factory.getOperator( this.getUserMetaType( newInstance ) );
            operator.purge( node.getGuid() );
        }
        this.removeNode( guid );
    }

    @Override public TreeNode get( GUID guid ) { return this.groupNodeManipulator.queryGroup( guid ); }

    @Override public TreeNode get( GUID guid, int depth ) { return null; }

    @Override public TreeNode getAsRootDepth( GUID guid ) { return null; }

    @Override public void update( TreeNode treeNode ) { }

    @Override public void updateName( GUID guid, String name ) { }

    private void removeNode( GUID guid ) {
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.groupNodeManipulator.remove( guid );
    }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/AuthorizationManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.Authorization;
import com.pinecone.hydra.account.entity.GenericAuthorization;

import java.util.List;

public interface AuthorizationManipulator extends Pinenut {
    void insert( Authorization authorization );
    void remove( GUID authorizationGuid );
    void update( GUID authorizationGuid );
    Authorization queryCredential( GUID authorizationGuid );
    List<GenericAuthorization> queryAuthorizationByUserGuid( GUID userGuid );
    void removeAuthorizationByUserGuid( GUID userGuid );
    List<GenericAuthorization> queryAllAuthorization();
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/CredentialManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.Credential;

public interface CredentialManipulator extends Pinenut {
    void insert( Credential credential );
    void remove( GUID credentialGuid );
    Credential queryCredential( GUID credentialGuid );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/DomainNodeManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.Domain;
import com.pinecone.hydra.account.entity.GenericDomain;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;

import java.util.List;

public interface DomainNodeManipulator extends GUIDNameManipulator {
    void insert( Domain domain );
    void remove( GUID domainGuid );
    Domain queryDomain( GUID domainGuid );
    List<GenericDomain> queryAllDomain();
    String queryDomainNameByGuid( GUID domainGuid );
    void update( Domain domain );
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/GroupNodeManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;
import com.pinecone.hydra.account.entity.Group;

public interface GroupNodeManipulator extends GUIDNameManipulator {
    void insert( Group group );
    void remove( GUID groupGuid );
    Group queryGroup( GUID groupGuid );
    void update( Group group );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/PrivilegeManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.GenericPrivilege;
import com.pinecone.hydra.account.entity.Privilege;

import java.util.List;

public interface PrivilegeManipulator extends Privilege {
    void insert( Privilege privilege );
    void remove( GUID privilegeGuid );
    Privilege queryPrivilege( GUID privilegeGuid );
    List<GenericPrivilege> queryAllPrivileges();
    void update( Privilege privilege );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/RoleManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.GenericRole;
import com.pinecone.hydra.account.entity.Role;

import java.util.List;

public interface RoleManipulator extends Role {
    void insert( Role role );
    void remove( GUID roleGuid );
    Role queryRole( GUID roleGuid );
    void updateRole( GenericRole role );
    GenericRole queryRolesByUserGuid( String userGuid );
    List<GenericRole> queryAllRoles();
    void removeRoleById( int id );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/UserMasterManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;

public interface UserMasterManipulator extends KOIMasterManipulator {
    DomainNodeManipulator getDomainNodeManipulator();
    GroupNodeManipulator getGroupNodeManipulator();
    UserNodeManipulator getUserNodeManipulator();
    CredentialManipulator getCredentialManipulator();
    AuthorizationManipulator getAuthorizationManipulator();
    PrivilegeManipulator getPrivilegeManipulator();
    RoleManipulator getRoleManipulator();
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/UserNodeManipulator.java
================================================
package com.pinecone.hydra.account.source;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.Account;
import com.pinecone.hydra.account.entity.GenericAccount;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;

import java.util.List;

public interface UserNodeManipulator extends GUIDNameManipulator {
    void insert( Account account );
    void remove( GUID userGuid );
    Account queryUser( GUID userGuid );
    List<GenericAccount> queryAllAccount();
    GenericAccount queryAccountByName( String userName );
    void update( Account account );
    GenericAccount queryAccountByUserGuid( GUID userGuid );
    List<GenericAccount> queryAccountsByGroup( GUID groupGuid );
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/ConfigSource.java
================================================
package com.pinecone.hydra.config;

import com.pinecone.framework.system.RuntimeSystem;
import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.config.PatriarchalConfig;

import java.io.IOException;
import java.net.URI;

public interface ConfigSource extends Pinenut {
    PatriarchalConfig getSearchScopeConfig();

    RuntimeSystem getSystem();

    PatriarchalConfig loadConfig( URI path ) throws IOException;
    PatriarchalConfig loadConfig( Object dyPath ) throws IOException;
    PatriarchalConfig loadConfigBySegmentName( String szSegName ) throws IOException;
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/LocalConfigSource.java
================================================
package com.pinecone.hydra.config;

import com.pinecone.hydra.servgram.Servgram;
import com.pinecone.framework.system.ErrorStrings;
import com.pinecone.framework.system.RuntimeSystem;
import com.pinecone.framework.util.config.PatriarchalConfig;

import java.io.IOException;
import java.net.URI;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

public class LocalConfigSource implements ConfigSource {
    protected PatriarchalConfig mSearchScopeConf;
    protected List<String>      mPathScopes;
    protected List<String>      mFileExtends;
    protected Servgram          mParentGram;
    protected RuntimeSystem     mSystem;

    @SuppressWarnings("unchecked")
    public LocalConfigSource( Servgram gram, PatriarchalConfig setupScope, PatriarchalConfig searchScope ) {
        this.mParentGram      = gram;
        this.mSystem          = this.mParentGram.parentSystem();
        this.mSearchScopeConf = searchScope;

        Object t = setupScope.get( "PathScopes" );
        if ( t instanceof List ) {
            this.mPathScopes = (List<String>) t;
        }
        else {
            this.mPathScopes = new ArrayList<>();
        }

        t = setupScope.get( "FileExtends" );
        if ( t instanceof List ) {
            this.mFileExtends = (List<String>) t;
        }
        else {
            this.mFileExtends = new ArrayList<>();
        }
    }

    @Override
    public RuntimeSystem getSystem() {
        return this.mSystem;
    }

    @Override
    public PatriarchalConfig getSearchScopeConfig() {
        return this.mSearchScopeConf;
    }

    @Override
    public PatriarchalConfig loadConfig( URI path ) throws IOException {
        String szPath = path.getPath();
        Path lp = Path.of( szPath );
        if ( lp.isAbsolute() ) {
            return this.getSearchScopeConfig().getChildFromPath( lp );
        }
        return this.loadConfig( lp );
    }

    @Override
    public PatriarchalConfig loadConfig( Object dyPath ) throws IOException {
        if ( dyPath instanceof Path ) {
            return this.loadConfig( (Path) dyPath );
        }
        else if ( dyPath instanceof URI ) {
            return this.loadConfig( (URI) dyPath );
        }
        else if ( dyPath instanceof String ) {
            return this.loadConfig( Path.of( (String) dyPath ) );
        }
        return this.loadConfig( Path.of( dyPath.toString() ) );
    }

    public PatriarchalConfig loadConfig( Path path ) throws IOException {
        try {
            return this.getSearchScopeConfig().getChildFromPath( path );
        }
        catch ( IOException e ) {
            // Fall back to each configured path scope, keeping the last failure as the cause.
            IOException ie = null;
            for ( String sp : this.mPathScopes ) {
                try {
                    return this.getSearchScopeConfig().getChildFromPath( Path.of( sp ).resolve( path ) );
                }
                catch ( IOException e1 ) {
                    ie = e1;
                }
            }
            if ( ie != null ) {
                throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + "What-> '" + path + "'", ie );
            }
        }
        throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + "What-> '" + path + "'" );
    }
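    /*
     * Resolution-order sketch (illustrative): loadConfig(Path) above first
     * tries the relative path directly against the search-scope config, then
     * each configured "PathScopes" prefix in order, and throws only after all
     * scopes have failed. With hypothetical scopes [ "conf", "conf/local" ]:
     *
     *     source.loadConfig( Path.of( "db.json" ) );
     *     // tries: db.json -> conf/db.json -> conf/local/db.json
     */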
    @Override
    public PatriarchalConfig loadConfigBySegmentName( String szSegName ) throws IOException {
        IOException ie = null;
        for ( String sfe : this.mFileExtends ) {
            try {
                return this.loadConfig( Path.of( szSegName + "." + sfe ) );
            }
            catch ( IOException e1 ) {
                ie = e1;
            }
        }
        throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + "Segment-> '" + szSegName + "'", ie );
    }
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/MapConfigReinterpreter.java
================================================
package com.pinecone.hydra.config;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.unit.MultiScopeMap;

import java.util.Collection;
import java.util.Map;

public interface MapConfigReinterpreter extends Pinenut {
    MultiScopeMap getPrimaryScope();
    void setPrimaryScope( MultiScopeMap scope );

    Collection<String> getExcludeKeys();
    void addExcludeKey( String szKey );
    void addExcludeKeys( Collection<String> keys );
    void removeExcludeKey( String szKey );

    String getKeyWordsToken();
    void setKeyWordsToken( String szToken );

    void reinterpret( Map<String, Object> that );
    void reinterpret( Map<String, Object> that, MultiScopeMap scope );
    void reinterpretByBasicKeyWordsScope( Map<String, Object> that, MultiScopeMap keyWordsScope );
    void reinterpretByLineage( Map<String, Object> that, Object parent );
}


================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/ScopedMapConfigReinterpreter.java
================================================
package com.pinecone.hydra.config;

import com.pinecone.framework.unit.MultiScopeMap;
import com.pinecone.framework.unit.PrecedeMultiMaptron;
import com.pinecone.framework.unit.PrecedeMultiScopeMap;
import com.pinecone.framework.unit.MultiScopeMaptron;
import com.pinecone.framework.util.template.TemplateParser;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.TreeMap;

public class ScopedMapConfigReinterpreter implements MapConfigReinterpreter {
    protected MultiScopeMap mPrimaryScope;
    protected Set<String>   mExcludeKeys;
    protected String        mszKeyWordsToken;

    public ScopedMapConfigReinterpreter( MultiScopeMap scopeMap, String szKeyWordsToken ) {
        this.mPrimaryScope    = scopeMap;
        this.mExcludeKeys     = new TreeSet<>();
        this.mszKeyWordsToken = szKeyWordsToken;
    }

    public ScopedMapConfigReinterpreter( MultiScopeMap scopeMap ) {
        this( scopeMap, "KeyWords" );
    }

    @Override public MultiScopeMap getPrimaryScope() { return this.mPrimaryScope; }
    @Override public void setPrimaryScope( MultiScopeMap scope ) { this.mPrimaryScope = scope; }

    @Override public Collection<String> getExcludeKeys() { return this.mExcludeKeys; }
    @Override public void addExcludeKey( String szKey ) { this.mExcludeKeys.add( szKey ); }
    @Override public void addExcludeKeys( Collection<String> keys ) { this.mExcludeKeys.addAll( keys ); }
    @Override public void removeExcludeKey( String szKey ) { this.mExcludeKeys.remove( szKey ); }

    @Override public String getKeyWordsToken() { return this.mszKeyWordsToken; }
    @Override public void setKeyWordsToken( String szToken ) { this.mszKeyWordsToken = szToken; }

    @SuppressWarnings("unchecked")
    protected Object reinterpretVal( Object key, Object val, Map<String, Object> scope ) {
        if ( val instanceof String ) {
            String szVal = (String) val;
            TemplateParser parser = new TemplateParser( szVal, scope );
            return parser.eval();
        }
        else if ( val instanceof Map || val instanceof List ) {
            Map           previousThisScope = null;
            Object        previousSupper    = null;
            Object        previousThis      = null;
            MultiScopeMap kwFields          = null;
            if ( scope instanceof MultiScopeMap ) {
                // Retrieving keyword fields.
                MultiScopeMap ms = (MultiScopeMap) scope;
                previousThisScope = ms.thisScope();
                if ( ms instanceof PrecedeMultiScopeMap ) {
                    kwFields = ((PrecedeMultiScopeMap) ms).getPrecedeScope();
                }
                else {
                    kwFields = ms.getScopeByNS( this.mszKeyWordsToken );
                }
                if ( kwFields != null ) {
                    previousSupper = kwFields.get( "super" );
                    previousThis   = kwFields.get( "this" );
                    kwFields.put( "this",  val );
                    kwFields.put( "super", previousThis );
                }
            }

            if ( val instanceof Map ) {
                if ( scope instanceof MultiScopeMap ) {
                    ((MultiScopeMap) scope).setThisScope( (Map) val );
                }
                this.reinterpretObject( (Map<String, Object>) val, scope );
            }
            else {
                this.reinterpretList( (List<Object>) val, scope );
            }

            // Restoring previous scope.
            if ( scope instanceof MultiScopeMap && kwFields != null ) {
                MultiScopeMap ms = (MultiScopeMap) scope;
                kwFields.put( "super", previousSupper );
                kwFields.put( "this",  previousThis );
                ms.setThisScope( previousThisScope );
            }
        }
        return null;
    }

    protected void reinterpretList( List<Object> that, Map<String, Object> scope ) {
        int idx = 0;
        for ( Object val : that ) {
            Object nv = this.reinterpretVal( idx, val, scope );
            if ( nv != null ) {
                that.set( idx, nv );
            }
            ++idx;
        }
    }

    protected void reinterpretObject( Map<String, Object> that, Map<String, Object> scope ) {
        for ( Map.Entry<String, Object> kv : that.entrySet() ) {
            if ( this.mExcludeKeys.contains( kv.getKey() ) ) {
                continue;
            }
            Object nv = this.reinterpretVal( kv.getKey(), kv.getValue(), scope );
            if ( nv != null ) {
                that.put( kv.getKey(), nv );
            }
        }
    }

    @Override
    public void reinterpret( Map<String, Object> that ) {
        this.reinterpret( that, this.mPrimaryScope );
    }

    @Override
    @SuppressWarnings("unchecked")
    public void reinterpret( Map<String, Object> that, MultiScopeMap scope ) {
        this.reinterpretObject( that, scope );
    }
PrecedeMultiMaptron scope = new PrecedeMultiMaptron<>( that ); //scope.addParent( keyWords.setName( "KeyWords" ) ); scope.setPrecedeScope( keyWordsScope.setName( this.mszKeyWordsToken ) ); scope.addParent( this.getPrimaryScope() ); keyWordsScope.put( "__scope__" , (Object) scope ); this.reinterpretObject( that, scope ); } @Override public void reinterpretByLineage( Map that, Object parent ) { MultiScopeMap keyWords = new MultiScopeMaptron<>( new TreeMap<>() ); keyWords.put( "this" , that ); keyWords.put( "super" , parent ); this.reinterpretByBasicKeyWordsScope( that, keyWords ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/GenericKOMRegistry.java ================================================ package com.pinecone.hydra.registry; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.registry.entity.DefaultPropertyConverter; import com.pinecone.hydra.registry.entity.DefaultTextValueConverter; import com.pinecone.hydra.registry.entity.ElementNode; import com.pinecone.hydra.registry.entity.GenericNamespace; import com.pinecone.hydra.registry.entity.GenericProperties; import com.pinecone.hydra.registry.entity.GenericTextFile; import com.pinecone.hydra.registry.entity.GenericTextValue; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.Property; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.registry.entity.TextFile; import com.pinecone.hydra.registry.entity.TextValue; import com.pinecone.hydra.registry.entity.TypeConverter; import com.pinecone.hydra.registry.operator.RegistryNodeOperator; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree; import com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition; import com.pinecone.hydra.system.ko.kom.KOMSelector; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.kom.StandardPathSelector; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.registry.operator.GenericRegistryOperatorFactory; import com.pinecone.hydra.registry.operator.RegistryOperatorFactory; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator; import com.pinecone.hydra.registry.source.RegistryNSNodeManipulator; import com.pinecone.hydra.registry.source.RegistryPropertiesManipulator; import com.pinecone.hydra.registry.source.RegistryTextFileManipulator; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.ulf.util.guid.GUIDs; import java.io.StringReader; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; /** * Pinecone Ursus For Java Uniform KOMRegistry * Author: Harald.E (Dragon King), Ken * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
* ***************************************************************************************** * Uniform Distribute Kernel Object Model Registry (Config KOM Registry) * ***************************************************************************************** */ public class GenericKOMRegistry extends ArchReparseKOMTree implements KOMRegistry { protected RegistryMasterManipulator registryMasterManipulator; protected RegistryPropertiesManipulator registryPropertiesManipulator; protected RegistryTextFileManipulator registryTextFileManipulator; protected RegistryConfigNodeManipulator configNodeManipulator; protected RegistryNSNodeManipulator namespaceNodeManipulator; protected TypeConverter propertyTypeConverter; protected TypeConverter textValueTypeConverter; public GenericKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMRegistry parent, String name, @Nullable GuidAllocator guidAllocator ){ // Phase [1] Construct system. super( superiorProcess, masterManipulator, KernelRegistryConfig, parent, name, guidAllocator ); // Phase [2] Construct fundamentals. this.registryMasterManipulator = (RegistryMasterManipulator) masterManipulator; this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); // Phase [3] Construct manipulators. this.registryPropertiesManipulator = this.registryMasterManipulator.getPropertiesManipulator(); this.registryTextFileManipulator = this.registryMasterManipulator.getTextFileManipulator(); this.configNodeManipulator = this.registryMasterManipulator.getConfigNodeManipulator(); this.namespaceNodeManipulator = this.registryMasterManipulator.getNSNodeManipulator(); this.operatorFactory = new GenericRegistryOperatorFactory( this, this.registryMasterManipulator ); // Phase [4] Construct selectors. this.pathSelector = new StandardPathSelector( this.pathResolver, this.imperialTree, this.namespaceNodeManipulator, new GUIDNameManipulator[] { this.configNodeManipulator } ); // Warning: ReparseKOMTreeAddition must be constructed only after `pathSelector` has been constructed. this.mReparseKOM = new GenericReparseKOMTreeAddition( this ); // Phase [5] Construct misc. 
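        /*
         * Construction-order note: the phases above are order-sensitive; the warning on
         * ReparseKOMTreeAddition (it must follow `pathSelector`) is the hard constraint.
         * A minimal bootstrap sketch, assuming an already configured KOIMappingDriver
         * named `driver` (obtaining the driver is outside this file):
         *
         *     KOMRegistry registry = new GenericKOMRegistry( driver );
         *     Properties props = registry.affirmProperties( "game/minecraft/server" );
         *     props.put( "max-players", 20 );
         */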
this.propertyTypeConverter = new DefaultPropertyConverter(); this.textValueTypeConverter = new DefaultTextValueConverter(); } public GenericKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMRegistry parent, String name ){ this ( superiorProcess, masterManipulator, parent, name, null ); } public GenericKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator ){ this( superiorProcess, masterManipulator, null, KOMRegistry.class.getSimpleName() ); } public GenericKOMRegistry( Processum superiorProcess ) { this( superiorProcess, null ); } public GenericKOMRegistry( KOIMappingDriver driver ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator() ); } public GenericKOMRegistry( KOIMappingDriver driver, KOMRegistry parent, String name ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name ); } @Override public RegistryConfig getConfig() { return (RegistryConfig) this.kernelObjectConfig; } public RegistryOperatorFactory getOperatorFactory() { return (RegistryOperatorFactory) this.operatorFactory; } @Override public void setPropertyTypeConverter( TypeConverter propertyTypeConverter ) { this.propertyTypeConverter = propertyTypeConverter; } @Override public void setTextValueTypeConverter( TypeConverter textValueTypeConverter ) { this.textValueTypeConverter = textValueTypeConverter; } @Override public TypeConverter getTextValueTypeConverter() { return this.textValueTypeConverter; } @Override public TypeConverter getPropertyTypeConverter() { return this.propertyTypeConverter; } @Override public RegistryConfig getRegistryConfig() { return this.getConfig(); } @Override protected RegistryNodeOperator getOperatorByGuid( GUID guid ) { return (RegistryNodeOperator) super.getOperatorByGuid( guid ); } @Override public RegistryTreeNode get( GUID guid ) { return (RegistryTreeNode) super.get( guid ); } @Override public RegistryTreeNode get( GUID guid, int depth ) { return (RegistryTreeNode) super.get( guid, depth ); } @Override public RegistryTreeNode getAsRootDepth( GUID guid ) { return (RegistryTreeNode) super.getAsRootDepth( guid ); } @Override public ElementNode queryElement( String path ){ //GUID guid = this.distributedConfTree.queryGUIDByPath( path ); GUID guid = this.queryGUIDByPath( path ); if( guid != null ) { return (ElementNode) this.get( guid ); } return null; } @Override public Properties getProperties( GUID guid ) { return this.get( guid ).evinceProperties(); } @Override public Properties getProperties( String path ) { GUID guid = this.queryGUIDByPath( path ); if( guid == null ) { return null; } return this.getProperties( guid ); } @Override public Namespace getNamespace( GUID guid ) { return this.get( guid ).evinceNamespace(); } @Override public Namespace getNamespace( String path ){ GUID guid = this.queryGUIDByPath( path ); if( guid == null ) { return null; } return this.getNamespace( guid ); } @Override public List fetchProperties( GUID guid ) { Properties properties = this.getProperties( guid ); if( properties != null ) { return this.registryPropertiesManipulator.getProperties( guid, properties ); } return null; } @Override public List fetchProperties( String path ) { GUID guid = this.queryGUIDByPath( path ); if( guid == null ) { return null; } return this.fetchProperties( guid ); } @Override public TextValue getTextValue( GUID guid ) { return this.registryTextFileManipulator.getTextValue( guid ); } @Override public TextValue getTextValue( String path ) { GUID guid = this.queryGUIDByPath( path ); if( 
guid == null ) { return null; } return this.getTextValue( guid ); }

    @Override
    public void putProperty( Property property, GUID configNodeGuid ) {
        property.setGuid( configNodeGuid );
        property.setCreateTime( LocalDateTime.now() );
        property.setUpdateTime( LocalDateTime.now() );
        this.registryPropertiesManipulator.insert( property );
    }

    @Override
    public void updateProperty( @Nullable GUID configNodeGuid, Property property ) {
        if( configNodeGuid != null ) {
            property.setGuid( configNodeGuid );
        }
        property.setUpdateTime( LocalDateTime.now() );
        this.registryPropertiesManipulator.update( property );
    }

    @Override
    public void updateTextValue( TextValue textValue, GUID configNodeGuid ) {
        textValue.setGuid( configNodeGuid );
        textValue.setUpdateTime( LocalDateTime.now() );
        this.registryTextFileManipulator.update( textValue );
    }

    @Override
    public void updateTextValue( GUID guid, String text, String type ) {
        TextValue textValue = GenericTextValue.newUpdateTextValue( guid, text, type );
        this.registryTextFileManipulator.update( textValue );
    }

    @Override
    public void removeProperty( GUID guid, String key ) { this.registryPropertiesManipulator.remove( guid, key ); }

    @Override
    public void removeTextValue( GUID guid ) { this.registryTextFileManipulator.remove( guid ); }

    @Override
    public void setDataAffinityGuid( GUID childGuid, GUID parentGuid ) { this.configNodeManipulator.setDataAffinityGuid( childGuid, parentGuid ); }

    @Override
    public List selectByName( String name ) {
        List nodes = this.namespaceNodeManipulator.getGuidsByName( name );
        ArrayList configNodes = new ArrayList<>();
        for( GUID guid : nodes ) {
            TreeNode treeNode = this.get( guid );
            configNodes.add( treeNode );
        }
        return configNodes;
    }

    protected GUID[] assertCopyMove ( String sourcePath, String destinationPath ) throws IllegalArgumentException {
        GUID sourceGuid = this.queryGUIDByPath( sourcePath );
        if( sourceGuid == null ) {
            throw new IllegalArgumentException( "Undefined source '" + sourcePath + "'" );
        }
        GUID destinationGuid = this.queryGUIDByPath( destinationPath );
        // Assert existence first: probing the node type of an undefined destination would dereference null.
        if( destinationGuid == null ) {
            throw new IllegalArgumentException( "Undefined destination '" + destinationPath + "'" );
        }
        if( !this.namespaceNodeManipulator.isNamespaceNode( destinationGuid ) ) {
            throw new IllegalArgumentException( "Illegal destination '" + destinationPath + "', should be namespace." );
        }
        // GUIDs are value objects; compare by equals rather than reference identity.
        if( sourceGuid.equals( destinationGuid ) ) {
            throw new IllegalArgumentException( "Cyclic path detected '" + sourcePath + "'" );
        }
        return new GUID[] { sourceGuid, destinationGuid };
    }

    @Override
    public void moveTo( String sourcePath, String destinationPath ) throws IllegalArgumentException {
        GUID[] pair = this.assertCopyMove( sourcePath, destinationPath );
        GUID sourceGuid      = pair[ 0 ];
        GUID destinationGuid = pair[ 1 ];
        this.imperialTree.moveTo( sourceGuid, destinationGuid );
        this.imperialTree.removeCachePath( sourceGuid );
    }

    @Override
    public void move( String sourcePath, String destinationPath ) {
        GUID sourceGuid = this.assertPath( sourcePath, "source" );
        List sourParts = this.pathResolver.resolvePathParts( sourcePath );
        List destParts = this.pathResolver.resolvePathParts( destinationPath );
        String szLastDestTarget = destParts.get( destParts.size() - 1 );
        sourcePath      = sourcePath.trim();
        destinationPath = destinationPath.trim();
        // Case1: Move "game/terraria/npc" => "game/minecraft/npc", which has the same dest name.
        // Case1-1: Move "game/terraria/npc/" => "game/minecraft/npc/"
        // Case1-2: Move "game/terraria/npc/." => "game/minecraft/npc/."
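        // Quick reference for the dispatch below, using the file's own example paths:
        //   move( "game/terraria/npc",  "game/minecraft/npc" )         -> Case1: re-parent, name preserved
        //   move( "game/terraria/npc",  "game/minecraft/character/" )  -> Case2: re-parent under affirmed namespace
        //   move( "game/terraria/npc/", "game/minecraft/character" )   -> Case3: children moved one by one,
        //                                                                 then the emptied source node is dropped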
if( sourParts.get( sourParts.size() - 1 ).equals( szLastDestTarget ) || szLastDestTarget.equals( "." ) || ( sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) ) ) { destParts.remove( destParts.size() - 1 ); String szParentPath = this.pathResolver.assemblePath( destParts ); destParts.add( szLastDestTarget ); // Move to, which has the same name or explicit current dir `.`. this.moveTo( sourcePath, szParentPath ); } // Case 2: "game/terraria/npc" => "game/minecraft/character/" || "game/minecraft/character/." // game/terraria/npc => game/minecraft/character/npc else if ( !sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && ( destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) || destinationPath.endsWith( "." ) ) ) { Namespace target = this.affirmNamespace( destinationPath ); this.imperialTree.moveTo( sourceGuid, target.getGuid() ); } // Case3: Move "game/terraria/npc" => "game/minecraft/character", move all children therein. // game/terraria/npc/f1 => game/minecraft/character/f1 // game/terraria/npc/f2 => game/minecraft/character/f2 // etc. else { // Case3-1: Is config or other none namespace node. // Move "game/terraria/file" => "game/minecraft/dir". // Case3-2: "game/terraria/npc/" => "game/minecraft/character" // Eq.Case2: Move "game/terraria/npc" => "game/minecraft/character", if( !this.namespaceNodeManipulator.isNamespaceNode( sourceGuid ) ) { Namespace target = this.affirmNamespace( destinationPath ); this.imperialTree.moveTo( sourceGuid, target.getGuid() ); } else { List children = this.getChildren( sourceGuid ); if( !children.isEmpty() ) { Namespace target = this.affirmNamespace( destinationPath ); for( TreeNode node : children ) { this.imperialTree.moveTo( node.getGuid(), target.getGuid() ); } } } this.imperialTree.removeTreeNodeOnly( sourceGuid ); } this.imperialTree.removeCachePath( sourceGuid ); } @Override public void copyTo( String sourcePath, String destinationPath ) throws IllegalArgumentException { GUID[] pair = this.assertCopyMove( sourcePath, destinationPath ); GUID sourceGuid = pair[ 0 ]; GUID destinationGuid = pair[ 1 ]; this.get( sourceGuid ).copyTo( destinationGuid ); } @Override public void copy( String sourcePath, String destinationPath ) { GUID sourceGuid = this.assertPath( sourcePath, "source" ); List sourParts = this.pathResolver.resolvePathParts( sourcePath ); List destParts = this.pathResolver.resolvePathParts( destinationPath ); sourcePath = sourcePath.trim(); destinationPath = destinationPath.trim(); String szLastDestTarget = destParts.get( destParts.size() - 1 ); // Case1: Copy "game/terraria/npc" => "game/minecraft/npc", which has the same dest name. // Case1-1: Copy "game/terraria/npc/" => "game/minecraft/npc/" // Case1-2: Copy "game/terraria/npc/." => "game/minecraft/npc/." if( sourParts.get( sourParts.size() - 1 ).equals( szLastDestTarget ) || szLastDestTarget.equals( "." ) || ( sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) ) ) { // Just return, copy to itself. return; } // Case 2: "game/terraria/npc" => "game/minecraft/character/" || "game/minecraft/character/." // game/terraria/npc => game/minecraft/character/npc if ( !sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && ( destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) || destinationPath.endsWith( "." 
) ) ) { this.copyTo( sourcePath, destinationPath ); } // Case3: Copy "game/terraria/npc" => "game/minecraft/character", copy all children therein. // game/terraria/npc/f1 => game/minecraft/character/f1 // game/terraria/npc/f2 => game/minecraft/character/f2 // etc. else { // Case3-1: Is config or other none namespace node. // Copy "game/terraria/file" => "game/minecraft/dir". // Case3-2: "game/terraria/npc/" => "game/minecraft/character" // Eq.Case2: Copy "game/terraria/npc" => "game/minecraft/character", if( !this.namespaceNodeManipulator.isNamespaceNode( sourceGuid ) ) { Namespace target = this.affirmNamespace( destinationPath ); this.get( sourceGuid ).copyTo( target.getGuid() ); } else { List children = this.getChildren( sourceGuid ); if( !children.isEmpty() ) { Namespace target = this.affirmNamespace( destinationPath ); for( TreeNode node : children ) { RegistryTreeNode treeNode = (RegistryTreeNode) node; treeNode.copyTo( target.getGuid() ); } } } } } @Override public List getAllTreeNode() { List nameSpaceNodes = this.namespaceNodeManipulator.dumpGuid(); List confNodes = this.configNodeManipulator.dumpGuid(); ArrayList treeNodes = new ArrayList<>(); for (GUID guid : nameSpaceNodes){ TreeNode treeNode = this.get(guid); treeNodes.add(treeNode); } for ( GUID guid : confNodes ){ TreeNode treeNode = this.get(guid); treeNodes.add(treeNode); } return treeNodes; } @SuppressWarnings( "unchecked" ) public List fetchRoot() { return (List) super.fetchRoot(); } // TODO, Unchecked type affirmed. protected RegistryTreeNode affirmTreeNodeByPath( String path, Class cnSup, Class nsSup ) { String[] parts = this.pathResolver.segmentPathParts( path ); String currentPath = ""; GUID parentGuid = GUIDs.Dummy128(); RegistryTreeNode node = this.queryElement( path ); if( node != null ) { return node; } RegistryTreeNode ret = null; for( int i = 0; i < parts.length; ++i ){ currentPath = currentPath + ( i > 0 ? 
this.getConfig().getPathNameSeparator() : "" ) + parts[ i ]; node = this.queryElement( currentPath ); if ( node == null){ if ( i == parts.length - 1 && cnSup != null ){ ConfigNode configNode = (ConfigNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } ); configNode.setName( parts[i] ); GUID guid = this.put( configNode ); this.affirmOwnedNode( parentGuid, guid ); return configNode; } else { Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } ); namespace.setName(parts[i]); GUID guid = this.put(namespace); if ( i != 0 ){ this.affirmOwnedNode( parentGuid, guid ); parentGuid = guid; } else { parentGuid = guid; } ret = namespace; } } else { parentGuid = node.getGuid(); } } return ret; } @Override public Namespace affirmNamespace ( String path ) { return (Namespace) this.affirmTreeNodeByPath( path, null, GenericNamespace.class ); } @Override public Properties affirmProperties ( String path ) { return (Properties) this.affirmTreeNodeByPath( path, GenericProperties.class, GenericNamespace.class ); } @Override public TextFile affirmTextConfig ( String path ) { return (TextFile) this.affirmTreeNodeByPath( path, GenericTextFile.class, GenericNamespace.class ); } @Override public Properties putProperties( String path, Map properties ) { Properties pro = this.affirmProperties( path ); pro.puts( properties ); return pro; } @Override public TextFile putTextValue(String path, String type, String value ) { TextFile pro = this.affirmTextConfig( path ); pro.put( new GenericTextValue( pro.getGuid(), value, type ) ); return pro; } @Override public void copyPropertiesTo( GUID sourceGuid, GUID destinationGuid ) { this.registryPropertiesManipulator.copyPropertiesTo( sourceGuid, destinationGuid ); } @Override public void copyTextValueTo( GUID sourceGuid, GUID destinationGuid ) { this.registryTextFileManipulator.copyTextValueTo( sourceGuid, destinationGuid ); } @Override public void putTextValue( GUID guid, String text, String format ){ GenericTextValue genericTextValue = new GenericTextValue( guid, text, format ); this.registryTextFileManipulator.insert( genericTextValue ); } @Override public ConfigNode getConfigNode( GUID guid ) { RegistryTreeNode p = this.get( guid ); ConfigNode cn = p.evinceConfigNode() ; if( cn != null ) { return cn; } return null; } private String getNodeName( ImperialTreeNode node ) { UOI type = node.getType(); TreeNode newInstance = (TreeNode)type.newInstance(); TreeNodeOperator operator = this.operatorFactory.getOperator(newInstance.getMetaType()); TreeNode treeNode = operator.get(node.getGuid()); return treeNode.getName(); } protected KOMSelector newKOMSelector( String szSelector ) { return new RegistryJPathSelector( new StringReader( szSelector ), this.pathResolver, this, this.namespaceNodeManipulator, new GUIDNameManipulator[] { this.configNodeManipulator } ); } @Override public Object querySelector( String szSelector ) { return this.newKOMSelector( szSelector ).querySelector( null ) ; } @Override public List querySelectorAll( String szSelector ) { return this.newKOMSelector( szSelector ).querySelectorAll( null ) ; } @Override public Object querySelectorJ( String szSelector ) { return this.newKOMSelector( szSelector ).querySelectorJ( null ) ; } private boolean allNonNull( List list ) { return list.stream().noneMatch( Objects::isNull ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/GenericRenderKOMRegistry.java 
================================================ package com.pinecone.hydra.registry; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.template.UTRAlmondProvider; import com.pinecone.framework.util.template.UniformTemplateRenderer; import com.pinecone.hydra.registry.render.RenderConfigNode; import com.pinecone.hydra.registry.render.RenderRegistryTreeNode; import com.pinecone.hydra.registry.render.RenderTextValue; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; public class GenericRenderKOMRegistry extends GenericKOMRegistry implements RenderDistributeRegistry { protected UniformTemplateRenderer mUniformTemplateRenderer; private UniformTemplateRenderer renderer; public GenericRenderKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator ){ super( superiorProcess, masterManipulator ); this.renderer = new UTRAlmondProvider(); } @Override public RenderRegistryTreeNode getAsRootDepth( GUID guid ) { return (RenderRegistryTreeNode) this.getOperatorByGuid( guid ).getAsRootDepth( guid ); } @Override public RenderConfigNode getConfigNode( GUID guid ) { return (RenderConfigNode) super.getConfigNode(guid); } @Override public RenderTextValue getTextValue( GUID guid ) { return (RenderTextValue) this.registryTextFileManipulator.getTextValue(guid); } @Override public UniformTemplateRenderer getRenderer() { return this.renderer; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/KOMRegistry.java ================================================ package com.pinecone.hydra.registry; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.Property; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.registry.entity.TextValue; import com.pinecone.hydra.system.ko.kom.ReparseKOMTree; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.util.List; public interface KOMRegistry extends Registry, ReparseKOMTree { @Override String getPath( GUID guid ); @Override String getFullName( GUID guid ); @Override GUID put( TreeNode treeNode ); @Override RegistryTreeNode get( GUID guid ); @Override RegistryTreeNode get( GUID guid, int depth ); @Override RegistryTreeNode getAsRootDepth( GUID guid ); Properties getProperties( GUID guid ); Namespace getNamespace( GUID guid ); @Override GUID queryGUIDByPath( String path ); @Override GUID queryGUIDByFN ( String fullName ); List fetchProperties( GUID guid ); TextValue getTextValue( GUID guid ); ConfigNode getConfigNode( GUID guid ); @Override RegistryConfig getConfig(); void putProperty( Property property, GUID configNodeGuid ); void putTextValue( GUID guid, String text, String format ); void updateProperty( @Nullable GUID configNodeGuid, Property property ); default void updateProperty( Property property ) { this.updateProperty( null, property ); } void updateTextValue( TextValue textValue, GUID configNodeGuid ); void updateTextValue( GUID guid, String text, String format ); @Override void remove( GUID guid ); @Override void removeReparseLink( GUID guid ); void removeProperty( GUID guid, String key ); void removeTextValue( GUID guid ); @Override List getChildren( GUID guid ); @Override void rename( 
GUID guid, String name );

    default void rename( String path, String name ) { this.rename( this.assertPath( path ), name ); }

    @Override
    default GUID assertPath( String path, String pathType ) throws IllegalArgumentException {
        GUID guid = this.queryGUIDByPath( path );
        if( guid == null ) {
            throw new IllegalArgumentException( "Undefined " + pathType + " '" + path + "'" );
        }
        return guid;
    }

    @Override
    default GUID assertPath( String path ) throws IllegalArgumentException { return this.assertPath( path, "path" ); }

    List getAllTreeNode();

    /** Assertion: ensure the child node is uniquely owned by the given parent. */
    @Override
    void affirmOwnedNode( GUID parentGuid, GUID childGuid );

    @Override
    void newHardLink ( GUID sourceGuid, GUID targetGuid );

    /** Set affinityParentGuid for child. */
    void setDataAffinityGuid ( GUID childGuid, GUID affinityParentGuid );

    default void setDataAffinity ( String childPath, String parentPath ) {
        GUID childGuid  = this.assertPath( childPath );
        GUID parentGuid = this.assertPath( parentPath );
        // GUIDs are value objects; compare by equals rather than reference identity.
        if( childGuid.equals( parentGuid ) ) {
            throw new IllegalArgumentException( "Cyclic path detected '" + childPath + "'" );
        }
        this.setDataAffinityGuid( childGuid, parentGuid );
    }

    @Override
    void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName );

    @Override
    void newLinkTag( String originalPath, String dirPath, String tagName );

    @Override
    void updateLinkTag( GUID tagGuid, String tagName );

    void copyPropertiesTo( GUID sourceGuid, GUID destinationGuid );

    void copyTextValueTo( GUID sourceGuid, GUID destinationGuid );
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/KernelRegistryConfig.java
================================================
package com.pinecone.hydra.registry;

import com.pinecone.hydra.system.ko.ArchKernelObjectConfig;

public class KernelRegistryConfig extends ArchKernelObjectConfig implements RegistryConfig {
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/Registry.java
================================================
package com.pinecone.hydra.registry;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import com.pinecone.hydra.registry.entity.ElementNode;
import com.pinecone.hydra.registry.entity.Namespace;
import com.pinecone.hydra.registry.entity.Properties;
import com.pinecone.hydra.registry.entity.Property;
import com.pinecone.hydra.registry.entity.RegistryTreeNode;
import com.pinecone.hydra.registry.entity.TextFile;
import com.pinecone.hydra.registry.entity.TextValue;
import com.pinecone.hydra.registry.entity.TypeConverter;
import com.pinecone.hydra.system.ko.KernelObjectInstrument;
import com.pinecone.hydra.system.ko.kom.KOMInstrument;
import com.pinecone.hydra.unit.imperium.entity.EntityNode;
import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public interface Registry extends KOMInstrument {
    RegistryConfig KernelRegistryConfig = new KernelRegistryConfig();

    RegistryConfig getRegistryConfig();

    void setPropertyTypeConverter( TypeConverter propertyTypeConverter );
    void setTextValueTypeConverter( TypeConverter textValueTypeConverter );
    TypeConverter getTextValueTypeConverter();
    TypeConverter getPropertyTypeConverter();

    ElementNode queryElement( String path );
    Properties getProperties( String path );
    Namespace getNamespace( String path );
    void remove( String path );
    Collection fetchProperties( String path );
    TextValue getTextValue( String path );

    /** Normal Tree Node or
ReparseLinkNode**/ EntityNode queryNode( String path ); ReparseLinkNode queryReparseLink( String path ); List selectByName( String name ); /** Move "game/terraria/npc" => "game/minecraft/" => "game/minecraft/npc"*/ void moveTo( String sourcePath, String destinationPath ); /** Affirm destination path existed.*/ void move( String sourcePath, String destinationPath ); /** Copy "game/terraria/npc" => "game/minecraft/" => "game/minecraft/npc"*/ void copyTo( String sourcePath, String destinationPath ); void copy( String sourcePath, String destinationPath ); List fetchRoot(); Namespace affirmNamespace ( String path ); Properties affirmProperties ( String path ); TextFile affirmTextConfig ( String path ); Properties putProperties ( String path, Map properties ); TextFile putTextValue ( String path, String format, String value ); // Return with json. Object querySelectorJ ( String szSelector ); Object querySelector ( String szSelector ); List querySelectorAll ( String szSelector ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RegistryConfig.java ================================================ package com.pinecone.hydra.registry; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface RegistryConfig extends KernelObjectConfig { } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RegistryJPathSelector.java ================================================ package com.pinecone.hydra.registry; import java.io.BufferedReader; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.util.List; import com.pinecone.framework.util.CursorParser; import com.pinecone.framework.util.GeneralStrings; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.Property; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.registry.entity.TextFile; import com.pinecone.hydra.system.ko.kom.KOMSelector; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.kom.ReparseLinkSelector; /** * RegistryJPathSelector * TODO: Advance Functions */ public class RegistryJPathSelector extends ReparseLinkSelector implements KOMSelector { protected Reader mReader; protected char mcPrevious; protected long mnCharacter; protected boolean mbUsePrevious; protected int mnParseAt ; protected int mnLineAt; protected TokenType mTokenType; protected StringBuilder mCurrentToken; protected KOMRegistry mRegistry; protected CursorParser mThisCursor; protected List mQueriedList ; enum TokenType { T_UNDEFINED, T_DELIMITER, T_IDENTIFIER, T_INTEGER, T_FLOAT, T_KEYWORD, T_TEMP, T_STRING, T_BLOCK, T_ENDLINE } public RegistryJPathSelector(Reader reader, PathResolver pathResolver, KOMRegistry registry, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) { super( pathResolver, registry.getMasterTrieTree(), dirMan, fileMans ); this.mRegistry = registry; this.mReader = (Reader)(reader.markSupported() ? 
reader : new BufferedReader(reader)); this.mCurrentToken = new StringBuilder(); this.mThisCursor = new RegistrySelectorCursorParser( this ); } protected SelectorParseException parseException( String message ) { return new SelectorParseException( message + " at " + this.mnParseAt + " [character " + this.mnCharacter + " line " + this.mnLineAt + "]", (int)this.mnParseAt ); } public void back() throws SelectorParseException { if (!this.mbUsePrevious && this.mnParseAt > 0L) { --this.mnParseAt; --this.mnCharacter; this.mbUsePrevious = true; this.mTokenType = TokenType.T_UNDEFINED; } else { throw new SelectorParseException( "Stepping back two steps is not supported" ); } } public boolean end() { return this.mTokenType == TokenType.T_ENDLINE && !this.mbUsePrevious; } public char next() throws SelectorParseException { int c; if ( this.mbUsePrevious ) { this.mbUsePrevious = false; c = this.mcPrevious; } else { try { c = this.mReader.read(); } catch ( IOException e ) { throw new SelectorParseException( e, this.mnParseAt ); } if ( c <= 0 ) { this.mTokenType = TokenType.T_ENDLINE; c = 0; } } ++this.mnParseAt; if ( this.mcPrevious == '\r' ) { ++this.mnLineAt; this.mnCharacter = (long)(c == 10 ? 0 : 1); } else if ( c == '\n' ) { this.mnCharacter = 0L; ++this.mnLineAt; } else { ++this.mnCharacter; } // if ( c != 0 ) { // this.mszNowAt = this.mszRaw.substring(this.mnParseAt); // } this.mcPrevious = (char)c; return this.mcPrevious; } public String next( int n ) throws SelectorParseException { if ( n == 0 ) { return ""; } else { char[] chars = new char[n]; for( int pos = 0; pos < n; ++pos ) { chars[pos] = this.next(); if ( this.end() ) { throw this.parseException( "Error parser XPath string with substring bounds error." ); } } return new String(chars); } } public void getNextToken() throws SelectorParseException { this.mTokenType = TokenType.T_UNDEFINED; StringBuilder temp = this.mCurrentToken; temp.setLength(0); char nextChar = this.next(); if ( this.end() ) { return; } while ( nextChar != 0 && Character.isWhitespace(nextChar) ) { nextChar = this.next(); } while ( nextChar == '\r' ) { nextChar = this.next(); if (nextChar == '\n') { nextChar = this.next(); } while ( nextChar != 0 && Character.isWhitespace(nextChar) ) { nextChar = this.next(); } } if ( nextChar == 0 ) { this.mTokenType = TokenType.T_ENDLINE; return; } boolean isDoubleQuote = true; if ( nextChar == '"' || nextChar == '\'' ) { if ( nextChar == '\'' ) { isDoubleQuote = false; } nextChar = this.next(); while ( (isDoubleQuote && nextChar != '"') || (!isDoubleQuote && nextChar != '\'') && nextChar != '\r' && nextChar != 0 ) { if ( nextChar == '\\' ) { nextChar = this.next(); GeneralStrings.transferCharParse( nextChar, this.mThisCursor, temp ); } else { this.mCurrentToken.append( nextChar ); } nextChar = this.next(); } if ( nextChar == '\r' || nextChar == 0 ) { throw this.parseException( "Unexpected End-line, with '\r' / '\0'." 
); } this.mTokenType = TokenType.T_STRING; return; } if ( "./".indexOf( nextChar ) >= 0 ) { temp.append((char) nextChar); this.mTokenType = TokenType.T_DELIMITER; return; } if ( Character.isLetter( nextChar ) || nextChar == '_' ) { while (!("./".indexOf(nextChar) >= 0 || nextChar == '\r' || nextChar == '\t' || nextChar == '\n' || nextChar == 0)) { temp.append( nextChar ); nextChar = this.next(); } if( "./".indexOf(nextChar) >= 0 ){ this.back(); } this.mTokenType = TokenType.T_TEMP; } // String szCurrentToken = this.mCurrentToken.toString(); // if ( this.mTokenType == TokenType.T_TEMP ) { // this.mTokenType = TokenType.T_KEYWORD; // } if ( this.mTokenType == TokenType.T_UNDEFINED ) { throw this.parseException( "\nIllegal token found ! What-> \"" + this.mCurrentToken.toString() + "\"" ); } } public List eval() { int depth = 0; GUID parentGuid = null; List preGUIDs = null; do { this.getNextToken(); if( this.mTokenType == TokenType.T_ENDLINE ) { break; } if( this.mTokenType == TokenType.T_DELIMITER ) { continue; } String currentPart = this.mCurrentToken.toString(); List guids; if ( depth == 0 ) { guids = this.fetchAllGuidsRootCase( currentPart ); } else { // Case3: For middle and last parts, retrieve children GUIDs using distributedTrieTree guids = this.imperialTree.fetchChildrenGuids( parentGuid ); } this.getNextToken(); if ( guids == null || guids.isEmpty() ) { if( this.mTokenType == TokenType.T_ENDLINE ) { guids = preGUIDs; if ( guids == null || guids.isEmpty() ) { continue; } } else { continue; } } boolean bNone = true; for ( GUID guid : guids ) { List result = this.eval_entities( guid, currentPart, parentGuid ); if ( result != null && !result.isEmpty() ) { if ( this.mTokenType == TokenType.T_ENDLINE ) { return result; } parentGuid = guid; preGUIDs = guids; bNone = false; } } if( bNone ) { break; } this.back(); ++depth; } while ( this.mTokenType != TokenType.T_ENDLINE ); return null; } protected List eval_entities( GUID guid, String partName, GUID parentGuid ) { // 在中间部分只匹配文件夹,最后一部分匹配文件和文件夹 // In the last part, check both files and directories if ( this.mTokenType == TokenType.T_ENDLINE ) { RegistryTreeNode node = this.mRegistry.get( guid ); if( !this.checkPartInAllManipulators( guid, partName ) ) { if( node instanceof Properties && node.getGuid().equals( parentGuid ) ) { return List.of ( ((Properties) node).get( partName ) ); } return null; } else { return List.of ( node ); } } else { return this.searchAllManipulators( guid, partName ); } } protected List searchAllManipulators ( GUID guid, String partName ) { List guids = this.searchDirAndLinks( guid, partName ); if( guids != null && !guids.isEmpty() ) { return guids; } for ( GUIDNameManipulator manipulator : this.fileManipulators ) { guids = manipulator.getGuidsByNameID( partName, guid ); if ( guids != null && !guids.isEmpty() ) { return guids; } } return null; } protected RegistryJPathSelector reinit( String szSelector ) { if( szSelector != null ) { // For thread safe. 
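            // Design note: reinit() builds a fresh selector per query over the shared manipulators,
            // so the mutable tokenizer state (reader cursor, token buffer, token type) is confined
            // to one evaluation and never shared between threads. Illustrative call path (the
            // selector string is assumed to be a plain slash-delimited registry path):
            //     Object first = registry.querySelector( "game/minecraft/server" );
            //     List   all   = registry.querySelectorAll( "game/minecraft/server" );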
return new RegistryJPathSelector( new StringReader( szSelector ), this.pathResolver, this.mRegistry, this.dirManipulators[ 0 ], this.fileManipulators ); } return this; } @Override public List querySelectorAll( String szSelector ) { return this.reinit( szSelector ).eval(); } @Override public Object querySelector( String szSelector ) { List ret = this.reinit( szSelector ).eval(); if( ret != null && !ret.isEmpty() ) { return ret.get( 0 ); } return null; } @Override public Object querySelectorJ( String szSelector ) { Object raw = this.querySelector( szSelector ); if( raw instanceof Properties ) { return ((Properties) raw).toJSONObject(); } else if( raw instanceof TextFile) { return ((TextFile) raw).toJSON(); } else if( raw instanceof Namespace) { return ((Namespace) raw).toJSONObject(); } else if( raw instanceof Property ) { return ((Property) raw).getValue(); } else if( raw == null ) { return null; } JSONObject repare = new JSONMaptron(); repare.put( "type", raw.getClass().getSimpleName() ); repare.put( "value", raw ); return repare; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RegistrySelectorCursorParser.java ================================================ package com.pinecone.hydra.registry; import com.pinecone.framework.system.ParseException; import com.pinecone.framework.util.CursorParser; public class RegistrySelectorCursorParser implements CursorParser { protected RegistryJPathSelector mParser; protected RegistrySelectorCursorParser( RegistryJPathSelector parser ) { this.mParser = parser; } @Override public void back() throws ParseException { this.mParser.back(); } @Override public char next() throws ParseException { return this.mParser.next(); } @Override public String next( int n ) throws ParseException { return this.mParser.next(n); } @Override public Object nextValue() throws ParseException { return this.mParser.eval(); } @Override public Object nextValue( Object indexKey, Object parent, Object[] args ) throws ParseException { return this.nextValue(); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RenderDistributeRegistry.java ================================================ package com.pinecone.hydra.registry; import com.pinecone.framework.util.template.UniformTemplateRenderer; public interface RenderDistributeRegistry extends KOMRegistry { UniformTemplateRenderer getRenderer(); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/SelectorParseException.java ================================================ package com.pinecone.hydra.registry; import com.pinecone.framework.system.ParseException; public class SelectorParseException extends ParseException { public SelectorParseException( String what ) { this( what, -1 ); } public SelectorParseException ( String what, int errorOffset ) { super( what, errorOffset ); } public SelectorParseException ( String message, int errorOffset, Throwable cause ) { super( message, errorOffset, cause ); } public SelectorParseException ( Throwable cause, int errorOffset ) { super(cause.getMessage(), errorOffset, cause); } public SelectorParseException ( Throwable cause ) { this( cause, -1 ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ArchConfigNode.java ================================================ package 
com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.framework.util.id.GuidAllocator; import java.time.LocalDateTime; public abstract class ArchConfigNode extends ArchElementNode implements ConfigNode { protected GUID dataAffinityGuid; protected ConfigNodeMeta configNodeMeta; protected KOMRegistry registry; protected ArchConfigNode() { } public ArchConfigNode( KOMRegistry registry ) { this.registry = registry; GuidAllocator guidAllocator = this.registry.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); this.setCreateTime( LocalDateTime.now() ); } public void apply( KOMRegistry registry ) { this.registry = registry; } @Override public GUID getDataAffinityGuid() { return this.dataAffinityGuid; } @Override public void setDataAffinityGuid( GUID parentGuid ) { this.dataAffinityGuid = parentGuid; } @Override public void copyMetaTo( GUID guid ) { this.registry.setDataAffinityGuid( guid, this.getDataAffinityGuid() ); } @Override public void moveTo( String path ) { this.moveTo( this.registry.affirmNamespace( path ).getGuid() ); } @Override public void moveTo( GUID destinationGuid ) { this.registry.getMasterTrieTree().moveTo( this.guid, destinationGuid ); } @Override public ConfigNodeMeta getConfigNodeMeta() { return this.configNodeMeta; } @Override public void setConfigNodeMeta( ConfigNodeMeta configNodeMeta ) { this.configNodeMeta = configNodeMeta; } @Override public void setAttributes( Attributes attributes) { this.attributes = attributes; } @Override public KOMRegistry parentRegistry() { return this.registry; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } protected void putNewCopy(ConfigNode thisCopy, GUID destinationGuid ) { thisCopy.setName( this.getName() ); thisCopy.setConfigNodeMeta( this.getConfigNodeMeta() ); this.registry.put( thisCopy ); this.registry.getMasterTrieTree().setGuidLineage( thisCopy.getGuid(), destinationGuid ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ArchElementNode.java ================================================ package com.pinecone.hydra.registry.entity; import java.time.LocalDateTime; import com.pinecone.framework.util.id.GUID; public abstract class ArchElementNode implements ElementNode { protected long enumId; protected GUID guid; protected LocalDateTime createTime; protected LocalDateTime updateTime; protected String name; protected Attributes attributes; @Override public long getEnumId() { return this.enumId; } public void setEnumId( long enumId ) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } public void setGuid( GUID guid ) { this.guid = guid; } @Override public LocalDateTime getCreateTime() { return this.createTime; } public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; } @Override public String getName() { return this.name; } public void setName( String name ) { this.name = name; } @Override public Attributes getAttributes() { return this.attributes; } public void setAttributes( Attributes attributes ) { this.attributes = attributes; } } 
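/*
 * Illustrative sketch (hypothetical subclass, not part of the source tree): concrete config
 * nodes built on ArchConfigNode above typically route copy semantics through putNewCopy and
 * copyMetaTo, e.g.
 *
 *     public class ExampleConfigNode extends ArchConfigNode {
 *         public ExampleConfigNode( KOMRegistry registry ) { super( registry ); }
 *
 *         @Override
 *         public void copyTo( GUID destinationGuid ) {
 *             ExampleConfigNode thisCopy = new ExampleConfigNode( this.registry );
 *             this.putNewCopy( thisCopy, destinationGuid );   // copies name, meta, and tree lineage
 *             this.copyMetaTo( thisCopy.getGuid() );          // carries over the data-affinity guid
 *         }
 *     }
 */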
================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Attributes.java ================================================ package com.pinecone.hydra.registry.entity; import java.util.Collection; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.util.id.GUID; public interface Attributes extends PineUnit, Map { GUID getGuid(); void setGuid( GUID guid ); String getAttribute( String key ); void setAttribute( String key, String value ); Map getAttributes(); void setAttributes( Map attributes ); ElementNode parentElement(); @Override default boolean isEmpty() { return this.getAttributes().isEmpty(); } @Override default int size() { return this.getAttributes().size(); } @Override default boolean containsKey( Object key ) { return this.getAttributes().containsKey( key ); } @Override default boolean hasOwnProperty( Object key ) { return this.containsKey( key ); } @Override default boolean containsValue( Object value ) { return this.getAttributes().containsValue(value); } @Override default String get( Object key ) { return this.getAttributes().get(key); } @Override default Set keySet() { return this.getAttributes().keySet(); } @Override default Collection values() { return this.getAttributes().values(); } @Override default Set > entrySet() { return this.getAttributes().entrySet(); } String insert( String key, String value ) ; String update( String key, String value ) ; } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ConfigNode.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.KOMRegistry; import java.time.LocalDateTime; public interface ConfigNode extends ElementNode { @Override default ConfigNode evinceConfigNode() { return this; } void setEnumId( long enumId ); void setGuid( GUID guid ); GUID getDataAffinityGuid(); void setDataAffinityGuid( GUID guid ); void setCreateTime( LocalDateTime createTime ); void setUpdateTime( LocalDateTime updateTime ); void setName( String name ); void copyMetaTo( GUID guid ); ConfigNodeMeta getConfigNodeMeta(); void setConfigNodeMeta( ConfigNodeMeta configNodeMeta ); void setAttributes( Attributes attributes ); KOMRegistry parentRegistry(); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ConfigNodeMeta.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface ConfigNodeMeta extends Pinenut { long getEnumId(); void setEnumId(long id); GUID getGuid(); void setGuid(GUID guid); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/DefaultPropertyConverter.java ================================================ package com.pinecone.hydra.registry.entity; public class DefaultPropertyConverter implements TypeConverter { @Override public Object converter( String val, String type ) { return PropertyTypes.queryValue( val, type ); } @Override public String queryType( Object val ) { return PropertyTypes.queryType( val ); } @Override public String queryRecognizedType( String type ) { return 
PropertyTypes.queryRecognizedType( type ); } @Override public boolean isJSON( Object val ) { return PropertyTypes.isJSON( val ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/DefaultTextValueConverter.java ================================================ package com.pinecone.hydra.registry.entity; public class DefaultTextValueConverter implements TypeConverter { @Override public Object converter( String val, String type ) { return TextValueTypes.queryValue( val, type ); } @Override public String queryType( Object val ) { return TextValueTypes.queryType( val ); } @Override public String queryRecognizedType( String type ) { return TextValueTypes.queryRecognizedType( type ); } @Override public boolean isJSON( Object val ) { return TextValueTypes.isJSON( val ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ElementNode.java ================================================ package com.pinecone.hydra.registry.entity; import java.time.LocalDateTime; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.Registry; import com.pinecone.hydra.system.ko.meta.ElementObject; public interface ElementNode extends RegistryTreeNode, ElementObject { long getEnumId(); GUID getGuid(); LocalDateTime getCreateTime(); LocalDateTime getUpdateTime(); String getName(); Attributes getAttributes(); Registry parentRegistry(); @Override default String objectCategoryName() { return "Registry"; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericAttributes.java ================================================ package com.pinecone.hydra.registry.entity; import java.util.Map; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSON; import com.pinecone.hydra.registry.source.RegistryAttributesManipulator; public class GenericAttributes implements Attributes { protected GUID guid; protected Map attributes = new LinkedTreeMap<>(); protected ElementNode elementNode; protected RegistryAttributesManipulator attributesManipulator; public GenericAttributes( GUID guid, ElementNode element, RegistryAttributesManipulator attributesManipulator ) { this.guid = guid; this.elementNode = element; this.attributesManipulator = attributesManipulator; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid( GUID guid ) { this.guid = guid; } @Override public String getAttribute( String key ) { return this.attributes.get(key); } @Override public void setAttribute( String key, String value ) { this.put( key, value ); } @Override public Map getAttributes() { return this.attributes; } @Override public void setAttributes( Map attributes ) { this.attributes = attributes; for( Map.Entry kv : attributes.entrySet() ) { this.put( kv.getKey(), kv.getValue() ); } } @Override public ElementNode parentElement() { return this.elementNode; } @Override public String toJSONString() { return JSON.stringify( this.attributes ); } @Override public String toString() { return this.toJSONString(); } @Override public void putAll( Map m ) { for( Map.Entry kv : m.entrySet() ) { this.put( kv.getKey(), kv.getValue() ); } } @Override public String insert( String key, String value ) { if ( !this.attributesManipulator.containsKey( this.guid, key ) ) { 
this.attributesManipulator.insertAttribute( this.guid, key, value );
            this.attributes.put( key, value );
            return value;
        }
        return null;
    }

    @Override
    public String update( String key, String value ) {
        // Update applies only to an existing key; use insert() for a new key.
        if ( this.attributesManipulator.containsKey( this.guid, key ) ) {
            this.attributesManipulator.updateAttribute( this.guid, key, value );
            this.attributes.put( key, value );
            return value;
        }
        return null;
    }

    @Override
    public String put( String key, String value ) {
        if ( this.attributesManipulator.containsKey( this.guid, key ) ) {
            this.attributesManipulator.updateAttribute( this.guid, key, value );
        }
        else {
            this.attributesManipulator.insertAttribute( this.guid, key, value );
        }
        return this.attributes.put( key, value );
    }

    @Override
    public void clear() {
        this.attributesManipulator.clearAttributes( this.guid );
        this.attributes.clear();
    }

    @Override
    public boolean remove( Object key, Object value ) {
        if ( this.attributesManipulator.containsKey( this.guid, key.toString() ) ) {
            this.attributesManipulator.removeAttributeWithValue( this.guid, key.toString(), value.toString() );
            this.attributes.remove( key, value );
            return true;
        }
        return false;
    }

    @Override
    public String remove( Object key ) {
        if ( this.attributesManipulator.containsKey( this.guid, key.toString() ) ) {
            this.attributesManipulator.removeAttribute( this.guid, key.toString() );
            return this.attributes.remove( key );
        }
        return null;
    }
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericConfigNodeMeta.java
================================================
package com.pinecone.hydra.registry.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;

public class GenericConfigNodeMeta implements ConfigNodeMeta {
    private long enumId;
    private GUID guid;

    public GenericConfigNodeMeta() {
    }

    public GenericConfigNodeMeta( long enumId, GUID guid ) {
        this.enumId = enumId;
        this.guid = guid;
    }

    @Override
    public long getEnumId() { return this.enumId; }

    @Override
    public void setEnumId( long enumId ) { this.enumId = enumId; }

    @Override
    public GUID getGuid() { return this.guid; }

    @Override
    public void setGuid( GUID guid ) { this.guid = guid; }

    @Override
    public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); }

    @Override
    public String toString() { return this.toJSONString(); }
}

================================================
FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericNamespace.java
================================================
package com.pinecone.hydra.registry.entity;

import com.pinecone.framework.unit.KeyValue;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.JSONEncoder;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.hydra.registry.KOMRegistry;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.framework.util.id.GuidAllocator;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class GenericNamespace extends ArchElementNode implements Namespace {
    protected NamespaceMeta namespaceMeta;
    protected KOMRegistry registry;
    protected Map children;
    protected List childrenGuids;

    public GenericNamespace() {
    }

    public GenericNamespace( KOMRegistry registry ) {
        this.registry = registry;
GuidAllocator guidAllocator = this.registry.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); this.setCreateTime( LocalDateTime.now() ); } public void apply( KOMRegistry registry ) { this.registry = registry; } @Override public NamespaceMeta getNamespaceWithMeta() { return this.namespaceMeta; } @Override public void setNamespaceMeta( NamespaceMeta namespaceMeta ) { this.namespaceMeta = namespaceMeta; } /** Thread unsafe */ @Override public Map getChildren() { if( this.children == null ) { Map nodeHashMap = new LinkedHashMap<>(); for( GUID guid : this.childrenGuids ){ RegistryTreeNode registryTreeNode = this.registry.get( guid ); nodeHashMap.put( registryTreeNode.getName(), registryTreeNode ); } this.children = nodeHashMap; } return this.children; } @Override public List fetchChildrenGuids() { return this.childrenGuids; } @Override public void setChildrenGuids( List contentGuids, int depth ) { this.childrenGuids = contentGuids; } @Override public List listItem() { ArrayList registryTreeNodes = new ArrayList<>(); registryTreeNodes.addAll( this.getChildren().values() ); return registryTreeNodes; } @Override public void put( RegistryTreeNode child ) { String key = child.getName(); if ( this.getChildren().get( key ) != null ){ throw new IllegalArgumentException( "key is exist." ); } this.getChildren().put( key, child ); this.registry.affirmOwnedNode( this.guid, child.getGuid() ); } @Override public void remove( String key ) { RegistryTreeNode registryTreeNode = this.getChildren().get(key); this.registry.remove(registryTreeNode.getGuid()); this.getChildren().remove(key); } @Override public KOMRegistry parentRegistry() { return this.registry; } @Override public boolean containsKey( String key ) { return this.getChildren().containsKey(key); } @Override public JSONObject toJSONObject() { Map children = this.getChildren(); JSONObject jo = new JSONMaptron(); for( Map.Entry kv : children.entrySet() ) { if( kv.getValue().evinceNamespace() != null ) { jo.put( kv.getKey(), kv.getValue().evinceNamespace().toJSONObject() ); } else if( kv.getValue().evinceProperties() != null ) { jo.put( kv.getKey(), kv.getValue().evinceProperties().toJSONObject() ); } else if( kv.getValue().evinceTextFile() != null ) { jo.put( kv.getKey(), kv.getValue().evinceTextFile().toJSON() ); } } return jo; } @Override public ConfigNode getConfigNode(String key ) { return (ConfigNode) this.getChildren().get(key); } @Override public Namespace getNamespace( String key ) { return (Namespace) this.getChildren().get( key ); } @Override public int size() { return this.childrenGuids.size(); } @Override public boolean isEmpty() { return this.childrenGuids.isEmpty(); } @Override public Set keySet() { return this.getChildren().keySet(); } @Override public Set > entrySet() { return this.getChildren().entrySet(); } @Override public void copyTo( String path ) { this.copyTo( this.registry.affirmNamespace( path ).getGuid() ); } @Override public void copyTo( GUID destinationGuid ) { List destChildren = this.registry.getChildren( destinationGuid ); Namespace thisCopy = null; for( TreeNode node : destChildren ) { if( this.getName().equals( node.getName() ) ) { if( node instanceof Namespace) { thisCopy = (Namespace) node; break; } else { throw new IllegalArgumentException( String.format( "Existed child-destination [%s] should be namespace.", this.getName() ) ); } } } // Child-Destination non-exist. 
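        // Merge-copy semantics: an existing same-name namespace at the destination is reused as
        // the target; otherwise a fresh GenericNamespace is linked in below. Children are then
        // copied recursively either way, so copying "game/terraria" into "backup/" twice yields
        // a single merged "backup/terraria" subtree rather than a duplicate.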
if( thisCopy == null ) { this.copyNamespaceMetaTo( destinationGuid ); thisCopy = new GenericNamespace( this.registry ); thisCopy.setName( this.getName() ); thisCopy.setNamespaceMeta( this.getNamespaceWithMeta() ); this.registry.put( thisCopy ); this.registry.getMasterTrieTree().setGuidLineage( thisCopy.getGuid(), destinationGuid ); } this.copyChildrenTo( thisCopy.getGuid() ); } @Override public void copyChildrenTo( GUID destinationGuid ) { Collection childrenNodes = this.getChildren().values(); for ( RegistryTreeNode node : childrenNodes ) { node.copyTo( destinationGuid ); } } @Override public void moveTo( String path ) { this.moveTo( this.registry.affirmNamespace( path ).getGuid() ); } @Override public void moveTo( GUID destinationGuid ) { this.registry.getMasterTrieTree().moveTo( this.guid, destinationGuid ); } @Override public void copyNamespaceMetaTo( GUID destinationGuid ) { } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "name" , this.getName() ), new KeyValue<>( "guid" , this.getGuid() ), new KeyValue<>( "createTime" , this.getCreateTime() ), new KeyValue<>( "updateTime" , this.getUpdateTime() ), new KeyValue<>( "childrenSize", this.childrenGuids.size() ), } ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericNamespaceMeta.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; public class GenericNamespaceMeta implements NamespaceMeta { private long enumId; private GUID guid; public GenericNamespaceMeta() { } public GenericNamespaceMeta(long enumId, GUID guid ) { this.enumId = enumId; this.guid = guid; } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericProperties.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.unit.UniScopeMap; import com.pinecone.framework.unit.UniScopeMaptron; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.io.IOException; import java.io.StringWriter; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; public class GenericProperties extends ArchConfigNode implements Properties { protected Properties parent; protected UniScopeMap properties = new UniScopeMaptron<>(); public GenericProperties() { } public GenericProperties( KOMRegistry registry ) { super( registry ); } @Override public Properties getAffinityParent() { return this.parent; } @Override public void setAffinityParent( 
Properties parent ) { this.parent = parent; } public Properties getOwner( String szKey ) { Properties owned = this; while ( owned != null ) { if( owned.hasOwnProperty( szKey ) ) { break; } owned = owned.getAffinityParent(); } return owned; } @Override public void put( String key, Object val ) { Property p = new GenericProperty( this ); p.setKey( key ); p.setValue( val ); this.putProperty( p ); } @Override public void puts( Map map ) { for( Map.Entry kv : map.entrySet() ) { this.put( kv.getKey(), kv.getValue() ); } } @Override public void putProperty( Property property ) { String szKey = property.getKey(); Properties owned = this.getOwner( szKey ); property.setCreateTime( LocalDateTime.now() ); property.setUpdateTime( LocalDateTime.now() ); if( owned == null ) { // Insert to current scope. property.setGuid( this.guid ); this.properties.put( property.getKey(), property ); this.registry.putProperty( property, this.guid ); } else { owned.updateFromDummy( property ); } } @Override public void remove( String key ) { Properties owner = this.getOwner( key ); if( owner != null ) { this.properties.remove( key ); this.registry.removeProperty( owner.getGuid(), key ); } } @Override public void update( Property property ) { if( property.getGuid().equals( this.guid ) ) { Property p = this.get( property.getKey() ); // If p == property, it is the owned element itself; no need to copy. if( p != null && p != property ) { p.from( property ); property = p; } } this.registry.updateProperty( property ); } @Override public void updateFromDummy( Property dummy ) { Property p = this.get( dummy.getKey() ); // If p == dummy, it is the owned element itself; no need to copy. if( p != null ) { p.from( dummy ); this.registry.updateProperty( p ); } } @Override public void set( String key, Object val ) { Property p = this.get( key ); if( p != null ) { p.setValue( val ); this.registry.updateProperty( p ); } } @Override public Property get( String key ) { return this.properties.get( key ); } @Override public Object getValue( String key ) { Property property = this.get( key ); if( property != null ) { return property.getValue(); } return null; } @Override public boolean containsKey( String key ) { return this.properties.containsKey( key ); } @Override public boolean containsKey( Object key ) { return this.properties.containsKey( key ); } @Override public boolean hasOwnProperty( Object key ) { return this.properties.hasOwnProperty( key ); } @Override public int size() { return this.properties.size(); } @Override public boolean isEmpty() { return this.properties.isEmpty(); } @Override public Collection values() { ArrayList values = new ArrayList<>(); for( Property p : this.properties.values() ){ values.add(p.getValue()); } return values; } @Override public Set keySet() { HashSet keys = new HashSet<>(); for ( Property p : this.properties.values() ){ keys.add( p.getKey() ); } return keys; } @Override public Set entrySet() { HashSet propertyHashSet = new HashSet<>(); for( Property p : this.properties.values() ){ propertyHashSet.add(p); } return propertyHashSet; } @Override public void copyValueTo( GUID destinationGuid ) { if ( destinationGuid != null ){ this.registry.copyPropertiesTo( this.guid, destinationGuid ); } } @Override public Collection getProperties() { return this.properties.values(); } @Override public Map toMap() { Map jo = new LinkedHashMap<>(); LinkedHashMap overridden = new LinkedHashMap<>(); this.properties.overrideTo( overridden ); for( Property property : overridden.values() ) { jo.put( property.getKey(),
property.getValue() ); } return jo; } @Override public UniScopeMap getPropertiesMap() { return this.properties; } @Override public void setProperties( List properties ) { this.properties = new UniScopeMaptron<>(); for( Property p : properties ) { this.properties.put( p.getKey(), p ); } } @Override public void setProperties( UniScopeMap properties ) { this.properties = properties; } @Override public void setThisProperties( Map properties ) { this.properties.setThisScope( properties ); } @Override public void setParentProperties( UniScopeMap parent ) { this.properties.setParent( parent ); } @Override public KOMRegistry parentRegistry() { return this.registry; } @Override public void copyTo( String path ) { this.copyTo( this.registry.affirmProperties( path ).getGuid() ); } @Override public void copyTo( GUID destinationGuid ) { Properties thisCopy = null; RegistryTreeNode tn = this.registry.get( destinationGuid ); if( tn.evinceProperties() == null ) { List destChildren = this.registry.getChildren( destinationGuid ); for( TreeNode node : destChildren ) { if( this.getName().equals( node.getName() ) ) { if( node instanceof Properties ) { thisCopy = (Properties) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child destination [%s] should be a properties node.", this.getName() ) ); } } } } else { thisCopy = (Properties) tn; } // Child-Destination non-exist. if( thisCopy == null ) { thisCopy = new GenericProperties( this.registry ); this.putNewCopy( thisCopy, destinationGuid ); } this.copyMetaTo( thisCopy.getGuid() ); this.copyValueTo( thisCopy.getGuid() ); } @Override public String toJSONString() { try{ PropertyJSONEncoder encoder = new PropertyJSONEncoder(); try( StringWriter writer = new StringWriter() ){ encoder.write( this, writer ); return writer.toString(); } } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericProperty.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import java.time.LocalDateTime; public class GenericProperty implements Property { private long enumId; private GUID guid; private String key; private String type; private LocalDateTime createTime; private LocalDateTime updateTime; private Object rawValue; //TODO private Object value; protected Properties properties; public GenericProperty() { } public GenericProperty( Properties properties ) { this.properties = properties; } public GenericProperty( Properties properties, long enumId, GUID guid, String key, String type, LocalDateTime createTime, LocalDateTime updateTime, String value ) { this( properties ); this.enumId = enumId; this.guid = guid; this.key = key; this.type = type; this.createTime = createTime; this.updateTime = updateTime; this.rawValue = value; } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public String getKey() { return this.key; } @Override public void setKey(String key) { this.key = key; } @Override public String getType() { return this.type; } @Override public void setType(String type) {
this.type = type; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; } @Override public Object getRawValue() { return this.rawValue; } protected String queryType( Object val ) { return this.parentProperties().parentRegistry().getPropertyTypeConverter().queryType( val ); } protected Object converterValue( String val, String type ) { return this.parentProperties().parentRegistry().getPropertyTypeConverter().converter( val, type ); } @Override public void setRawValue( Object rawValue ) { this.rawValue = rawValue; this.value = this.converterValue( this.rawValue.toString(), this.type ); } @Override public Object getValue() { return this.value; } @Override public void setValue( Object value ) { this.rawValue = value.toString(); this.type = this.queryType( value ); this.value = this.converterValue( this.rawValue.toString(), this.type ); } @Override public boolean isStringBasedType() { return PropertyTypes.isStringBasedType( this.type ); } @Override public void fromValue( Property that ) { this.key = that.getKey(); this.type = that.getType(); this.rawValue = that.getValue(); } @Override public void from( Property that ) { this.fromValue( that ); this.createTime = that.getCreateTime(); this.updateTime = that.getUpdateTime(); } @Override public void copy( Property that ) { this.setEnumId( that.getEnumId() ); this.setGuid( that.getGuid() ); this.from( that ); } @Override public Properties parentProperties() { return this.properties; } @Override public void setParentProperties( Properties parentProperties ) { this.properties = parentProperties; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericTextFile.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.util.List; public class GenericTextFile extends ArchConfigNode implements TextFile { protected TextValue mTextValue; public GenericTextFile() { } public GenericTextFile( KOMRegistry registry ) { this.registry = registry; } @Override public void setTextValue( TextValue textValue ) { this.mTextValue = textValue; } @Override public void put( TextValue textValue ) { if( this.mTextValue == null ) { this.registry.putTextValue( textValue.getGuid(), textValue.getValue(), textValue.getType() ); } else { this.update( textValue ); } this.mTextValue = textValue; } @Override public Object decode() { TextValue value = this.get(); return this.registry.getTextValueTypeConverter().converter( value.getValue(), value.getType() ); } @Override public Object toJSON() { TextValue value = this.get(); String type = this.registry.getTextValueTypeConverter().queryRecognizedType( value.getType() ); Object ret = this.decode(); if( type == null ||
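// Unrecognized or non-JSON-representable values fall back to a reparse object carrying the raw type and value pair.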
!this.registry.getTextValueTypeConverter().isJSON( ret ) ) { JSONObject reparse = new JSONMaptron(); reparse.put( "type", value.getType() ); reparse.put( "value", value.getValue() ); return reparse; } return ret; } @Override public void remove( GUID guid ) { this.registry.removeTextValue(guid); } @Override public void update( TextValue textValue ) { this.registry.updateTextValue( textValue, this.guid ); } @Override public void update( String text, String format ) { TextValue textValue = GenericTextValue.newUpdateTextValue( this.guid, text, format ); this.update( textValue ); } @Override public void put( String text, String format ) { if( this.mTextValue == null ) { this.registry.putTextValue( this.guid, text, format ); } else { this.update( text, format ); } } @Override public TextValue get() { return this.mTextValue; } public KOMRegistry parentRegistry() { return this.registry; } public void setRegistry(KOMRegistry registry) { this.registry = registry; } @Override public void copyTo( String path ) { this.copyTo( this.registry.affirmTextConfig( path ).getGuid() ); } @Override public void copyTo( GUID destinationGuid ) { TextFile thisCopy = null; RegistryTreeNode tn = this.registry.get( destinationGuid ); if( tn.evinceTextFile() == null ) { List destChildren = this.registry.getChildren( destinationGuid ); for( TreeNode node : destChildren ) { if( this.getName().equals( node.getName() ) ) { if( node instanceof TextFile) { thisCopy = (TextFile) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child destination [%s] should be a text config node.", this.getName() ) ); } } } } else { thisCopy = (TextFile) tn; } // Child-Destination non-exist. if( thisCopy == null ) { thisCopy = new GenericTextFile( this.registry ); this.putNewCopy( thisCopy, destinationGuid ); } this.copyMetaTo( thisCopy.getGuid() ); this.copyValueTo( thisCopy.getGuid() ); } @Override public void copyValueTo( GUID destinationGuid ) { if ( destinationGuid != null ){ this.registry.copyTextValueTo( this.guid, destinationGuid ); } } @Override public String toJSONString() { return JSON.stringify( this.toJSON() ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericTextValue.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import java.time.LocalDateTime; public class GenericTextValue implements TextValue { private long enumId; private GUID guid; private String value; private String type; private LocalDateTime createTime; private LocalDateTime updateTime; public GenericTextValue() { } public GenericTextValue( GUID guid, String value, String type ) { this.setGuid( guid ); this.setValue( value ); this.setType( type ); this.setCreateTime( LocalDateTime.now() ); this.setUpdateTime( LocalDateTime.now() ); } public GenericTextValue( long enumId, GUID guid, String value, String type, LocalDateTime createTime, LocalDateTime updateTime ) { this.enumId = enumId; this.guid = guid; this.value = value; this.type = type; this.createTime = createTime; this.updateTime = updateTime; } /** * Gets enumId. * @return enumId */ @Override public long getEnumId() { return this.enumId; } /** * Sets enumId. * @param enumId */ @Override public void setEnumId(long enumId) { this.enumId = enumId; } /** * Gets guid. * @return guid */ @Override public GUID
getGuid() { return this.guid; } /** * Sets guid. * @param guid */ @Override public void setGuid(GUID guid) { this.guid = guid; } /** * Gets value. * @return value */ @Override public String getValue() { return this.value; } /** * Sets value. * @param value */ @Override public void setValue(String value) { this.value = value; } /** * Gets type. * @return type */ @Override public String getType() { return this.type; } /** * Sets type. * @param type */ @Override public void setType(String type) { this.type = type; } /** * Gets createTime. * @return createTime */ @Override public LocalDateTime getCreateTime() { return this.createTime; } /** * Sets createTime. * @param createTime */ @Override public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } /** * Gets updateTime. * @return updateTime */ @Override public LocalDateTime getUpdateTime() { return updateTime; } /** * Sets updateTime. * @param updateTime */ @Override public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } public static TextValue newUpdateTextValue( GUID guid, String text, String format ) { TextValue textValue = new GenericTextValue(); textValue.setGuid( guid ); textValue.setUpdateTime(LocalDateTime.now()); textValue.setValue(text); textValue.setType(format); return textValue; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Namespace.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.registry.KOMRegistry; import java.time.LocalDateTime; import java.util.List; import java.util.Map; import java.util.Set; public interface Namespace extends ElementNode { long getEnumId(); void setEnumId( long enumId ); GUID getGuid(); void setGuid( GUID guid ); String getName(); void setName( String name ); LocalDateTime getCreateTime(); @Override default Namespace evinceNamespace() { return this; } void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); NamespaceMeta getNamespaceWithMeta(); void setNamespaceMeta( NamespaceMeta namespaceMeta ); Attributes getAttributes(); void setAttributes(Attributes attributes); Map getChildren(); List fetchChildrenGuids(); void setChildrenGuids( List contentGuids, int depth ); List listItem(); void put ( RegistryTreeNode child ); void remove ( String key ); KOMRegistry parentRegistry(); boolean containsKey ( String key ); JSONObject toJSONObject(); ConfigNode getConfigNode(String key ); Namespace getNamespace( String key ); int size(); boolean isEmpty(); Set keySet(); Set> entrySet(); void copyTo( String path ) ; /** * Copy itself and its owned elements into destination. * @param destinationGuid Guid of destination. */ void copyTo( GUID destinationGuid ); /** * Only copy its owned elements into destination. * @param destinationGuid Guid of destination.
*/ void copyChildrenTo( GUID destinationGuid ); void copyNamespaceMetaTo( GUID destinationGuid ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/NamespaceMeta.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface NamespaceMeta extends Pinenut { long getEnumId(); void setEnumId(long id); GUID getGuid(); void setGuid(GUID guid); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Properties.java ================================================ package com.pinecone.hydra.registry.entity; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.unit.UniScopeMap; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; public interface Properties extends ConfigNode, PineUnit { Properties getAffinityParent(); void setAffinityParent( Properties parent ); Collection getProperties(); Map toMap(); default JSONObject toJSONObject() { return new JSONMaptron( this.toMap(), true ); } UniScopeMap getPropertiesMap(); void setProperties ( List properties ); void setProperties ( UniScopeMap properties ); void setThisProperties ( Map properties ); void setParentProperties ( UniScopeMap parent ); Properties getOwner ( String szKey ); void put ( String key, Object val ); void puts ( Map map ); void putProperty ( Property property ); void remove ( String key ); void update ( Property property ); void updateFromDummy ( Property dummy ); void set ( String key, Object val ); Property get ( String key ); Object getValue ( String key ); boolean containsKey ( String key ); boolean containsKey ( Object key ); boolean hasOwnProperty ( Object key ); int size(); boolean isEmpty(); Collection values(); Set keySet(); Set entrySet(); @Override default Properties evinceProperties() { return this; } void copyValueTo( GUID destinationGuid ); void copyTo ( GUID destinationGuid ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Property.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface Property extends Pinenut { static Property newDummy() { return new GenericProperty(); } long getEnumId(); void setEnumId( long enumId ); GUID getGuid(); void setGuid( GUID guid ); String getKey(); void setKey( String key ); String getType(); void setType( String type ); Object getRawValue(); void setRawValue( Object value ); Object getValue(); void setValue( Object value ); boolean isStringBasedType() ; LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); // Not copy guid void fromValue ( Property that ); void from ( Property that ); void copy ( Property that ); Properties parentProperties(); void setParentProperties( Properties parentProperties ); } ================================================ FILE: 
Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/PropertyJSONEncoder.java ================================================ package com.pinecone.hydra.registry.entity; import java.io.IOException; import java.io.Writer; import java.util.Iterator; import java.util.Map; import com.pinecone.framework.unit.UniScopeMap; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.GenericJSONEncoder; public class PropertyJSONEncoder extends GenericJSONEncoder { protected boolean mbSimpleEncode; public PropertyJSONEncoder( boolean bSimpleEncode ) { this.mbSimpleEncode = bSimpleEncode; } public PropertyJSONEncoder() { this( true ); } @Override public Writer write ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if( that instanceof Properties ) { this.write( (Properties) that, writer, nIndentFactor, nIndentBlankNum ); return writer; } return super.write( that, writer, nIndentFactor, nIndentBlankNum ); } public Writer write( Properties that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { writer.write("{"); UniScopeMap propertyMap = that.getPropertiesMap(); int nNewIndent = nIndentBlankNum + nIndentFactor; boolean bHasNextElement = false; if ( that.getAffinityParent() != null ) { GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, false ); writer.write("\"__parent__\": "); this.write( that.getAffinityParent(), writer, nIndentFactor, nNewIndent ); bHasNextElement = true; } Iterator > iter = propertyMap.entrySet().iterator(); for( ; iter.hasNext(); bHasNextElement = true ) { GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement ); Map.Entry kv = iter.next(); writer.write( StringUtils.jsonQuote( kv.getKey() ) ); writer.write(':'); if ( nIndentFactor > 0 ) { writer.write( ' '); } if( this.mbSimpleEncode ) { this.write( kv.getValue().getValue(), writer, nIndentFactor, nNewIndent ); } else { this.write( kv.getValue(), writer, nIndentFactor, nNewIndent ); } } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); writer.write("}"); return writer; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/PropertyTypes.java ================================================ package com.pinecone.hydra.registry.entity; import java.util.Date; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONArraytron; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; public final class PropertyTypes { public final static String ELEMENT_STRING_TYPE_NAME = "String"; public final static String ELEMENT_INT64_TYPE_NAME = "int64"; public final static String ELEMENT_INT32_TYPE_NAME = "int32"; public final static String ELEMENT_FLOAT32_TYPE_NAME = "float32"; public final static String ELEMENT_FLOAT64_TYPE_NAME = "float64"; public final static String ELEMENT_BOOLEAN_TYPE_NAME = "bool"; public final static String ELEMENT_NULL_TYPE_NAME = "Null"; public final static String ELEMENT_JSONOBJECT_TYPE_NAME = "JSONObject"; public final static String ELEMENT_JSONARRAY_TYPE_NAME = "JSONArray"; public static String queryType( Object val ) { String type = PropertyTypes.ELEMENT_STRING_TYPE_NAME; if( val == null ) { type = PropertyTypes.ELEMENT_NULL_TYPE_NAME; } else if( val instanceof JSONObject ) { type =
PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME; } else if( val instanceof JSONArray ) { type = PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME; } else if( val instanceof Byte || val instanceof Short || val instanceof Integer ) { type = PropertyTypes.ELEMENT_INT32_TYPE_NAME; } else if( val instanceof Long ) { type = PropertyTypes.ELEMENT_INT64_TYPE_NAME; } else if( val instanceof Float ) { type = PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME; } else if( val instanceof Double ) { type = PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME; } else if( val instanceof Boolean ) { type = PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME; } return type; } public static Object queryValue( String val, String type ) { switch ( type ) { case PropertyTypes.ELEMENT_NULL_TYPE_NAME: { return null; } case PropertyTypes.ELEMENT_STRING_TYPE_NAME :{ return val; } case PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME :{ return new JSONMaptron( val ); } case PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME :{ return new JSONArraytron( val ); } case PropertyTypes.ELEMENT_INT32_TYPE_NAME :{ return Integer.parseInt( val ); } case PropertyTypes.ELEMENT_INT64_TYPE_NAME :{ return Long.parseLong( val ); } case PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME :{ return Float.parseFloat( val ); } case PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME :{ return Double.parseDouble( val ); } case PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME :{ return Boolean.parseBoolean( val ); } } return null; } public static String queryRecognizedType( String type ) { switch ( type ) { case PropertyTypes.ELEMENT_NULL_TYPE_NAME: { return PropertyTypes.ELEMENT_NULL_TYPE_NAME; } case PropertyTypes.ELEMENT_STRING_TYPE_NAME :{ return PropertyTypes.ELEMENT_STRING_TYPE_NAME; } case PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME :{ return PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME; } case PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME :{ return PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME; } case PropertyTypes.ELEMENT_INT32_TYPE_NAME :{ return PropertyTypes.ELEMENT_INT32_TYPE_NAME; } case PropertyTypes.ELEMENT_INT64_TYPE_NAME :{ return PropertyTypes.ELEMENT_INT64_TYPE_NAME; } case PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME :{ return PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME; } case PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME :{ return PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME; } case PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME :{ return PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME; } } return null; } public static boolean isStringBasedType( String type ) { switch ( type ) { case PropertyTypes.ELEMENT_STRING_TYPE_NAME :{ return true; } case PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME :{ return true; } case PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME :{ return true; } } return false; } public static boolean isJSON( Object val ) { Class type = val.getClass(); return val instanceof JSONObject || val instanceof JSONArray || ( type.isPrimitive() || type == String.class || Number.class.isAssignableFrom(type) || type == Boolean.class || type == Character.class || type == Date.class || type.isEnum() || type == byte[].class ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/RegistryTreeNode.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface RegistryTreeNode extends TreeNode { default ConfigNode evinceConfigNode(){ return null; } default Namespace evinceNamespace(){ return null; } default Properties 
evinceProperties() { return null; } default TextFile evinceTextFile() { return null; } void copyTo( String path ); void copyTo( GUID guid ); void moveTo( String path ); void moveTo( GUID destinationGuid ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TextFile.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.util.id.GUID; public interface TextFile extends ConfigNode { void setTextValue( TextValue textValue ); void put ( TextValue textValue ); void remove ( GUID guid ); void update ( TextValue textValue ); void update ( String text, String format ) ; void put ( String text, String format ) ; TextValue get (); Object decode(); Object toJSON(); void copyValueTo( GUID destinationGuid ); void copyTo ( GUID destinationGuid ); @Override default TextFile evinceTextFile() { return this; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TextValue.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface TextValue extends Pinenut { long getEnumId(); void setEnumId( long enumId ); GUID getGuid(); void setGuid( GUID guid ); String getValue(); void setValue( String value ); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); String getType(); void setType( String type ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TextValueTypes.java ================================================ package com.pinecone.hydra.registry.entity; public final class TextValueTypes { public final static String STRING_TYPE_NAME = PropertyTypes.ELEMENT_STRING_TYPE_NAME; public final static String INT64_TYPE_NAME = PropertyTypes.ELEMENT_INT64_TYPE_NAME; public final static String INT32_TYPE_NAME = PropertyTypes.ELEMENT_INT32_TYPE_NAME; public final static String FLOAT32_TYPE_NAME = PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME; public final static String FLOAT64_TYPE_NAME = PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME; public final static String BOOLEAN_TYPE_NAME = PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME; public final static String NULL_TYPE_NAME = PropertyTypes.ELEMENT_NULL_TYPE_NAME; public final static String JSONOBJECT_TYPE_NAME = PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME; public final static String JSONARRAY_TYPE_NAME = PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME; public final static String YAML_TYPE_NAME = "Yaml"; public final static String XML_TYPE_NAME = "XML"; public final static String INI_TYPE_NAME = "INI"; public static String queryType( Object val ) { return PropertyTypes.queryType( val ); } public static Object queryValue( String val, String type ) { if( val == null ) { return null; } Object ret = PropertyTypes.queryValue( val, type ); if( ret != null ) { return ret; } return val; } public static String queryRecognizedType( String type ) { String ret = PropertyTypes.queryRecognizedType( type ); if( ret == null ) { switch ( type ) { case TextValueTypes.YAML_TYPE_NAME: { return TextValueTypes.YAML_TYPE_NAME; } case TextValueTypes.XML_TYPE_NAME: { return TextValueTypes.XML_TYPE_NAME; } case 
TextValueTypes.INI_TYPE_NAME: { return TextValueTypes.INI_TYPE_NAME; } } } else { return ret; } return null; } public static boolean isJSON( Object val ) { return PropertyTypes.isJSON( val ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TypeConverter.java ================================================ package com.pinecone.hydra.registry.entity; import com.pinecone.framework.system.prototype.Pinenut; public interface TypeConverter extends Pinenut { Object converter( String val, String type ); String queryType( Object val ); String queryRecognizedType( String type ); boolean isJSON( Object val ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/AnnotatedRegObjectInjector.java ================================================ package com.pinecone.hydra.registry.marshaling; public class AnnotatedRegObjectInjector { } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryDOMEncoder.java ================================================ package com.pinecone.hydra.registry.marshaling; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.entity.Attributes; import com.pinecone.hydra.registry.entity.ElementNode; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.registry.entity.TextFile; import org.jsoup.nodes.Element; public class RegistryDOMEncoder implements RegistryEncoder { protected KOMRegistry registry; public RegistryDOMEncoder( KOMRegistry registry ) { this.registry = registry; } @Override public Object encode( ElementNode node ) { if ( node.evinceNamespace() != null ) { return this.encodeNS(node.evinceNamespace() ); } else if ( node.evinceProperties() != null ) { return this.encodeProperties(node.evinceProperties() ); } else if ( node.evinceTextFile() != null ) { return this.encodeTextFile(node.evinceTextFile()); } return null; } protected Element encodeNS( Namespace ns ) { Element element = new Element(ns.getName()); Attributes attributes = ns.getAttributes(); setDOMAttributes(element, attributes); for ( RegistryTreeNode child : ns.getChildren().values() ) { Object encodedChild = this.encode((ElementNode)child); if ( encodedChild instanceof Element ) { element.appendChild((Element) encodedChild); } } return element; } protected Element encodeProperties( Properties properties ) { Element element = new Element( properties.getName() ); Attributes attributes = properties.getAttributes(); setDOMAttributes(element, attributes); for ( String key : properties.keySet() ) { Element propertyElement = new Element(key); propertyElement.text( properties.get(key).getValue().toString() ); element.appendChild( propertyElement ); } return element; } protected Element encodeTextFile( TextFile textFile ) { Element element = new Element( textFile.getName() ); Attributes attributes = textFile.getAttributes(); setDOMAttributes( element, attributes ); element.append( textFile.get().getValue() ); return element; } private void setDOMAttributes( Element element, Attributes attributes ) { for ( String key : attributes.keySet() ) { element.attr(key, attributes.get(key)); } } } ================================================ FILE: 
Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryDecoder.java ================================================ package com.pinecone.hydra.registry.marshaling; import java.util.List; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.ElementNode; public interface RegistryDecoder extends Pinenut { default ElementNode decode( Object val, GUID parentGUID ) { if ( val instanceof Map ) { Map map = (Map) val; if( map.isEmpty() ) { return null; } else if( map.size() > 1 ) { throw new IllegalArgumentException( "There should be exactly one root element." ); } Map.Entry kv = (Map.Entry) map.entrySet().iterator().next(); return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID ); } else if ( val instanceof List ) { List list = (List) val; if( list.isEmpty() ) { return null; } else if( list.size() > 1 ) { throw new IllegalArgumentException( "There should be exactly one root element." ); } return this.decode( Integer.toString( 0 ), list.get( 0 ), parentGUID ); } return null; } ElementNode decode( String key, Object val, GUID parentGUID ); default ElementNode decode( Map.Entry kv, GUID parentGUID ) { return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID ); } default ElementNode decode( Object val ) { return this.decode( val, null ); } default ElementNode decode( String key, Object val ) { return this.decode( key, val, null ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryEncoder.java ================================================ package com.pinecone.hydra.registry.marshaling; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.registry.entity.ElementNode; public interface RegistryEncoder extends Pinenut { Object encode( ElementNode node ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryJQuery.java ================================================ package com.pinecone.hydra.registry.marshaling; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface RegistryJQuery { String value() default ""; } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryJSONDecoder.java ================================================ package com.pinecone.hydra.registry.marshaling; import java.util.Collection; import java.util.List; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.entity.ElementNode; import com.pinecone.hydra.registry.entity.GenericNamespace; import com.pinecone.hydra.registry.entity.GenericProperties; import com.pinecone.hydra.registry.entity.GenericTextFile; import com.pinecone.hydra.registry.entity.GenericTextValue; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.registry.entity.TextFile; import com.pinecone.hydra.registry.entity.TextValueTypes; import
com.pinecone.hydra.unit.imperium.entity.TreeNode; public class RegistryJSONDecoder implements RegistryDecoder { protected KOMRegistry registry; public RegistryJSONDecoder( KOMRegistry registry ) { this.registry = registry; } protected boolean isPropertiesFormat( Map jo ) { boolean b = false; for( Object o : jo.entrySet() ) { Map.Entry kv = (Map.Entry) o; if( kv.getValue() instanceof Map ) { b = true; } if( kv.getValue() instanceof List ) { b = true; } } return b; } protected boolean isPropertiesFormat( List jo ) { boolean b = false; for( Object o : jo ) { if( o instanceof Map ) { b = true; } if( o instanceof List ) { b = true; } } return b; } @Override public ElementNode decode( String szName, Object o, GUID parentGuid ) { if ( o instanceof Map ) { return (ElementNode) this.registry.get( this.decodeJSONObject( szName, (Map) o, parentGuid ).getGuid() ); } else if ( o instanceof List ) { return (ElementNode) this.registry.get( this.decodeJSONArray(szName, (List) o, parentGuid).getGuid() ); } // Handling text file as a leaf node TextFile file = new GenericTextFile(this.registry); file.setName( szName ); this.registry.put( file ); file.put( new GenericTextValue( file.getGuid(), o.toString(), TextValueTypes.queryType(o) ) ); this.registry.affirmOwnedNode( parentGuid, file.getGuid() ); return file; } protected Namespace newNamespace( String szName ) { Namespace ns = new GenericNamespace( this.registry ); ns.setName( szName ); return ns; } protected Object[] affirmNSExisted( String szName, GUID parentGuid ) { Namespace ns = null; if( parentGuid == null ) { ElementNode rootE = this.registry.queryElement( szName ); if( rootE != null ) { if( rootE.evinceNamespace() == null ) { throw new IllegalArgumentException( String.format( "Existing child destination [%s] should be a namespace.", szName ) ); } ns = rootE.evinceNamespace(); } } else { ElementNode parentNode = (ElementNode)this.registry.get( parentGuid ); if( parentNode instanceof Namespace ) { Collection destChildren = parentNode.evinceNamespace().getChildren().values(); for( TreeNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof Namespace ) { ns = (Namespace) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child destination [%s] should be a namespace.", szName ) ); } } } } } GUID currentGuid; if( ns == null ) { ns = this.newNamespace( szName ); currentGuid = this.registry.put( ns ); this.registry.affirmOwnedNode( parentGuid, currentGuid ); } else { currentGuid = ns.getGuid(); } return new Object[] { ns, currentGuid }; } protected Object[] affirmPrExisted( String szName, GUID parentGuid ) { Properties pr = null; if( parentGuid == null ) { ElementNode rootE = this.registry.queryElement( szName ); if( rootE != null ) { if( rootE.evinceProperties() == null ) { throw new IllegalArgumentException( String.format( "Existing child destination [%s] should be a properties node.", szName ) ); } pr = rootE.evinceProperties(); } } else { ElementNode parentNode = (ElementNode)this.registry.get( parentGuid ); if( parentNode instanceof Namespace ) { Collection destChildren = parentNode.evinceNamespace().getChildren().values(); for( TreeNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof Properties ) { pr = (Properties) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child destination [%s] should be a properties node.", szName ) ); } } } } } Properties neo ; if( pr == null ) { neo = new GenericProperties( this.registry ); neo.setName( szName
); } else { neo = pr; } return new Object[] { pr, neo }; } protected ElementNode decodeJSONObject( String szName, Map jo, GUID parentGuid ) { boolean isNamespace = this.isPropertiesFormat(jo); ElementNode elementNode; GUID currentGuid; if ( isNamespace ) { Object[] pair = this.affirmNSExisted( szName, parentGuid ); Namespace ns = (Namespace) pair[ 0 ]; currentGuid = (GUID) pair[ 1 ]; for ( Object o : jo.entrySet() ) { Map.Entry kv = (Map.Entry) o; this.decode( kv.getKey().toString(), kv.getValue(), currentGuid ); } elementNode = ns; } else { Object[] pair = this.affirmPrExisted( szName, parentGuid ); Properties prX = (Properties) pair[ 0 ]; Properties pro = (Properties) pair[ 1 ]; for ( Object o : jo.entrySet() ) { Map.Entry kv = (Map.Entry) o; pro.put( kv.getKey().toString(), kv.getValue() ); } if( prX == null ) { currentGuid = this.registry.put( pro ); this.registry.affirmOwnedNode( parentGuid, currentGuid ); } elementNode = pro; } return elementNode; } protected ElementNode decodeJSONArray( String szName, List ja, GUID parentGuid ) { boolean isNamespace = this.isPropertiesFormat(ja); ElementNode elementNode; GUID currentGuid; if ( isNamespace ) { Object[] pair = this.affirmNSExisted( szName, parentGuid ); Namespace ns = (Namespace) pair[ 0 ]; currentGuid = (GUID) pair[ 1 ]; int i = 0; for ( Object o : ja ) { this.decode( Integer.toString(i), o, currentGuid ); ++i; } elementNode = ns; } else { Object[] pair = this.affirmPrExisted( szName, parentGuid ); Properties prX = (Properties) pair[ 0 ]; Properties pro = (Properties) pair[ 1 ]; int i = 0; for ( Object o : ja ) { pro.put( Integer.toString(i), o ); ++i; } if( prX == null ) { currentGuid = this.registry.put( pro ); this.registry.affirmOwnedNode( parentGuid, currentGuid ); } elementNode = pro; } return elementNode; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryJSONEncoder.java ================================================ package com.pinecone.hydra.registry.marshaling; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.entity.ElementNode; public class RegistryJSONEncoder implements RegistryEncoder { protected KOMRegistry registry; public RegistryJSONEncoder( KOMRegistry registry ) { this.registry = registry; } @Override public Object encode( ElementNode node ) { if( node.evinceNamespace() != null ) { return node.evinceNamespace().toJSONObject(); } else if( node.evinceProperties() != null ) { return node.evinceProperties().toJSONObject(); } else if( node.evinceTextFile() != null ) { return node.evinceTextFile().toJSON(); } return null; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryQuery.java ================================================ package com.pinecone.hydra.registry.marshaling; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface RegistryQuery { String value() default ""; } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/ArchConfigNodeOperator.java ================================================ package com.pinecone.hydra.registry.operator; import 
com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.registry.entity.ConfigNodeMeta; import com.pinecone.hydra.registry.entity.ArchConfigNode; import com.pinecone.hydra.registry.entity.Attributes; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator; import com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; import java.lang.reflect.Field; import java.time.LocalDateTime; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; public abstract class ArchConfigNodeOperator extends ArchRegistryOperator { protected Map cacheMap = new HashMap<>(); protected RegistryConfigNodeManipulator registryConfigNodeManipulator; protected RegistryNodeMetaManipulator configNodeMetaManipulator; public ArchConfigNodeOperator( RegistryOperatorFactory factory ) { this( factory.getMasterManipulator(), (KOMRegistry) factory.getRegistry() ); this.factory = factory; } public ArchConfigNodeOperator( RegistryMasterManipulator masterManipulator, KOMRegistry registry ) { super( masterManipulator, registry ); this.registryConfigNodeManipulator = this.registryMasterManipulator.getConfigNodeManipulator(); this.configNodeMetaManipulator = this.registryMasterManipulator.getNodeMetaManipulator(); } @Override public GUID insert( TreeNode treeNode ) { ArchConfigNode configNode = (ArchConfigNode) treeNode; ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode ); GuidAllocator guidAllocator = this.registry.getGuidAllocator(); GUID guid72 = configNode.getGuid(); GUID configNodeMetaGuid = guidAllocator.nextGUID(); ConfigNodeMeta configNodeMeta = configNode.getConfigNodeMeta(); if ( configNodeMeta != null ){ configNodeMeta.setGuid(configNodeMetaGuid); this.configNodeMetaManipulator.insert(configNodeMeta); } else { configNodeMetaGuid = null; } GUID commonDataGuid = guidAllocator.nextGUID(); Attributes attributes = configNode.getAttributes(); if (attributes != null){ attributes.setGuid(commonDataGuid); this.attributesManipulator.insert(attributes); } else { commonDataGuid = null; } imperialTreeNode.setBaseDataGUID( commonDataGuid ); imperialTreeNode.setNodeMetadataGUID( configNodeMetaGuid ); this.imperialTree.insert(imperialTreeNode); this.registryConfigNodeManipulator.insert( configNode ); return guid72; } @Override public void purge( GUID guid ) { // A ConfigNode is a leaf node; only its node data and reference/inheritance relations need to be removed. GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.registryConfigNodeManipulator.remove(guid); this.attributesManipulator.remove(node.getAttributesGUID()); this.configNodeMetaManipulator.remove(node.getNodeMetadataGUID()); this.imperialTree.removeCachePath(guid); } @Override public RegistryTreeNode get( GUID guid ) { ConfigNode rootConfig = this.cacheMap.get( guid ); if ( rootConfig == null ) { rootConfig = this.getConfigNodeWideData( guid ); ConfigNode thisConfig = rootConfig; while ( true ) { GUID affinityGuid = thisConfig.getDataAffinityGuid(); if ( affinityGuid != null ){ ConfigNode
parent = this.getConfigNodeWideData( affinityGuid ); this.inherit( thisConfig, parent ); thisConfig = parent; } else { break; } } this.cacheMap.put( guid, rootConfig ); } return rootConfig; } @Override public RegistryTreeNode getAsRootDepth( GUID guid ) { return this.getConfigNodeWideData( guid ); } @Override public void update( TreeNode treeNode ) { ConfigNode configNode = (ConfigNode) treeNode; ConfigNodeMeta configNodeMeta = configNode.getConfigNodeMeta(); Attributes attributes = configNode.getAttributes(); configNode.setUpdateTime(LocalDateTime.now()); if (configNodeMeta != null){ this.configNodeMetaManipulator.update(configNodeMeta); } if (attributes != null){ this.attributesManipulator.update(attributes); } this.registryConfigNodeManipulator.update(configNode); } @Override public void updateName( GUID guid, String name ) { this.registryConfigNodeManipulator.updateName( guid, name ); } protected ConfigNode getConfigNodeWideData( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); ConfigNode cn = this.registryConfigNodeManipulator.getConfigNode( guid ); if( cn instanceof ArchConfigNode ) { ((ArchConfigNode) cn).apply( this.registry ); } ConfigNodeMeta configNodeMeta = this.configNodeMetaManipulator.getConfigNodeMeta( node.getNodeMetadataGUID() ); // Notice: Registry attributes differ from those of the other trees -- they behave like DOM attributes; // so in this case, this field is deprecated. //Attributes attributes = this.attributesManipulator.getAttributes( node.getAttributesGUID(), cn ); Attributes attributes = this.attributesManipulator.getAttributes( guid, cn ); cn.setAttributes ( attributes ); cn.setConfigNodeMeta( configNodeMeta ); return cn; } protected void inherit(ConfigNode self, ConfigNode prototype ){ Class clazz = self.getClass(); Field[] fields = clazz.getDeclaredFields(); for ( Field field : fields ){ field.setAccessible(true); try { Object value1 = field.get( self ); Object value2 = field.get( prototype ); if ( Objects.isNull(value1) || (value1 instanceof List && ((List) value1).isEmpty()) ){ field.set(self,value2); } } catch ( IllegalAccessException e ) { throw new ProxyProvokeHandleException(e); } } } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/ArchRegistryOperator.java ================================================ package com.pinecone.hydra.registry.operator; import java.time.LocalDateTime; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.entity.ArchElementNode; import com.pinecone.hydra.registry.source.RegistryAttributesManipulator; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; public abstract class ArchRegistryOperator implements RegistryNodeOperator { protected KOMRegistry registry; protected ImperialTree imperialTree; protected RegistryMasterManipulator registryMasterManipulator; protected RegistryAttributesManipulator attributesManipulator; protected RegistryOperatorFactory factory; public ArchRegistryOperator ( RegistryOperatorFactory factory ) { this( factory.getMasterManipulator(),(KOMRegistry) factory.getRegistry() ); this.factory =
factory; } public ArchRegistryOperator( RegistryMasterManipulator masterManipulator, KOMRegistry registry ){ this.registryMasterManipulator = masterManipulator; this.imperialTree = registry.getMasterTrieTree(); this.attributesManipulator = this.registryMasterManipulator.getAttributesManipulator(); this.registry = registry; } protected ImperialTreeNode affirmPreinsertionInitialize(TreeNode treeNode ) { ArchElementNode entityNode = (ArchElementNode) treeNode; GUID guid72 = entityNode.getGuid(); // Case 1: Dummy config node. GuidAllocator guidAllocator = this.registry.getGuidAllocator(); if( guid72 == null ) { guid72 = guidAllocator.nextGUID(); entityNode.setGuid( guid72 ); entityNode.setCreateTime( LocalDateTime.now() ); } entityNode.setUpdateTime( LocalDateTime.now() ); ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode(); imperialTreeNode.setGuid( guid72 ); imperialTreeNode.setType( UOIUtils.createLocalJavaClass( entityNode.getClass().getName() ) ); return imperialTreeNode; } public RegistryOperatorFactory getOperatorFactory() { return this.factory; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/GenericRegistryOperatorFactory.java ================================================ package com.pinecone.hydra.registry.operator; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.entity.GenericNamespace; import com.pinecone.hydra.registry.entity.GenericProperties; import com.pinecone.hydra.registry.entity.GenericTextFile; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; public class GenericRegistryOperatorFactory implements RegistryOperatorFactory { protected RegistryMasterManipulator registryMasterManipulator; protected KOMRegistry registry; protected Map registerer = new HashMap<>(); protected Map metaTypeMap = new TreeMap<>(); protected void registerDefaultMetaType( Class genericType ) { this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace("Generic","") ); } protected void registerDefaultMetaTypes() { this.registerDefaultMetaType( GenericNamespace.class ); this.registerDefaultMetaType( GenericProperties.class ); this.registerDefaultMetaType( GenericTextFile.class ); } public GenericRegistryOperatorFactory(KOMRegistry registry, RegistryMasterManipulator registryMasterManipulator ){ this.registry = registry; this.registryMasterManipulator = registryMasterManipulator; this.registerer.put( RegistryOperatorFactory.DefaultNamespaceNodeKey, new NamespaceNodeOperator( this ) ); this.registerer.put(RegistryOperatorFactory.DefaultPropertyConfigNodeKey, new PropertiesOperator(this) ); this.registerer.put(RegistryOperatorFactory.DefaultTextConfigNode, new TextValueNodeOperator(this) ); this.registerDefaultMetaTypes(); } @Override public void register( String typeName, TreeNodeOperator functionalNodeOperation ) { this.registerer.put( typeName, functionalNodeOperation ); } @Override public void registerMetaType( Class clazz, String metaType ){ this.registerMetaType( clazz.getName(), metaType ); } @Override public void registerMetaType( String classFullName, String metaType ){ this.metaTypeMap.put( classFullName, metaType ); } @Override public String getMetaType( String classFullName ) { return this.metaTypeMap.get( classFullName ); } @Override public RegistryNodeOperator getOperator( 
String typeName ) { //Debug.trace( this.registerer.toString() ); return (RegistryNodeOperator)this.registerer.get( typeName ); } @Override public KOMRegistry getRegistry() { return this.registry; } @Override public RegistryMasterManipulator getMasterManipulator() { return this.registryMasterManipulator; } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/NamespaceNodeOperator.java ================================================ package com.pinecone.hydra.registry.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.entity.GenericNamespace; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.entity.NamespaceMeta; import com.pinecone.hydra.registry.entity.Attributes; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.registry.source.RegistryNSNodeManipulator; import com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.framework.util.id.GuidAllocator; import java.util.ArrayList; import java.util.List; public class NamespaceNodeOperator extends ArchRegistryOperator { private RegistryNSNodeManipulator namespaceNodeManipulator; private RegistryNSNodeMetaManipulator namespaceNodeMetaManipulator; public NamespaceNodeOperator ( RegistryOperatorFactory factory ) { this( factory.getMasterManipulator(),(KOMRegistry) factory.getRegistry() ); this.factory = factory; } public NamespaceNodeOperator( RegistryMasterManipulator masterManipulator , KOMRegistry registry ){ super( masterManipulator, registry ); this.namespaceNodeManipulator = this.registryMasterManipulator.getNSNodeManipulator(); this.namespaceNodeMetaManipulator = this.registryMasterManipulator.getNSNodeMetaManipulator(); } @Override public GUID insert( TreeNode treeNode ) { Namespace nsNode = (Namespace) treeNode; ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode ); GuidAllocator guidAllocator = this.registry.getGuidAllocator(); GUID guid72 = nsNode.getGuid(); NamespaceMeta namespaceMeta = nsNode.getNamespaceWithMeta(); GUID namespaceNodeMetaGuid = guidAllocator.nextGUID(); if (namespaceMeta != null){ namespaceMeta.setGuid(namespaceNodeMetaGuid); this.namespaceNodeMetaManipulator.insert(namespaceMeta); } else { namespaceNodeMetaGuid = null; } Attributes attributes = nsNode.getAttributes(); GUID nodeAttributesGuid = guidAllocator.nextGUID(); if (attributes != null){ attributes.setGuid( nodeAttributesGuid ); this.attributesManipulator.insert(attributes); } else { nodeAttributesGuid = null; } imperialTreeNode.setNodeMetadataGUID(namespaceNodeMetaGuid); imperialTreeNode.setBaseDataGUID(nodeAttributesGuid); this.imperialTree.insert(imperialTreeNode); this.namespaceNodeManipulator.insert( nsNode ); return guid72; } @Override public void purge( GUID guid ) { //A namespace node must recursively purge the nodes it owns; a referenced node is cleaned up only when no other reference to it remains. List childNodes = this.imperialTree.getChildren(guid); GUIDImperialTrieNode node = this.imperialTree.getNode(guid); if ( !childNodes.isEmpty() ){ List subordinates = this.imperialTree.getSubordinates(guid); if ( !subordinates.isEmpty() ){ for ( GUID subordinateGuid :
subordinates ){ this.purge( subordinateGuid ); } } childNodes = this.imperialTree.getChildren( guid ); for( GUIDImperialTrieNode childNode : childNodes ){ List parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid()); if ( parentNodes.size() > 1 ){ this.imperialTree.removeInheritance(childNode.getGuid(),guid); } else { this.purge( childNode.getGuid() ); } } } if ( node.getType().getObjectName().equals(GenericNamespace.class.getName()) ){ this.removeNode(guid); } else { UOI uoi = node.getType(); String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() ); if( metaType == null ) { TreeNode newInstance = (TreeNode)uoi.newInstance( new Class[]{ KOMRegistry.class }, this.registry ); metaType = newInstance.getMetaType(); } RegistryNodeOperator operator = this.getOperatorFactory().getOperator( metaType ); operator.purge( guid ); } } @Override public RegistryTreeNode get( GUID guid ) { return this.getNamespaceNodeWideData( guid, 0 ); } @Override public RegistryTreeNode get( GUID guid, int depth ) { return this.getNamespaceNodeWideData( guid, depth ); } @Override public RegistryTreeNode getAsRootDepth( GUID guid ) { return this.getNamespaceNodeWideData( guid, 0 ); } @Override public void update( TreeNode treeNode ) { } @Override public void updateName( GUID guid, String name ) { this.namespaceNodeManipulator.updateName( guid, name ); } private Namespace getNamespaceNodeWideData( GUID guid, int depth ){ Namespace ns = this.namespaceNodeManipulator.getNamespaceWithMeta( guid ); if ( ns instanceof GenericNamespace ){ ((GenericNamespace) ns).apply( this.registry ); } GUIDImperialTrieNode node = this.imperialTree.getNode(guid); if( depth <= 0 ) { List childNode = this.imperialTree.getChildren(guid); ArrayList guids = new ArrayList<>(); for ( GUIDImperialTrieNode n : childNode ){ guids.add( n.getGuid() ); } ++depth; ns.setChildrenGuids( guids, depth ); } Attributes attributes = this.attributesManipulator.getAttributes( guid, ns ); NamespaceMeta namespaceNodeMeta = this.namespaceNodeMetaManipulator.getNamespaceNodeMeta( node.getNodeMetadataGUID() ); ns.setAttributes ( attributes ); ns.setNamespaceMeta ( namespaceNodeMeta ); return ns; } private void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath(guid); this.namespaceNodeManipulator.remove(guid); this.namespaceNodeMetaManipulator.remove(node.getNodeMetadataGUID()); this.attributesManipulator.remove(node.getAttributesGUID()); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/PropertiesOperator.java ================================================ package com.pinecone.hydra.registry.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.Property; import com.pinecone.hydra.registry.source.RegistryPropertiesManipulator; import java.util.List; public class PropertiesOperator extends ArchConfigNodeOperator { protected RegistryPropertiesManipulator registryPropertiesManipulator; public PropertiesOperator( RegistryOperatorFactory factory ) { super(factory); this.registryPropertiesManipulator=factory.getMasterManipulator().getPropertiesManipulator(); } @Override public Properties get( GUID guid ) { return (Properties) super.get( guid ); } @Override public Properties get( GUID guid, int depth ) { 
return this.get( guid ); } @Override protected void inherit( ConfigNode self, ConfigNode prototype ) { // Extends meta data. super.inherit( self, prototype ); Properties sp = (Properties) self; Properties pp = (Properties) prototype; sp.setAffinityParent( pp ); sp.setParentProperties( pp.getPropertiesMap() ); } @Override protected Properties getConfigNodeWideData( GUID guid ) { ConfigNode configNodeWideData = super.getConfigNodeWideData( guid ); if( configNodeWideData instanceof Properties ) { Properties propertiesNode = (Properties) configNodeWideData; List properties = this.registryPropertiesManipulator.getProperties( guid, propertiesNode ); propertiesNode.setProperties( properties ); return propertiesNode; } throw new IllegalStateException( String.format( "'%s' should be `PropertiesNode` but `%s` found.", guid.toString(), configNodeWideData.getClass().getSimpleName() ) ); } @Override public void purge( GUID guid ) { super.purge(guid); this.registryPropertiesManipulator.removeAll(guid); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/RegistryNodeOperator.java ================================================ package com.pinecone.hydra.registry.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.RegistryTreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface RegistryNodeOperator extends TreeNodeOperator { @Override RegistryTreeNode get( GUID guid ); RegistryTreeNode get( GUID guid, int depth ); @Override RegistryTreeNode getAsRootDepth( GUID guid ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/RegistryOperatorFactory.java ================================================ package com.pinecone.hydra.registry.operator; import com.pinecone.hydra.registry.Registry; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.TextFile; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.unit.imperium.operator.OperatorFactory; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface RegistryOperatorFactory extends OperatorFactory { String DefaultNamespaceNodeKey = Namespace.class.getSimpleName(); String DefaultConfigNodeKey = ConfigNode.class.getSimpleName(); String DefaultPropertyConfigNodeKey = Properties.class.getSimpleName(); String DefaultTextConfigNode = TextFile.class.getSimpleName(); void register( String typeName, TreeNodeOperator functionalNodeOperation ); void registerMetaType( Class clazz, String metaType ); void registerMetaType( String classFullName, String metaType ); String getMetaType( String classFullName ); RegistryNodeOperator getOperator( String typeName ); Registry getRegistry(); RegistryMasterManipulator getMasterManipulator(); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/TextValueNodeOperator.java ================================================ package com.pinecone.hydra.registry.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.registry.entity.GenericTextFile; import com.pinecone.hydra.registry.entity.TextFile; import 
com.pinecone.hydra.registry.entity.TextValue; import com.pinecone.hydra.registry.source.RegistryTextFileManipulator; public class TextValueNodeOperator extends ArchConfigNodeOperator { protected RegistryTextFileManipulator registryTextFileManipulator; public TextValueNodeOperator(RegistryOperatorFactory factory) { super(factory); this.registryTextFileManipulator = factory.getMasterManipulator().getTextFileManipulator(); } @Override public TextFile get( GUID guid ) { return (TextFile) super.get( guid ); } @Override public TextFile get( GUID guid, int depth ) { return this.get( guid ); } @Override protected TextFile getConfigNodeWideData( GUID guid ) { ConfigNode configNodeWideData = super.getConfigNodeWideData( guid ); GenericTextFile textConfNode = new GenericTextFile(); TextValue textValue = this.registryTextFileManipulator.getTextValue( guid ); textConfNode.setTextValue ( textValue ); textConfNode.setConfigNodeMeta ( configNodeWideData.getConfigNodeMeta() ); textConfNode.setAttributes ( configNodeWideData.getAttributes() ); textConfNode.setGuid ( configNodeWideData.getGuid() ); textConfNode.setName ( configNodeWideData.getName() ); textConfNode.setCreateTime ( configNodeWideData.getCreateTime() ); textConfNode.setRegistry ( configNodeWideData.parentRegistry() ); textConfNode.setUpdateTime ( configNodeWideData.getUpdateTime() ); return textConfNode; } @Override public void purge( GUID guid ) { super.purge(guid); this.registryTextFileManipulator.remove(guid); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderNamespace.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.RenderDistributeRegistry; import com.pinecone.hydra.registry.entity.GenericNamespace; public class GenericRenderNamespace extends GenericNamespace implements RenderNamespace { protected RenderDistributeRegistry registry; public void apply( RenderDistributeRegistry registry ) { this.registry = registry; } @Override public RenderDistributeRegistry getRegistry() { return this.registry; } @Override public RenderConfigNode getConfigNode(String key) { return (RenderConfigNode) this.children.get(key); } @Override public RenderNamespace getNamespace(String key) { return (RenderNamespace) this.children.get(key); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderProperties.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.RenderDistributeRegistry; import com.pinecone.hydra.registry.entity.GenericProperties; import com.pinecone.hydra.registry.entity.Property; import java.util.Collection; import java.util.Set; public class GenericRenderProperties extends GenericProperties implements RenderProperties { protected RenderDistributeRegistry registry; public void apply(RenderDistributeRegistry registry) { this.registry = registry; } @Override public RenderProperty get(String key) { return (RenderProperty)super.get( key ); } @Override public Set entrySet() { return super.entrySet(); } @Override public Collection getProperties() { return super.getProperties(); } @Override public RenderDistributeRegistry getRegistry() { return this.registry; } } ================================================ FILE: 
Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderProperty.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.entity.GenericProperty; import com.pinecone.hydra.registry.entity.Properties; public class GenericRenderProperty extends GenericProperty implements RenderProperty { public GenericRenderProperty() { super(); } public GenericRenderProperty( Properties properties ) { super( properties ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderTextFile.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.RenderDistributeRegistry; import com.pinecone.hydra.registry.entity.GenericTextFile; public class GenericRenderTextFile extends GenericTextFile implements RenderTextFile { protected RenderDistributeRegistry registry; public void apply(RenderDistributeRegistry registry) { super.apply(registry); } @Override public RenderTextValue get() { return (RenderTextValue) this.mTextValue; } @Override public RenderDistributeRegistry getRegistry() { return (RenderDistributeRegistry) super.parentRegistry(); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderTextValue.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.entity.GenericTextValue; public class GenericRenderTextValue extends GenericTextValue implements RenderTextValue{ } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderConfigNode.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.entity.ConfigNode; public interface RenderConfigNode extends ConfigNode { } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderNamespace.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.entity.Namespace; public interface RenderNamespace extends Namespace,RenderRegistryTreeNode { } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderProperties.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.framework.util.template.UniformTemplateRenderer; import com.pinecone.hydra.registry.entity.Property; import com.pinecone.hydra.registry.entity.Properties; public interface RenderProperties extends Properties, RenderRegistryTreeNode { default Object renderValue ( String key, UniformTemplateRenderer renderer, Object tpl ) { Property v = this.get( key ); if( v.isStringBasedType() ) { return renderer.render( v.getValue().toString(), tpl ); } return v.getValue().toString(); } default Object renderValue ( String key, Object tpl ) { Property v = this.get( key ); if( v.isStringBasedType() ) { return this.getRegistry().getRenderer().render( v.getValue().toString(), tpl ); } return v.getValue().toString(); } } ================================================ FILE: 
Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderProperty.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.entity.Property; public interface RenderProperty extends Property { } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderRegistryTreeNode.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.RenderDistributeRegistry; import com.pinecone.hydra.registry.entity.RegistryTreeNode; public interface RenderRegistryTreeNode extends RegistryTreeNode { RenderDistributeRegistry getRegistry(); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderTextFile.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.framework.util.template.UniformTemplateRenderer; import com.pinecone.hydra.registry.entity.TextFile; import com.pinecone.hydra.registry.entity.TextValue; public interface RenderTextFile extends TextFile,RenderRegistryTreeNode { default Object renderValue ( UniformTemplateRenderer renderer, Object tpl ) { TextValue v = this.get( ); return renderer.render( v.toString() ,tpl ); } default Object renderValue ( Object tpl ) { TextValue v = this.get(); return this.getRegistry().getRenderer().render( v.toString(), tpl ); } } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderTextValue.java ================================================ package com.pinecone.hydra.registry.render; import com.pinecone.hydra.registry.entity.TextValue; public interface RenderTextValue extends TextValue { } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryAttributesManipulator.java ================================================ package com.pinecone.hydra.registry.source; import java.util.List; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.Attributes; import com.pinecone.hydra.registry.entity.ElementNode; public interface RegistryAttributesManipulator extends Pinenut { void insertAttribute( GUID guid, String key, String value ); List > getAttributesByGuid(GUID guid ); void updateAttribute( GUID guid, String key, String value ); void remove( GUID guid ); Attributes getAttributes( GUID guid, ElementNode element ); default void insert( Attributes attributes) { for ( Map.Entry entry : attributes.getAttributes().entrySet() ) { this.insertAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() ); } } default void update( Attributes attributes) { for ( Map.Entry entry : attributes.getAttributes().entrySet() ) { this.updateAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() ); } } boolean containsKey ( GUID guid, String key ); void clearAttributes( GUID guid ); void removeAttributeWithValue( GUID guid, String key, String value ); void removeAttribute( GUID guid, String key ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryConfigNodeManipulator.java 
================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface RegistryConfigNodeManipulator extends GUIDNameManipulator { void insert( ConfigNode configNode ); void remove( GUID guid ); boolean isConfigNode( GUID guid ); ConfigNode getConfigNode(GUID guid ); void update( ConfigNode configNode ); List getGuidsByName( String name ); List getGuidsByNameID( String name, GUID guid ); List dumpGuid(); void updateName(GUID guid ,String name); GUID getDataAffinityGuid( GUID guid ); void setDataAffinityGuid( GUID guid, GUID affinityGuid ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryMasterManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; public interface RegistryMasterManipulator extends KOIMasterManipulator { RegistryConfigNodeManipulator getConfigNodeManipulator(); RegistryNSNodeManipulator getNSNodeManipulator(); RegistryPropertiesManipulator getPropertiesManipulator(); RegistryTextFileManipulator getTextFileManipulator(); RegistryNodeMetaManipulator getNodeMetaManipulator(); RegistryNSNodeMetaManipulator getNSNodeMetaManipulator(); RegistryAttributesManipulator getAttributesManipulator(); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNSNodeManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface RegistryNSNodeManipulator extends GUIDNameManipulator { void insert( Namespace namespace); void remove( GUID guid ); boolean isNamespaceNode( GUID guid ); Namespace getNamespaceWithMeta( GUID guid ); void update( Namespace namespace); List getGuidsByName( String name ); List getGuidsByNameID( String name, GUID guid ); List dumpGuid(); void updateName( GUID guid, String name ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNSNodeMetaManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.NamespaceMeta; public interface RegistryNSNodeMetaManipulator extends Pinenut { void insert( NamespaceMeta namespaceMeta); void remove( GUID guid ); NamespaceMeta getNamespaceNodeMeta(GUID guid ); void update( NamespaceMeta namespaceMeta); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNodeMetaManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.ConfigNodeMeta; public interface RegistryNodeMetaManipulator { void insert(ConfigNodeMeta configNodeMeta); void remove(GUID guid); ConfigNodeMeta getConfigNodeMeta(GUID 
guid); void update(ConfigNodeMeta configNodeMeta); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNodeOwnerManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.util.List; public interface RegistryNodeOwnerManipulator extends Pinenut { void insert(GUID subordinateGuid,GUID ownerGuid); void remove(GUID subordinateGuid,GUID ownerGuid); void removeBySubordinate(GUID subordinateGuid); void removeByOwner(GUID OwnerGuid); GUID getOwner(GUID subordinateGuid); List getSubordinates(GUID guid); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNodePathManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface RegistryNodePathManipulator extends Pinenut { void insert( GUID guid, String path ); void remove( GUID guid ); String getPath( GUID guid ); GUID getNode( String path ); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryPropertiesManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.Property; import java.util.List; public interface RegistryPropertiesManipulator extends Pinenut { void insert(Property property); void remove( GUID guid, String key ); List getProperties( GUID guid, Properties parent ); void update( Property property ); void removeAll( GUID guid ); void copyPropertiesTo(GUID sourceGuid, GUID destinationGuid); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryTextFileManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.TextValue; public interface RegistryTextFileManipulator extends Pinenut { void insert(TextValue textValue); void remove(GUID guid); TextValue getTextValue(GUID guid); void update(TextValue textValue); void copyTextValueTo(GUID sourceGuid, GUID destinationGuid); } ================================================ FILE: Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryTreeManipulator.java ================================================ package com.pinecone.hydra.registry.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import java.util.List; public interface RegistryTreeManipulator extends Pinenut { void insert (ImperialTreeNode distributedConfTreeNode); GUIDImperialTrieNode getNode(GUID guid); void remove(GUID guid); void removeInheritance(GUID childGuid, GUID parentGuid); List fetchParentGuids(GUID guid); List 
getChild(GUID guid); } ================================================ FILE: Hydra/hydra-framework-device/pom.xml ================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId>
    <artifactId>hydra-framework-device</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-architecture</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.ulf</groupId>
            <artifactId>ulfhedinn</artifactId>
            <version>1.2.1</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>
================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/ArchDeployFamilyMeta.java ================================================ package com.pinecone.hydra.deploy; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.ulf.util.guid.GUIDs; public abstract class ArchDeployFamilyMeta implements DeployFamilyMeta { protected GUID guid; protected String name; protected String extraInformation; protected String ipAddress; protected String szElementaryConfig; protected Map elementaryConfig; protected String description; public ArchDeployFamilyMeta() { } public ArchDeployFamilyMeta(Map joEntity ) { this.apply( joEntity ); } protected ArchDeployFamilyMeta apply(Map joEntity ) { String szGuid = (String) joEntity.get( "guid" ); if( szGuid != null ) { this.guid = GUIDs.GUID128( (String) joEntity.get( "guid" ) ); } BeanMapDecoder.BasicDecoder.decode( this, joEntity ); return this; } public GUID getGuid() { return this.guid; } @Override public Identification getId() { return this.getGuid(); } @Override public String getName() { return this.name; } @Override public String getExtraInformation() { return this.extraInformation; } @Override public String getIpAddress() { return this.ipAddress; } @Override public String getDescription() { return this.description; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/DeployExtraMeta.java ================================================ package com.pinecone.hydra.deploy; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface DeployExtraMeta extends Pinenut { DeployFamilyMeta getKernelMeta(); GUID getGuid() ; String getDeployName(); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/DeployFamilyMeta.java ================================================ package com.pinecone.hydra.deploy; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; public interface DeployFamilyMeta extends Pinenut { //long getEnumId(); //GUID getGuid(); Identification getId() ; String getName(); String getExtraInformation(); String getIpAddress(); String getDescription(); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericContainer.java ================================================ package com.pinecone.hydra.deploy.entity; import com.pinecone.hydra.deploy.Container; public class GenericContainer implements Container { protected String status;
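/* Free-form lifecycle status of the container; concrete values are defined by callers (assumption -- this module does not constrain them). */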
@Override public String getStatus() { return this.status; } @Override public void setStatus( String status ) { this.status = status; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericPhysicalHost.java ================================================ package com.pinecone.hydra.deploy.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.deploy.PhysicalHost; import com.pinecone.hydra.deploy.kom.entity.ArchElementNode; public class GenericPhysicalHost extends ArchElementNode implements PhysicalHost { protected GUID guid; protected String status; protected String hardwareSpecs; protected String name; @Override public String getStatus() { return this.status; } @Override public void setStatus( String status ) { this.status = status; } @Override public String getHardwareSpecs() { return this.hardwareSpecs; } @Override public void setHardwareSpecs( String hardwareSpecs ) { this.hardwareSpecs = hardwareSpecs; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } // @Override // public GUID getGuid() { // return this.guid; // } // // @Override // public void setGuid(GUID guid) { // this.guid = guid; // } @Override public String getName() { return this.name; } @Override public void setName( String name ) { this.name = name; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericQuick.java ================================================ package com.pinecone.hydra.deploy.entity; import com.pinecone.hydra.deploy.Quick; public class GenericQuick implements Quick { protected String status; @Override public String getStatus() { return this.status; } @Override public void setStatus(String status) { this.status = status; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericVirtualMachine.java ================================================ package com.pinecone.hydra.deploy.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.deploy.PhysicalHost; import com.pinecone.hydra.deploy.VirtualMachine; public class GenericVirtualMachine implements VirtualMachine { protected GUID affiliateHostGuid; protected String name; protected String ipAddress; protected String status; protected GUID guid; @Override public String getName() { return this.name; } @Override public void setName(String name) { this.name = name; } @Override public String getIpAddress() { return this.ipAddress; } @Override public void setIpAddress(String ipAddress) { this.ipAddress = ipAddress; } @Override public String getStatus() { return this.status; } @Override public void setStatus(String status) { this.status = status; } @Override public PhysicalHost getAffiliateHost() { return null; //return this.affiliateHostGuid; } // @Override // public void setGuid(GUID guid) { // this.guid = guid; // } // // @Override // public void setAffiliateHostGuid(GUID guid) { // this.affiliateHostGuid = guid; // } // @Override // public void setGuid(GUID guid) { // this.guid = guid; // } @Override public 
String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } // @Override // public void setAffiliateHostGuid( GUID guid ) { // this.affiliateHostGuid = guid; // } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployConfig.java ================================================ package com.pinecone.hydra.deploy.kom; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface DeployConfig extends KernelObjectConfig { } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployFamilyNode.java ================================================ package com.pinecone.hydra.deploy.kom; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.deploy.DeployFamilyMeta; public interface DeployFamilyNode extends DeployFamilyMeta { long getEnumId(); void setEnumId(long id); void setName(String name); GUID getGuid(); void setGuid(GUID guid); @Override default Identification getId() { return this.getGuid(); } String getExtraInformation(); void setExtraInformation(String extraInformation); String getDescription(); void setDescription(String description); String getIpAddress(); void setIpAddress( String ipAddress ); DeployFamilyNode apply(Map joEntity) ; } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployInstrument.java ================================================ package com.pinecone.hydra.deploy.kom; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.entity.ContainerElement; import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement; import com.pinecone.hydra.deploy.kom.entity.QuickElement; import com.pinecone.hydra.deploy.kom.entity.ServerElement; import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement; import com.pinecone.hydra.system.ko.kom.ReparseKOMTree; import com.pinecone.hydra.deploy.kom.entity.ElementNode; import com.pinecone.hydra.deploy.kom.entity.ClusterElement; import com.pinecone.hydra.deploy.kom.entity.Namespace; import com.pinecone.hydra.deploy.kom.entity.DeployElement; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface DeployInstrument extends ReparseKOMTree { DeployConfig KERNEL_DEPLOY_CONFIG = new KernelDeployConfig(); ClusterElement affirmCluster(String path ); Namespace affirmNamespace( String path ); ServerElement affirmServer( String path ); QuickElement affirmQuick( String path ); VirtualMachineElement affirmVirtualMachine( String path ); ContainerElement affirmContainerElement(String path); PhysicalHostElement affirmPhysicalHost(String path ); ElementNode queryElement( String path ); boolean containsChild( GUID parentGuid, String childName ); void update( TreeNode treeNode ); void createDeployServiceInsMapping( GUID deployGuid, GUID serviceInsGuid ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployPathSelector.java ================================================ package com.pinecone.hydra.deploy.kom; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.kom.StandardPathSelector; import 
com.pinecone.hydra.unit.imperium.ImperialTree; public class DeployPathSelector extends StandardPathSelector { public DeployPathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, dirMan, fileMans ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/KernelDeployConfig.java ================================================ package com.pinecone.hydra.deploy.kom; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; public class KernelDeployConfig extends ArchKernelObjectConfig implements DeployConfig { } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/UniformDeployInstrument.java ================================================ package com.pinecone.hydra.deploy.kom; import java.util.ArrayList; import java.util.List; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.deploy.kom.entity.ClusterElement; import com.pinecone.hydra.deploy.kom.entity.ContainerElement; import com.pinecone.hydra.deploy.kom.entity.GenericContainerElement; import com.pinecone.hydra.deploy.kom.entity.GenericDeployInsMapping; import com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement; import com.pinecone.hydra.deploy.kom.entity.GenericQuickElement; import com.pinecone.hydra.deploy.kom.entity.ArchServerElement; import com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement; import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement; import com.pinecone.hydra.deploy.kom.entity.QuickElement; import com.pinecone.hydra.deploy.kom.entity.ServerElement; import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement; import com.pinecone.hydra.deploy.kom.source.DeployServiceInsMappingManipulator; import com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator; import com.pinecone.hydra.deploy.kom.source.QuickElementManipulator; import com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree; import com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition; import com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector; import com.pinecone.hydra.deploy.kom.entity.ElementNode; import com.pinecone.hydra.deploy.kom.entity.GenericClusterElement; import com.pinecone.hydra.deploy.kom.entity.GenericNamespace; import com.pinecone.hydra.deploy.kom.entity.Namespace; import com.pinecone.hydra.deploy.kom.entity.DeployTreeNode; import com.pinecone.hydra.deploy.kom.operator.GenericElementOperatorFactory; import com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator; import com.pinecone.hydra.deploy.kom.source.DeployNodeManipulator; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.RegimentedImperialTree; 
import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.ulf.util.guid.GUIDs; public class UniformDeployInstrument extends ArchReparseKOMTree implements DeployInstrument { //GenericDistributedScopeTree protected ImperialTree imperialTree; protected DeployMasterManipulator deployMasterManipulator; protected DeployNamespaceManipulator deployNamespaceManipulator; protected ClusterNodeManipulator clusterNodeManipulator; protected DeployNodeManipulator deployNodeManipulator; protected List folderManipulators; protected List fileManipulators; protected PhysicalHostManipulator physicalHostManipulator; protected VirtualMachineManipulator virtualMachineManipulator; protected QuickElementManipulator quickElementManipulator; protected DeployServiceInsMappingManipulator deployServiceInsMappingManipulator; public UniformDeployInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, DeployInstrument parent, String name, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, masterManipulator, DeployInstrument.KERNEL_DEPLOY_CONFIG, parent, name, guidAllocator ); this.deployMasterManipulator = (DeployMasterManipulator) masterManipulator; this.deployNamespaceManipulator = this.deployMasterManipulator.getNamespaceManipulator(); this.clusterNodeManipulator = this.deployMasterManipulator.getJobNodeManipulator(); this.deployNodeManipulator = this.deployMasterManipulator.getDeployNodeManipulator(); KOISkeletonMasterManipulator skeletonMasterManipulator = this.deployMasterManipulator.getSkeletonMasterManipulator(); TreeMasterManipulator treeMasterManipulator = (TreeMasterManipulator) skeletonMasterManipulator; this.imperialTree = new RegimentedImperialTree(treeMasterManipulator); this.operatorFactory = new GenericElementOperatorFactory(this,(DeployMasterManipulator) masterManipulator); this.physicalHostManipulator = this.deployMasterManipulator.getPhysicalHostManipulator(); this.virtualMachineManipulator = this.deployMasterManipulator.getVirtualMachineManipulator(); this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); this.quickElementManipulator = this.deployMasterManipulator.getQuickElementManipulator(); this.deployServiceInsMappingManipulator = this.deployMasterManipulator.getDeployServiceInsMappingManipulator(); // TODO for customize service tree architecture. 
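/* The two manipulator lists below drive path resolution: folder manipulators resolve intermediate (namespace-like) path segments, file manipulators resolve terminal elements. clusterNodeManipulator is registered in both lists, presumably so a cluster element can match either position in a path. */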
this.folderManipulators = new ArrayList<>( List.of( this.deployNamespaceManipulator, this.clusterNodeManipulator) ); this.fileManipulators = new ArrayList<>( List.of( this.clusterNodeManipulator, this.physicalHostManipulator, this.virtualMachineManipulator, this.quickElementManipulator) ); this.pathSelector = new MultiFolderPathSelector( this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} ) ); this.mReparseKOM = new GenericReparseKOMTreeAddition( this ); } public UniformDeployInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator ) { this( superiorProcess, masterManipulator, null, DeployInstrument.class.getSimpleName(), null ); } // public UniformTaskInstrument( Hydrogen hydrogen ) { // this.hydrogen = hydrogen; // } public UniformDeployInstrument( KOIMappingDriver driver ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator() ); } public UniformDeployInstrument( KOIMappingDriver driver, DeployInstrument parent, String name ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, null ); } protected DeployTreeNode affirmTreeNodeByPath(String path, Class cnSup, Class nsSup ) { String[] parts = this.pathResolver.segmentPathParts( path ); String currentPath = ""; GUID parentGuid = GUIDs.Dummy128(); DeployTreeNode node = this.queryElement(path); if ( node != null ){ return node; } DeployTreeNode ret = null; for( int i = 0; i < parts.length; ++i ){ currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : "" ) + parts[ i ]; node = this.queryElement( currentPath ); if ( node == null){ if ( i == parts.length - 1 && cnSup != null ){ ElementNode en = (ElementNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } ); en.setName( parts[i] ); GUID guid = this.put( en ); this.affirmOwnedNode( parentGuid, guid ); return en; } else { Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } ); namespace.setName( parts[i] ); GUID guid = this.put( namespace ); if ( i != 0 ){ this.affirmOwnedNode( parentGuid, guid ); parentGuid = guid; } else { parentGuid = guid; } ret = namespace; } } else { parentGuid = node.getGuid(); } } return ret; } @Override public ClusterElement affirmCluster(String path ) { return (ClusterElement) this.affirmTreeNodeByPath( path, GenericClusterElement.class, GenericNamespace.class ); } @Override public ServerElement affirmServer(String path) { return (ServerElement) this.affirmTreeNodeByPath( path, ArchServerElement.class, GenericNamespace.class ); } @Override public QuickElement affirmQuick(String path) { return (QuickElement) this.affirmTreeNodeByPath( path, GenericQuickElement.class, GenericNamespace.class ); } @Override public VirtualMachineElement affirmVirtualMachine(String path) { return (VirtualMachineElement) this.affirmTreeNodeByPath( path, GenericVirtualMachineElement.class, GenericNamespace.class ); } @Override public ContainerElement affirmContainerElement(String path) { return (ContainerElement) this.affirmTreeNodeByPath( path, GenericContainerElement.class, GenericNamespace.class ); } @Override public PhysicalHostElement affirmPhysicalHost(String path) { return (PhysicalHostElement) this.affirmTreeNodeByPath( path, GenericPhysicalHostElement.class, GenericNamespace.class ); } @Override public ElementNode queryElement( String path ) { GUID guid = this.queryGUIDByPath( path ); if( guid != null ) { return this.get( guid 
).evinceElementNode(); } return null; } @Override public Namespace affirmNamespace( String path ) { return ( Namespace ) this.affirmTreeNodeByPath( path, null, GenericNamespace.class ); } protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) { List guids = manipulator.getGuidsByName( childName ); for( GUID guid : guids ) { List ps = this.imperialTree.fetchParentGuids( guid ); if( ps.contains( parentGuid ) ){ return true; } } return false; } @Override public boolean containsChild( GUID parentGuid, String childName ) { for( GUIDNameManipulator manipulator : this.fileManipulators ) { if( this.containsChild( manipulator, parentGuid, childName ) ) { return true; } } for( GUIDNameManipulator manipulator : this.folderManipulators ) { if( this.containsChild( manipulator, parentGuid, childName ) ) { return true; } } return false; } /** * Affirm that the path exists in the cache, if required -- that is, when an explicit implementation makes it necessary. * For GenericDistributedScopeTree::getPath, the cache is written automatically by default, so getPath can be relied upon here to guarantee that the cached path exists. * @param guid target guid */ protected void affirmPathExist( GUID guid ) { this.imperialTree.getCachePath( guid ); } @Override public DeployTreeNode get( GUID guid ){ return (DeployTreeNode) super.get( guid ); } @Override public void update( TreeNode treeNode ) { TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() ); operator.update( treeNode ); } @Override public void remove( GUID guid ) { super.remove( guid ); } @Override public void createDeployServiceInsMapping(GUID deployGuid, GUID serviceInsGuid) { GenericDeployInsMapping insMapping = new GenericDeployInsMapping(); insMapping.setServiceInsGuid( serviceInsGuid ); insMapping.setDeployGuid( deployGuid ); this.deployServiceInsMappingManipulator.insert( insMapping ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ArchElementNode.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.ArchDeployFamilyMeta; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public abstract class ArchElementNode extends ArchDeployFamilyMeta implements ElementNode { protected long enumId; protected GUID metaGuid; protected GUIDImperialTrieNode distributedTreeNode; protected DeployInstrument deployInstrument; protected LocalDateTime createTime; protected LocalDateTime updateTime; public ArchElementNode() { super(); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public ArchElementNode( Map joEntity ) { super( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public ArchElementNode( Map joEntity, DeployInstrument deployInstrument) { super( joEntity ); this.apply(deployInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public ArchElementNode(
DeployInstrument deployInstrument) { this.apply(deployInstrument); } public void apply( DeployInstrument deployInstrument) { this.deployInstrument = deployInstrument; GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); if ( this.createTime == null ) { this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } } @Override public ArchElementNode apply( Map joEntity ) { super.apply( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); return this; } @Override public String getKomPath() { return this.deployInstrument.getPath( this.getGuid() ); } @Override public GUID getMetaGuid() { return this.metaGuid; } @Override public void setMetaGuid( GUID metaGuid ) { this.metaGuid = metaGuid; } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId( long enumId ) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid( GUID guid ) { this.guid = guid; } @Override public void setName( String name ) { this.name = name; } @Override public void setExtraInformation( String extraInformation ) { this.extraInformation = extraInformation; } @Override public void setIpAddress( String ipAddress ) { this.ipAddress = ipAddress; } @Override public String getIpAddress() { return this.ipAddress; } @Override public void setDescription( String description ) { this.description = description; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; } @Override public GUIDImperialTrieNode getDistributedTreeNode() { return this.distributedTreeNode; } @Override public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) { this.distributedTreeNode = distributedTreeNode; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } protected Collection fetchChildren() { Collection guids = this.fetchChildrenGuids(); List elementNodes = new ArrayList<>(); for( GUID guid : guids ){ ElementNode elementNode = (ElementNode) this.deployInstrument.get( guid ); elementNodes.add( elementNode ); } return elementNodes; } protected Collection fetchChildrenGuids() { return this.deployInstrument.fetchChildrenGuids( this.getGuid() ); } protected void addChild( ElementNode child ) { GUID childId; boolean bContainsChild = this.containsChild( child.getName() ); if( bContainsChild ) { return; } else { childId = this.deployInstrument.put( child ); } this.deployInstrument.affirmOwnedNode( this.guid, childId ); } protected boolean containsChild( String childName ) { return this.deployInstrument.containsChild( this.guid, childName ); } @Override public JSONObject toJSONObject() { return BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ArchServerElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import 
com.pinecone.hydra.deploy.DeployExtraMeta; import com.pinecone.hydra.deploy.kom.DeployInstrument; import java.util.Map; public abstract class ArchServerElement extends ArchElementNode implements ServerElement { protected String localDomain; protected String wideDomain; protected boolean enable; protected DeployExtraMeta extraMeta; protected DeployElement affiliateDeployment; private void initSelf( Map joEntity ) { BeanMapDecoder.BasicDecoder.decode( this, joEntity ); if ( this.szElementaryConfig != null ) { this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig ); } } public ArchServerElement(Map joEntity ) { super( joEntity ); this.initSelf( joEntity ); } public ArchServerElement(Map joEntity, DeployInstrument deployInstrument ) { super( joEntity, deployInstrument); this.initSelf( joEntity ); } public ArchServerElement(DeployInstrument deployInstrument ) { super(deployInstrument); } public ArchServerElement() { super(); } @Override public String getLocalDomain() { return this.localDomain; } @Override public void setLocalDomain(String localDomain) { this.localDomain = localDomain; } @Override public String getWideDomain() { return this.wideDomain; } @Override public void setWideDomain(String wideDomain) { this.wideDomain = wideDomain; } @Override public boolean isEnable() { return this.enable; } @Override public void setEnable( boolean enable ) { this.enable = enable; } @Override public DeployExtraMeta getExtraMeta() { return this.extraMeta; } @Override public void setExtraMeta(DeployExtraMeta extraMeta) { this.extraMeta = extraMeta; } @Override public DeployElement getAffiliateDeployment() { return this.affiliateDeployment; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ClusterElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import com.pinecone.hydra.deploy.kom.DeployFamilyNode; public interface ClusterElement extends FolderElement, DeployFamilyNode { @Override default ClusterElement evinceClusterElement() { return this; } String getType(); void setType( String type ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/CommonMeta.java ================================================ package com.pinecone.hydra.deploy.kom.entity; public interface CommonMeta extends ElementNode { } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ContainerElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; public interface ContainerElement extends DeployElement { void setStatus( String status ); String getStatus(); @Override default ContainerElement evinceContainerElement() { return this; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/DeployElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import com.pinecone.hydra.deploy.DeployExtraMeta; public interface DeployElement extends ElementNode { @Override default DeployElement evinceDeployElement() { return this; } boolean isEnable() ; void setEnable( boolean enable ) ; DeployExtraMeta getExtraMeta(); void setExtraMeta(DeployExtraMeta extraMeta); DeployElement getAffiliateDeployment(); } 
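A note on the evince* defaults used by the entity interfaces above (ContainerElement, DeployElement) and below (ElementNode): each concrete kind overrides only the evince method matching itself and inherits null for the rest, so callers can branch without instanceof casts -- this is what lets UniformDeployInstrument.queryElement return this.get( guid ).evinceElementNode() and receive null for non-element nodes. A minimal, self-contained sketch of the idiom; Node, Folder and Leaf are hypothetical names, not part of this module:

// Sketch only: evince-style cast-free downcasting with hypothetical types.
interface Node {
    default Folder evinceFolder() { return null; } // non-folders answer null
    default Leaf   evinceLeaf()   { return null; }
}
interface Folder extends Node {
    @Override default Folder evinceFolder() { return this; } // a folder evinces itself
}
interface Leaf extends Node {
    @Override default Leaf evinceLeaf() { return this; }
}
public class EvinceSketch {
    public static void main( String[] args ) {
        Node n = new Folder() {};
        // Branch on evince results instead of instanceof:
        if ( n.evinceFolder() != null ) {
            System.out.println( "folder" );
        } else if ( n.evinceLeaf() != null ) {
            System.out.println( "leaf" );
        }
    }
}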
================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/DeployInsMapping.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface DeployInsMapping extends Pinenut { void setEnumId( long enumId ); long getEnumId(); void setDeployGuid( GUID deployGuid ); GUID getDeployGuid(); void setServiceInsGuid( GUID serviceInsGuid ); GUID getServiceInsGuid(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getCreateTime(); void setUpdateTime( LocalDateTime updateTime ); LocalDateTime getUpdateTime(); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/DeployTreeNode.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface DeployTreeNode extends TreeNode { String getName(); default String getMetaType() { return this.className().replace("Generic",""); } default DeployTreeNode evinceTreeNode(){ return this; } default ElementNode evinceElementNode(){ return null; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ElementNode.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import java.time.LocalDateTime; import java.util.Set; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.deploy.kom.DeployFamilyNode; import com.pinecone.hydra.system.ko.meta.ElementObject; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public interface ElementNode extends DeployTreeNode, DeployFamilyNode, ElementObject { Set UnbeanifiedKeys = Set.of( "distributedTreeNode" ); @Override default String objectCategoryName() { return "Deploy"; } default Namespace evinceNamespace() { return null; } default QuickElement evinceQuickElement() { return null; } default ClusterElement evinceClusterElement() { return null; } default DeployElement evinceDeployElement() { return null; } default VirtualMachineElement evinceVirtualMachineElement() { return null; } default PhysicalHostElement evincePhysicalHostElement() { return null; } default ContainerElement evinceContainerElement() { return null; } GUIDImperialTrieNode getDistributedTreeNode(); void setDistributedTreeNode(GUIDImperialTrieNode distributedTreeNode); JSONObject toJSONObject(); @Override default ElementNode evinceElementNode(){ return this; } GUID getMetaGuid(); void setMetaGuid(GUID metaGuid); String getKomPath(); String getName(); void setName(String name); LocalDateTime getCreateTime(); void setCreateTime(LocalDateTime createTime); LocalDateTime getUpdateTime(); void setUpdateTime(LocalDateTime updateTime); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/FolderElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import java.util.Collection; import com.pinecone.framework.util.id.GUID; public interface FolderElement extends ElementNode { Collection fetchChildren(); Collection fetchChildrenGuids(); void addChild( ElementNode child ); boolean 
containsChild( String childName );
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericClusterElement.java
================================================
package com.pinecone.hydra.deploy.kom.entity;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.json.homotype.BeanColonist;
import com.pinecone.framework.util.json.homotype.BeanMapDecoder;
import com.pinecone.hydra.deploy.kom.DeployInstrument;

public class GenericClusterElement extends ArchElementNode implements ClusterElement {
    protected String taskType;

    public GenericClusterElement() {
        super();
    }

    public GenericClusterElement( Map joEntity ) {
        super( joEntity );
        BeanMapDecoder.BasicDecoder.decode( this, joEntity );
    }

    public GenericClusterElement( Map joEntity, DeployInstrument deployInstrument ) {
        super( joEntity, deployInstrument );
        BeanMapDecoder.BasicDecoder.decode( this, joEntity );
    }

    public GenericClusterElement( DeployInstrument deployInstrument ) {
        super( deployInstrument );
    }

    @Override public String getIpAddress() { return this.ipAddress; }
    @Override public void setIpAddress( String ipAddress ) { this.ipAddress = ipAddress; }
    @Override public String getType() { return this.taskType; }
    @Override public void setType( String taskType ) { this.taskType = taskType; }
    @Override public Collection fetchChildren() { return super.fetchChildren(); }
    @Override public Collection fetchChildrenGuids() { return super.fetchChildrenGuids(); }

    @Override
    public void addChild( ElementNode child ) {
        if( child instanceof FolderElement ) {
            throw new IllegalArgumentException( "Adding a `FolderElement` child to a cluster element is not accepted." );
        }
        super.addChild( child );
    }

    @Override public boolean containsChild( String childName ) { return super.containsChild( childName ); }

    @Override
    public JSONObject toJSONObject() {
        Collection children = this.fetchChildren();
        JSONObject jo = BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys );
        JSONObject joChildren = new JSONMaptron();
        for( ElementNode node : children ) {
            joChildren.put( node.getName(), node.toJSONObject() );
        }
        jo.put( "deployments", joChildren );
        return jo;
    }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericCommonMeta.java
================================================
package com.pinecone.hydra.deploy.kom.entity;

public class GenericCommonMeta extends ArchElementNode implements CommonMeta {
    public GenericCommonMeta() {
        super();
    }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericContainerElement.java
================================================
package com.pinecone.hydra.deploy.kom.entity;

import com.pinecone.framework.util.json.JSON;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.json.homotype.BeanMapDecoder;
import com.pinecone.hydra.deploy.DeployExtraMeta;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;

import java.util.Map;

public class GenericContainerElement extends ArchElementNode implements ContainerElement {
    protected String status;
    protected boolean enable;
    protected DeployExtraMeta extraMeta;
    protected ContainerElementManipulator containerElementManipulator;

    private void initSelf( Map joEntity ) {
        BeanMapDecoder.BasicDecoder.decode( this, joEntity );
        if ( this.szElementaryConfig != null ) {
            this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig );
        }
    }

    public GenericContainerElement( DeployInstrument deployInstrument, ContainerElementManipulator containerElementManipulator ) {
        super( deployInstrument );
        this.containerElementManipulator = containerElementManipulator;
    }

    public GenericContainerElement( Map joEntity, DeployInstrument deployInstrument ) {
        super( joEntity, deployInstrument );
        this.initSelf( joEntity );
    }

    public GenericContainerElement( Map joEntity ) {
        super( joEntity );
        this.initSelf( joEntity );
    }

    public GenericContainerElement( DeployInstrument deployInstrument ) {
        super( deployInstrument );
    }

    public GenericContainerElement() {
        super();
    }

    @Override public void setStatus( String status ) { this.status = status; }
    @Override public String getStatus() { return this.status; }
    @Override public boolean isEnable() { return this.enable; }
    @Override public void setEnable( boolean enable ) { this.enable = enable; }
    @Override public DeployExtraMeta getExtraMeta() { return this.extraMeta; }
    @Override public void setExtraMeta( DeployExtraMeta extraMeta ) { this.extraMeta = extraMeta; }
    @Override public DeployElement getAffiliateDeployment() { return null; }
    @Override public String getIpAddress() { return this.ipAddress; }
    @Override public void setIpAddress( String ipAddress ) { this.ipAddress = ipAddress; }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericDeployInsMapping.java
================================================
package com.pinecone.hydra.deploy.kom.entity;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;
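/**
 * Plain value object implementing {@link DeployInsMapping}: it relates a
 * deploy-node GUID to a service-instance GUID, carrying create/update
 * timestamps. The m-prefixed fields are this class's member-naming convention.
 */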
public class GenericDeployInsMapping implements DeployInsMapping { protected long mEnumId; protected GUID mDeployGuid; protected GUID mServiceInsGuid; protected LocalDateTime mCreateTime; protected LocalDateTime mUpdateTime; @Override public void setEnumId(long enumId) { this.mEnumId = enumId; } @Override public long getEnumId() { return this.mEnumId; } @Override public void setDeployGuid(GUID deployGuid) { this.mDeployGuid = deployGuid; } @Override public GUID getDeployGuid() { return this.mDeployGuid; } @Override public void setServiceInsGuid(GUID serviceInsGuid) { this.mServiceInsGuid = serviceInsGuid; } @Override public GUID getServiceInsGuid() { return this.mServiceInsGuid; } @Override public void setCreateTime(LocalDateTime createTime) { this.mCreateTime = createTime; } @Override public LocalDateTime getCreateTime() { return this.mCreateTime; } @Override public void setUpdateTime(LocalDateTime updateTime) { this.mUpdateTime = updateTime; } @Override public LocalDateTime getUpdateTime() { return this.mUpdateTime; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericNamespace.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import java.util.Collection; import java.util.Map; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public class GenericNamespace extends ArchElementNode implements Namespace { protected GUID metaGuid; protected GUIDImperialTrieNode distributedTreeNode; protected DeployNamespaceManipulator namespaceManipulator; public GenericNamespace() { super(); } public GenericNamespace( Map joEntity ) { super( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericNamespace( Map joEntity, DeployInstrument deployInstrument) { super( joEntity, deployInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericNamespace( DeployInstrument deployInstrument) { super(deployInstrument); } public GenericNamespace(DeployInstrument deployInstrument, DeployNamespaceManipulator namespaceManipulator ) { this(deployInstrument); this.namespaceManipulator = namespaceManipulator; } @Override public GUIDImperialTrieNode getDistributedTreeNode() { return this.distributedTreeNode; } @Override public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) { this.distributedTreeNode = distributedTreeNode; } @Override public GUID getMetaGuid() { return this.metaGuid; } @Override public void setMetaGuid( GUID metaGuid ) { this.metaGuid = metaGuid; } @Override public JSONObject toJSONObject() { Collection children = this.fetchChildren(); JSONObject jo = new JSONMaptron(); for( ElementNode node : children ) { jo.put( node.getName(), node.toJSONObject() ); } return jo; } @Override public JSONObject toJSONDetails() { return BeanColonist.DirectColonist.populate( this, ElementNode.UnbeanifiedKeys ); } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new 
KeyValue<>( "guid" , this.getGuid() ), new KeyValue<>( "name" , this.getName() ) } ); } @Override public String toString() { return this.name; } @Override public Collection fetchChildren() { return super.fetchChildren(); } @Override public Collection fetchChildrenGuids() { return super.fetchChildrenGuids(); } @Override public void addChild( ElementNode child ) { super.addChild( child ); } @Override public boolean containsChild( String childName ) { return super.containsChild( childName ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericPhysicalHostElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.deploy.kom.DeployInstrument; import java.util.Map; public class GenericPhysicalHostElement extends ArchServerElement implements PhysicalHostElement { protected String hardwareSpecs; protected String status; private void initSelf( Map joEntity ) { BeanMapDecoder.BasicDecoder.decode( this, joEntity ); if ( this.szElementaryConfig != null ) { this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig ); } } public GenericPhysicalHostElement(Map joEntity) { super(joEntity); } public GenericPhysicalHostElement(DeployInstrument deployInstrument) { super(deployInstrument); } public GenericPhysicalHostElement() { super(); } public GenericPhysicalHostElement( Map joEntity, DeployInstrument deployInstrument ) { super( joEntity, deployInstrument); this.initSelf( joEntity ); } @Override public void setHardwareSpecs(String hardwareSpecs) { this.hardwareSpecs = hardwareSpecs; } @Override public String getHardwareSpecs() { return this.hardwareSpecs; } @Override public void setStatus(String status) { this.status = status; } @Override public String getStatus() { return this.status; } @Override public String getIpAddress() { return this.ipAddress; } @Override public void setIpAddress(String ipAddress) { this.ipAddress = ipAddress; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericQuickElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.deploy.DeployExtraMeta; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.source.QuickElementManipulator; import java.util.Map; public class GenericQuickElement extends ArchElementNode implements QuickElement { protected String typeName; protected boolean enable; protected DeployExtraMeta extraMeta; protected QuickElementManipulator quickElementManipulator; private void initSelf( Map joEntity ) { BeanMapDecoder.BasicDecoder.decode( this, joEntity ); if ( this.szElementaryConfig != null ) { this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig ); } } public GenericQuickElement(DeployInstrument deployInstrument, QuickElementManipulator quickElementManipulator) { super(deployInstrument); this.quickElementManipulator = quickElementManipulator; } public GenericQuickElement( Map joEntity, DeployInstrument deployInstrument ) { super( joEntity, 
deployInstrument );
        this.initSelf( joEntity );
    }

    public GenericQuickElement( Map joEntity ) {
        super( joEntity );
        this.initSelf( joEntity );
    }

    public GenericQuickElement( DeployInstrument deployInstrument ) {
        super( deployInstrument );
    }

    public GenericQuickElement() {
        super();
    }

    @Override public String getTypeName() { return this.typeName; }
    @Override public void setTypeName( String typeName ) { this.typeName = typeName; }
    @Override public boolean isEnable() { return this.enable; }
    @Override public void setEnable( boolean enable ) { this.enable = enable; }
    @Override public DeployExtraMeta getExtraMeta() { return this.extraMeta; }
    @Override public void setExtraMeta( DeployExtraMeta extraMeta ) { this.extraMeta = extraMeta; }
    @Override public DeployElement getAffiliateDeployment() { return null; }
    @Override public void addChild( ElementNode child ) { super.addChild( child ); }
    @Override public String getIpAddress() { return this.ipAddress; }
    @Override public void setIpAddress( String ipAddress ) { this.ipAddress = ipAddress; }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericVirtualMachineElement.java
================================================
package com.pinecone.hydra.deploy.kom.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.JSON;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.json.homotype.BeanMapDecoder;
import com.pinecone.hydra.deploy.DeployExtraMeta;
import com.pinecone.hydra.deploy.kom.DeployInstrument;

import java.util.Map;

public class GenericVirtualMachineElement extends ArchServerElement implements VirtualMachineElement {
    protected GUID affiliateHostGuid;
    protected String status;
    protected DeployExtraMeta vmExtraMeta;

    private void initSelf( Map joEntity ) {
        BeanMapDecoder.BasicDecoder.decode( this, joEntity );
        if ( this.szElementaryConfig != null ) {
            this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig );
        }
    }

    public GenericVirtualMachineElement( Map joEntity ) {
        super( joEntity );
    }

    public GenericVirtualMachineElement( DeployInstrument deployInstrument ) {
        super( deployInstrument );
    }

    public GenericVirtualMachineElement() {
        super();
    }

    public GenericVirtualMachineElement( Map joEntity, DeployInstrument deployInstrument ) {
        super( joEntity, deployInstrument );
        this.initSelf( joEntity );
    }

    @Override public String getName() { return this.name; }
    @Override public void setName( String name ) { this.name = name; }
    @Override public String getIpAddress() { return this.ipAddress; }
    @Override public void setIpAddress( String ipAddress ) { this.ipAddress = ipAddress; }
    @Override public String getLocalDomain() { return this.localDomain; }
    @Override public void setLocalDomain( String localDomain ) { this.localDomain = localDomain; }
    @Override public String getWideDomain() { return this.wideDomain; }
    @Override public void setWideDomain( String wideDomain ) { this.wideDomain = wideDomain; }
    @Override public String getStatus() { return this.status; }
    @Override public void setStatus( String status ) { this.status = status; }
    @Override public GUID getAffiliateHostGuid() { return this.affiliateHostGuid; }
    @Override public void setAffiliateHostGuid( GUID guid ) { this.affiliateHostGuid = guid; }
    @Override public DeployExtraMeta getVmExtraMeta() { return this.vmExtraMeta; }
    @Override public DeployExtraMeta getExtraMeta() { return this.extraMeta; }
    @Override public void setExtraMeta( DeployExtraMeta extraMeta ) { this.extraMeta = extraMeta; }
    @Override public
DeployElement getAffiliateDeployment() { return null; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/Namespace.java ================================================ package com.pinecone.hydra.deploy.kom.entity; import java.util.Set; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public interface Namespace extends FolderElement { Set UnbeanifiedKeys = Set.of( "distributedTreeNode", "classificationRules" ); long getEnumId(); void setEnumId(long id); GUID getGuid(); void setGuid(GUID guid); GUID getMetaGuid(); void setMetaGuid(GUID metaGuid); String getName(); void setName(String name); GUIDImperialTrieNode getDistributedTreeNode(); void setDistributedTreeNode(GUIDImperialTrieNode distributedTreeNode); @Override default Namespace evinceNamespace() { return this; } JSONObject toJSONDetails(); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/PhysicalHostElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; public interface PhysicalHostElement extends ServerElement { void setHardwareSpecs( String hardwareSpecs ); String getHardwareSpecs(); void setStatus( String status ); String getStatus(); default PhysicalHostElement evincePhysicalHostElement() { return this; } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/QuickElement.java ================================================ package com.pinecone.hydra.deploy.kom.entity; public interface QuickElement extends DeployElement { String getTypeName();// e.g. 
Script, POD
    void setTypeName( String typeName );

    @Override default QuickElement evinceQuickElement() { return this; }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ServerElement.java
================================================
package com.pinecone.hydra.deploy.kom.entity;

public interface ServerElement extends DeployElement {
    String getLocalDomain();
    void setLocalDomain( String localDomain );
    String getWideDomain();
    void setWideDomain( String wideDomain );
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/VirtualMachineElement.java
================================================
package com.pinecone.hydra.deploy.kom.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.DeployExtraMeta;

public interface VirtualMachineElement extends ServerElement {
    @Override default VirtualMachineElement evinceVirtualMachineElement() { return this; }

    /* String getName(); void setName(String name);*/
    String getIpAddress();
    void setIpAddress( String ipAddress );
    String getStatus();
    void setStatus( String status );
    GUID getAffiliateHostGuid();
    void setAffiliateHostGuid( GUID guid );
    DeployExtraMeta getVmExtraMeta();
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployInstrumentDecoder.java
================================================
package com.pinecone.hydra.deploy.kom.marshaling;

import java.util.Map;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.entity.ElementNode;

public interface DeployInstrumentDecoder extends Pinenut {
    default ElementNode decode( Object val, GUID parentGUID ) {
        if ( val instanceof Map ) {
            Map map = (Map) val;
            if( map.isEmpty() ) {
                return null;
            }
            else if( map.size() > 1 ) {
                throw new IllegalArgumentException( "Root element should have exactly one entry." );
            }
            Map.Entry kv = (Map.Entry) map.entrySet().iterator().next();
            return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );
        }
        return null;
    }

    ElementNode decode( String key, Object val, GUID parentGUID );

    default ElementNode decode( Map.Entry kv, GUID parentGUID ) {
        return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );
    }

    default ElementNode decode( Object val ) {
        return this.decode( val, null );
    }

    default ElementNode decode( String key, Object val ) {
        return this.decode( key, val, null );
    }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployInstrumentEncoder.java
================================================
package com.pinecone.hydra.deploy.kom.marshaling;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.hydra.deploy.kom.entity.ElementNode;

public interface DeployInstrumentEncoder extends Pinenut {
    Object encode( ElementNode node );
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployJSONDecoder.java
================================================
package com.pinecone.hydra.deploy.kom.marshaling;

import java.util.Collection;
import java.util.Map;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.ContainerElement;
import com.pinecone.hydra.deploy.kom.entity.ElementNode;
import com.pinecone.hydra.deploy.kom.entity.FolderElement;
import com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;
import com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;
import com.pinecone.hydra.deploy.kom.entity.GenericNamespace;
import com.pinecone.hydra.deploy.kom.entity.ClusterElement;
import com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;
import com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;
import com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;
import com.pinecone.hydra.deploy.kom.entity.Namespace;
import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;
import com.pinecone.hydra.deploy.kom.entity.QuickElement;
import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;

public class DeployJSONDecoder implements DeployInstrumentDecoder {
    protected DeployInstrument instrument;

    public DeployJSONDecoder( DeployInstrument instrument ) {
        this.instrument = instrument;
    }

    @Override
    @SuppressWarnings( "unchecked" )
    public ElementNode decode( String szName, Object o, GUID parentGuid ) {
        if ( o instanceof Map ) {
            return (ElementNode) this.instrument.get( this.decodeJSONObject( szName, (Map) o, parentGuid ).getGuid() );
        }
        throw new IllegalArgumentException( "Elements of `DeployInstrument` should all be objects." );
    }

    protected Namespace newNamespace( String szName, Map jo ) {
        Namespace ns = new GenericNamespace( jo, this.instrument );
        ns.setName( szName );
        return ns;
    }

    protected Object[] affirmNSExisted( String szName, GUID parentGuid, Map jo ) {
        Namespace ns = null;
        if( parentGuid == null ) {
            ElementNode rootE = this.instrument.queryElement( szName );
            if( rootE != null ) {
                if( rootE.evinceNamespace() == null ) {
                    throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `Namespace`.", szName ) );
                }
                ns = rootE.evinceNamespace();
            }
        }
        else {
            ElementNode parentNode = (ElementNode) this.instrument.get( parentGuid );
            if( parentNode instanceof Namespace ) {
                Collection destChildren = parentNode.evinceNamespace().fetchChildren();
                for( ElementNode node : destChildren ) {
                    if( szName.equals( node.getName() ) ) {
                        if( node instanceof Namespace ) {
                            ns = (Namespace) node;
                            break;
                        }
                        else {
                            throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `Namespace`.", szName ) );
                        }
                    }
                }
            }
        }
        GUID currentGuid;
        if( ns == null ) {
            ns = this.newNamespace( szName, jo );
            currentGuid = this.instrument.put( ns );
            this.instrument.affirmOwnedNode( parentGuid, currentGuid );
        }
        else {
            currentGuid = ns.getGuid();
        }
        return new Object[] { ns, currentGuid };
    }

    protected Object[] affirmClusterExisted( String szName, GUID parentGuid, Map jo ) {
        ClusterElement cluster = null;
        if( parentGuid == null ) {
            ElementNode rootE = this.instrument.queryElement( szName );
            if( rootE != null ) {
                if( rootE.evinceClusterElement() == null ) {
                    throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ClusterElement`.", szName ) );
                }
                cluster = rootE.evinceClusterElement();
            }
        }
        else {
            ElementNode parentNode = (ElementNode) this.instrument.get( parentGuid );
            if( parentNode instanceof Namespace ) {
                Collection destChildren = parentNode.evinceNamespace().fetchChildren();
                for( ElementNode node : destChildren ) {
                    if( szName.equals( node.getName() ) ) {
                        if( node instanceof ClusterElement ) {
                            cluster = (ClusterElement) node;
                            break;
                        }
                        else {
                            throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ClusterElement`.", szName ) );
                        }
                    }
                }
            }
        }
        ClusterElement neo;
        if( cluster == null ) {
            neo = new GenericClusterElement( jo, this.instrument );
            neo.setName( szName );
        }
        else {
            neo = cluster;
        }
        return new Object[] { cluster, neo };
    }

    protected Object[] affirmPhyExisted( String szName, GUID parentGuid, Map jo ) {
        PhysicalHostElement dep = null;
        if( parentGuid == null ) {
            ElementNode rootE = this.instrument.queryElement( szName );
            if( rootE != null ) {
                if( rootE.evincePhysicalHostElement() == null ) {
                    throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `PhysicalHostElement`.", szName ) );
                }
                dep = rootE.evincePhysicalHostElement();
            }
        }
        else {
            ElementNode parentNode = (ElementNode) this.instrument.get( parentGuid );
            Collection destChildren;
            if( parentNode instanceof FolderElement ) {
                destChildren = ( (FolderElement) parentNode ).fetchChildren();
                for( ElementNode node : destChildren ) {
                    if( szName.equals( node.getName() ) ) {
                        if( node instanceof PhysicalHostElement ) {
                            dep = (PhysicalHostElement) node;
                            break;
                        }
                        else {
                            throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `PhysicalHostElement`.", szName ) );
                        }
                    }
                }
            }
            else {
                throw new IllegalStateException( String.format( "Parent of `PhysicalHostElement` [%s] should be `FolderElement`.", szName ) );
            }
        }
        PhysicalHostElement neo;
        if( dep == null ) {
            neo = new GenericPhysicalHostElement( jo, this.instrument );
            neo.setName( szName );
        }
        else {
            neo = dep;
        }
        return new Object[] { dep, neo };
    }

    protected Object[] affirmVMExisted( String szName, GUID parentGuid, Map jo ) {
        VirtualMachineElement dep = null;
        if( parentGuid == null ) {
            ElementNode rootE = this.instrument.queryElement( szName );
            if( rootE != null ) {
                if( rootE.evinceVirtualMachineElement() == null ) {
                    throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `VirtualMachineElement`.", szName ) );
                }
                dep = rootE.evinceVirtualMachineElement();
            }
        }
        else {
            ElementNode parentNode = (ElementNode) this.instrument.get( parentGuid );
            Collection destChildren;
            if( parentNode instanceof FolderElement ) {
                destChildren = ( (FolderElement) parentNode ).fetchChildren();
                for( ElementNode node : destChildren ) {
                    if( szName.equals( node.getName() ) ) {
                        if( node instanceof VirtualMachineElement ) {
                            dep = (VirtualMachineElement) node;
                            break;
                        }
                        else {
                            throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `VirtualMachineElement`.", szName ) );
                        }
                    }
                }
            }
            else {
                throw new IllegalStateException( String.format( "Parent of `VirtualMachineElement` [%s] should be `FolderElement`.", szName ) );
            }
        }
        VirtualMachineElement neo;
        if( dep == null ) {
            neo = new GenericVirtualMachineElement( jo, this.instrument );
            neo.setName( szName );
        }
        else {
            neo = dep;
        }
        return new Object[] { dep, neo };
    }

    protected Object[] affirmQuickExisted( String szName, GUID parentGuid, Map jo ) {
        QuickElement dep = null;
        if( parentGuid == null ) {
            ElementNode rootE = this.instrument.queryElement( szName );
            if( rootE != null ) {
                if( rootE.evinceQuickElement() == null ) {
                    throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `QuickElement`.", szName ) );
                }
                dep = rootE.evinceQuickElement();
            }
        }
        else {
            ElementNode parentNode = (ElementNode) this.instrument.get( parentGuid );
            Collection destChildren;
            if( parentNode instanceof FolderElement ) {
                destChildren = ( (FolderElement) parentNode ).fetchChildren();
                for( ElementNode node : destChildren ) {
                    if( szName.equals( node.getName() ) ) {
                        if( node instanceof QuickElement ) {
                            dep = (QuickElement) node;
                            break;
                        }
                        else {
                            throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `QuickElement`.", szName ) );
                        }
                    }
                }
            }
            else {
                throw new IllegalStateException( String.format( "Parent of `QuickElement` [%s] should be `FolderElement`.", szName ) );
            }
        }
        QuickElement neo;
        if( dep == null ) {
            neo = new GenericQuickElement( jo, this.instrument );
            neo.setName( szName );
        }
        else {
            neo = dep;
        }
        return new Object[] { dep, neo };
    }

    protected Object[] affirmContainerExisted( String szName, GUID parentGuid, Map jo ) {
        ContainerElement dep = null;
        if( parentGuid == null ) {
            ElementNode rootE = this.instrument.queryElement( szName );
            if( rootE != null ) {
                if( rootE.evinceContainerElement() == null ) {
                    throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ContainerElement`.", szName ) );
                }
                dep = rootE.evinceContainerElement();
            }
        }
        else {
            ElementNode parentNode = (ElementNode) this.instrument.get( parentGuid );
            Collection destChildren;
            if( parentNode instanceof FolderElement ) {
                destChildren = ( (FolderElement) parentNode ).fetchChildren();
                for( ElementNode node : destChildren ) {
                    if( szName.equals( node.getName() ) ) {
                        if( node instanceof ContainerElement ) {
                            dep = (ContainerElement) node;
                            break;
                        }
                        else {
                            throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ContainerElement`.", szName ) );
                        }
                    }
                }
            }
            else {
                throw new IllegalStateException( String.format( "Parent of `ContainerElement` [%s] should be `FolderElement`.", szName ) );
            }
        }
        ContainerElement neo;
        if( dep == null ) {
            neo = new GenericContainerElement( jo, this.instrument );
            neo.setName( szName );
        }
        else {
            neo = dep;
        }
        return new Object[] { dep, neo };
    }

    protected Object[] decodeExternalElements( String szMetaType, String szName, GUID parentGuid, Map jo ) throws IllegalArgumentException {
        throw new IllegalArgumentException( "Unknown metaType '" + szMetaType + "'." );
    }

    protected void decodeChildren( Map jo, GUID currentGuid ) {
        for ( Object o : jo.entrySet() ) {
            Map.Entry kv = (Map.Entry) o;
            Object val = kv.getValue();
            if( val instanceof Map ) {
                this.decode( kv.getKey().toString(), val, currentGuid );
            }
        }
    }

    protected ElementNode decodeJSONObject( String szName, Map jo, GUID parentGuid ) {
        String szMetaType = (String) jo.get( "metaType" );
        boolean isNamespace = szMetaType == null || szMetaType.equals( Namespace.class.getSimpleName() );
        ElementNode elementNode;
        GUID currentGuid;
        if ( isNamespace ) {
            Object[] pair = this.affirmNSExisted( szName, parentGuid, jo );
            Namespace ns = (Namespace) pair[ 0 ];
            currentGuid = (GUID) pair[ 1 ];
            this.decodeChildren( jo, currentGuid );
            elementNode = ns;
        }
        else {
            Object[] pair;
            boolean bIsFolderElement = false;
            if( szMetaType.equals( ClusterElement.class.getSimpleName() ) ) {
                pair = this.affirmClusterExisted( szName, parentGuid, jo );
                bIsFolderElement = true;
            }
            else if( szMetaType.equals( PhysicalHostElement.class.getSimpleName() ) ) {
                pair = this.affirmPhyExisted( szName, parentGuid, jo );
            }
            else if( szMetaType.equals( VirtualMachineElement.class.getSimpleName() ) ) {
                pair = this.affirmVMExisted( szName, parentGuid, jo );
            }
            else if( szMetaType.equals( QuickElement.class.getSimpleName() ) ) {
                pair = this.affirmQuickExisted( szName, parentGuid, jo );
            }
            else if( szMetaType.equals( ContainerElement.class.getSimpleName() ) ) {
                pair = this.affirmContainerExisted( szName, parentGuid, jo );
            }
            else {
                try {
                    pair = this.decodeExternalElements( szMetaType, szName, parentGuid, jo );
                }
                catch ( RuntimeException e ) {
                    throw new IllegalArgumentException( e );
                }
            }
            ElementNode arc = (ElementNode) pair[ 0 ];
            ElementNode neo = (ElementNode) pair[ 1 ];
            if( arc == null ) {
                currentGuid = this.instrument.put( neo );
                this.instrument.affirmOwnedNode( parentGuid, currentGuid );
            }
            else {
                currentGuid = arc.getGuid();
                this.instrument.update( neo );
            }
            if( bIsFolderElement ) {
                Object services = jo.get( "deployments" );
                if( services instanceof Map ) {
                    Map joSer = (Map) services;
                    this.decodeChildren( joSer, currentGuid );
                }
            }
            elementNode = neo;
        }
        return elementNode;
    }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployJSONEncoder.java
================================================
package com.pinecone.hydra.deploy.kom.marshaling;

import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.ElementNode;

public class DeployJSONEncoder implements DeployInstrumentEncoder {
    protected DeployInstrument instrument;

    public DeployJSONEncoder( DeployInstrument instrument ) {
        this.instrument = instrument;
    }

    @Override
    public Object encode( ElementNode node ) {
        return node.toJSONObject();
    }
}

================================================
FILE:
Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ArchElementOperator.java ================================================ package com.pinecone.hydra.deploy.kom.operator; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.CommonMeta; import com.pinecone.hydra.deploy.kom.entity.ElementNode; import com.pinecone.hydra.deploy.kom.source.NodeMetaManipulator; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.unit.imperium.ImperialTree; public abstract class ArchElementOperator implements ElementOperator { protected DeployInstrument deployInstrument; protected ImperialTree imperialTree; protected NodeMetaManipulator nodeMetaManipulator; protected DeployMasterManipulator deployMasterManipulator; protected ElementOperatorFactory factory; public ArchElementOperator( ElementOperatorFactory factory ){ this( factory.getTaskMasterManipulator(),factory.getServicesTree() ); this.factory = factory; } public ArchElementOperator(DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument){ this.imperialTree = deployInstrument.getMasterTrieTree(); this.deployInstrument = deployInstrument; this.nodeMetaManipulator = masterManipulator.getNodeMetaManipulator(); this.deployMasterManipulator = masterManipulator; //this.factory = new GenericServiceOperatorFactory(servicesTree,masterManipulator); } public ElementOperatorFactory getOperatorFactory() { return this.factory; } protected void applyCommonMeta( ElementNode ele, CommonMeta commonMeta ){ if( commonMeta != null ) { ele.setGuid ( commonMeta.getGuid() ); ele.setExtraInformation ( commonMeta.getExtraInformation() ); ele.setDescription ( commonMeta.getDescription() ); ele.setIpAddress ( commonMeta.getIpAddress() ); } } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ClusterElementOperator.java ================================================ package com.pinecone.hydra.deploy.kom.operator; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.ClusterElement; import com.pinecone.hydra.deploy.kom.entity.GenericClusterElement; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.deploy.kom.entity.GenericNamespace; import com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public class ClusterElementOperator extends ArchElementOperator implements ElementOperator { protected ClusterNodeManipulator jobNodeManipulator; public ClusterElementOperator(ElementOperatorFactory factory ) { this( factory.getTaskMasterManipulator(),factory.getServicesTree() ); this.factory = factory; } public ClusterElementOperator(DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument){ super( masterManipulator, deployInstrument); this.jobNodeManipulator = masterManipulator.getJobNodeManipulator(); } @Override public GUID insert( TreeNode treeNode ) { GenericClusterElement jobElement = (GenericClusterElement) treeNode; GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator(); GUID jobNodeGUID = 
guidAllocator.nextGUID();
        jobElement.setGuid( jobNodeGUID );
        this.jobNodeManipulator.insert( jobElement );
        // Store the element's meta information into the meta-info table.
        this.nodeMetaManipulator.insert( jobElement );
        // Store the node information into the master tree table.
        GUIDImperialTrieNode node = new GUIDImperialTrieNode();
        node.setNodeMetadataGUID( jobNodeGUID );
        node.setGuid( jobNodeGUID );
        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );
        this.imperialTree.insert( node );
        return jobNodeGUID;
    }

    @Override
    public void purge( GUID guid ) {
        // Namespace nodes are purged recursively: owned nodes are deleted, while
        // referenced nodes are cleaned up only when no other reference remains.
        List childNodes = this.imperialTree.getChildren( guid );
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        if ( !childNodes.isEmpty() ) {
            List subordinates = this.imperialTree.getSubordinates( guid );
            if ( !subordinates.isEmpty() ) {
                for ( GUID subordinateGuid : subordinates ) {
                    this.purge( subordinateGuid );
                }
            }
            childNodes = this.imperialTree.getChildren( guid );
            for( GUIDImperialTrieNode childNode : childNodes ) {
                List parentNodes = this.imperialTree.fetchParentGuids( childNode.getGuid() );
                if ( parentNodes.size() > 1 ) {
                    this.imperialTree.removeInheritance( childNode.getGuid(), guid );
                }
                else {
                    this.purge( childNode.getGuid() );
                }
            }
        }
        if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() ) ) {
            this.removeNode( guid );
        }
        else {
            UOI uoi = node.getType();
            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );
            if( metaType == null ) {
                TreeNode newInstance = (TreeNode) uoi.newInstance( new Class[]{ DeployInstrument.class }, this.deployInstrument );
                metaType = newInstance.getMetaType();
            }
            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );
            operator.purge( guid );
        }
    }

    @Override
    public ClusterElement get( GUID guid ) {
        ClusterElement clusterElement;
        clusterElement = this.jobNodeManipulator.getClusterElement( guid, this.deployInstrument );
        this.applyCommonMeta( clusterElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );
        clusterElement.setGuid( clusterElement.getGuid() );
        return clusterElement;
    }

    @Override public ClusterElement get( GUID guid, int depth ) { return this.get( guid ); }
    @Override public ClusterElement getAsRootDepth( GUID guid ) { return this.get( guid ); }

    @Override
    public void update( TreeNode treeNode ) {
        GenericClusterElement applicationElement = (GenericClusterElement) treeNode;
        this.jobNodeManipulator.update( applicationElement );
        this.nodeMetaManipulator.update( applicationElement );
    }

    @Override public void updateName( GUID guid, String name ) { }

    protected void removeNode( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );
        this.jobNodeManipulator.remove( node.getGuid() );
    }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ContainerElementOperator.java
================================================
package com.pinecone.hydra.deploy.kom.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;
import com.pinecone.hydra.deploy.kom.entity.ContainerElement;
import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;
import com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;
import com.pinecone.hydra.system.ko.UOIUtils;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public class ContainerElementOperator extends ArchElementOperator implements ElementOperator {
    protected ContainerElementManipulator containerElementManipulator;

    public ContainerElementOperator( ElementOperatorFactory factory ) {
        this( factory.getTaskMasterManipulator(), factory.getServicesTree() );
        this.factory = factory;
    }

    public ContainerElementOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ) {
        super( masterManipulator, deployInstrument );
        this.containerElementManipulator = masterManipulator.getContainerElementManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        GenericContainerElement containerElement = (GenericContainerElement) treeNode;
        // Write the information into the database:
        // store the node information into the application-node table.
        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();
        GUID taskNodeGUID = guidAllocator.nextGUID();
        containerElement.setGuid( taskNodeGUID );
        this.containerElementManipulator.insert( containerElement );
        // Store the element's meta information into the meta-info table.
        this.nodeMetaManipulator.insert( containerElement );
        // Store the node information into the master tree table.
        GUIDImperialTrieNode node = new GUIDImperialTrieNode();
        node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.
        node.setGuid( taskNodeGUID );
        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );
        this.imperialTree.insert( node );
        return taskNodeGUID;
    }

    @Override
    public void purge( GUID guid ) {
        this.removeNode( guid );
    }

    @Override
    public ContainerElement get( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        ContainerElement containerElement = this.containerElementManipulator.getContainerElement( guid, this.deployInstrument );
        //TODO
        /* this.applyCommonMeta( containerElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) ); */
        containerElement.setDistributedTreeNode( node );
        containerElement.setGuid( guid );
        return containerElement;
    }

    @Override public ContainerElement get( GUID guid, int depth ) { return this.get( guid ); }
    @Override public ContainerElement getAsRootDepth( GUID guid ) { return this.get( guid ); }

    @Override
    public void update( TreeNode nodeWideData ) {
        ContainerElement serviceElement = (ContainerElement) nodeWideData;
        this.containerElementManipulator.update( serviceElement );
        this.nodeMetaManipulator.update( serviceElement );
    }

    @Override public void updateName( GUID guid, String name ) { }

    private void removeNode( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.containerElementManipulator.remove( node.getGuid() );
        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );
    }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ElementOperator.java
================================================
package com.pinecone.hydra.deploy.kom.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.entity.ElementNode;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public interface ElementOperator extends TreeNodeOperator {
    @Override ElementNode get( GUID guid );
    @Override ElementNode get( GUID guid, int depth );
    @Override ElementNode getAsRootDepth( GUID guid );
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ElementOperatorFactory.java
================================================ package com.pinecone.hydra.deploy.kom.operator; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.ClusterElement; import com.pinecone.hydra.deploy.kom.entity.ContainerElement; import com.pinecone.hydra.deploy.kom.entity.Namespace; import com.pinecone.hydra.deploy.kom.entity.DeployElement; import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement; import com.pinecone.hydra.deploy.kom.entity.QuickElement; import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.unit.imperium.operator.OperatorFactory; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface ElementOperatorFactory extends OperatorFactory { String DefaultServiceNode = DeployElement.class.getSimpleName(); String DefaultNamespace = Namespace.class.getSimpleName(); String DefaultApplicationNode = ClusterElement.class.getSimpleName(); String DefaultVirtualMachine = VirtualMachineElement.class.getSimpleName(); String DefaultPhysicalHost = PhysicalHostElement.class.getSimpleName(); String DefaultQuickElement = QuickElement.class.getSimpleName(); String DefaultContainerElement = ContainerElement.class.getSimpleName(); void register(String typeName, TreeNodeOperator functionalNodeOperation); void registerMetaType(Class clazz, String metaType); void registerMetaType(String classFullName, String metaType); String getMetaType(String classFullName); ElementOperator getOperator(String typeName); DeployInstrument getServicesTree(); DeployMasterManipulator getTaskMasterManipulator(); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/GenericElementOperatorFactory.java ================================================ package com.pinecone.hydra.deploy.kom.operator; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; import com.pinecone.hydra.deploy.kom.entity.GenericContainerElement; import com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement; import com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.GenericClusterElement; import com.pinecone.hydra.deploy.kom.entity.GenericNamespace; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement; public class GenericElementOperatorFactory implements ElementOperatorFactory { protected DeployMasterManipulator deployMasterManipulator; protected DeployInstrument deployInstrument; protected Map registerer = new HashMap<>(); protected Map metaTypeMap = new TreeMap<>(); protected void registerDefaultMetaType( Class genericType ) { this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace("Generic","") ); } protected void registerDefaultMetaTypes() { this.registerDefaultMetaType( GenericNamespace.class ); this.registerDefaultMetaType( GenericClusterElement.class ); this.registerDefaultMetaType( GenericVirtualMachineElement.class ); this.registerDefaultMetaType( GenericPhysicalHostElement.class ); this.registerDefaultMetaType( GenericQueueElement.class); this.registerDefaultMetaType( GenericContainerElement.class); } public GenericElementOperatorFactory(DeployInstrument 
deployInstrument, DeployMasterManipulator deployMasterManipulator ) {
        this.deployInstrument = deployInstrument;
        this.deployMasterManipulator = deployMasterManipulator;
        this.registerer.put( ElementOperatorFactory.DefaultApplicationNode, new ClusterElementOperator( this ) );
        this.registerer.put( ElementOperatorFactory.DefaultNamespace, new NamespaceOperator( this ) );
        this.registerer.put( ElementOperatorFactory.DefaultVirtualMachine, new VirtualMachineElementOperator( this ) );
        this.registerer.put( ElementOperatorFactory.DefaultPhysicalHost, new PhysicalHostElementOperator( this ) );
        this.registerer.put( ElementOperatorFactory.DefaultQuickElement, new QuickElementOperator( this ) );
        this.registerer.put( ElementOperatorFactory.DefaultContainerElement, new ContainerElementOperator( this ) );
        this.registerDefaultMetaTypes();
    }

    @Override
    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {
        this.registerer.put( typeName, functionalNodeOperation );
    }

    @Override
    public void registerMetaType( Class clazz, String metaType ) {
        this.registerMetaType( clazz.getName(), metaType );
    }

    @Override
    public void registerMetaType( String classFullName, String metaType ) {
        this.metaTypeMap.put( classFullName, metaType );
    }

    @Override public DeployInstrument getServicesTree() { return this.deployInstrument; }
    @Override public DeployMasterManipulator getTaskMasterManipulator() { return this.deployMasterManipulator; }
    @Override public String getMetaType( String classFullName ) { return this.metaTypeMap.get( classFullName ); }

    @Override
    public ElementOperator getOperator( String typeName ) {
        //Debug.trace( this.registerer.toString() );
        return (ElementOperator) this.registerer.get( typeName );
    }
}

================================================
FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/NamespaceOperator.java
================================================
package com.pinecone.hydra.deploy.kom.operator;

import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.deploy.kom.entity.GenericNamespace;
import com.pinecone.hydra.system.ko.UOIUtils;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;
import com.pinecone.hydra.deploy.kom.entity.Namespace;
import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;
import com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public class NamespaceOperator extends ArchElementOperator implements ElementOperator {
    protected DeployNamespaceManipulator namespaceManipulator;

    public NamespaceOperator( ElementOperatorFactory factory ) {
        this( factory.getTaskMasterManipulator(), factory.getServicesTree() );
        this.factory = factory;
    }

    public NamespaceOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ) {
        super( masterManipulator, deployInstrument );
        this.namespaceManipulator = masterManipulator.getNamespaceManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        GenericNamespace ns = (GenericNamespace) treeNode;
        // Store the node's basic information.
        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();
        GUID namespaceRulesGuid = ns.getGuid();
        GUID namespaceGuid = guidAllocator.nextGUID();
        ns.setGuid( namespaceGuid );
        this.namespaceManipulator.insert( ns );
        // Store the meta information.
        GUID metadataGUID = guidAllocator.nextGUID();
        ns.setMetaGuid( metadataGUID );
        this.nodeMetaManipulator.insertNS( ns );
        GUIDImperialTrieNode node = new GUIDImperialTrieNode();
        node.setBaseDataGUID( namespaceRulesGuid );
        node.setGuid( namespaceGuid );
        node.setNodeMetadataGUID( metadataGUID );
        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );
        this.imperialTree.insert( node );
        return namespaceGuid;
    }

    @Override
    public void purge( GUID guid ) {
        // Namespace nodes are purged recursively: owned nodes are deleted, while
        // referenced nodes are cleaned up only when no other reference remains.
        List childNodes = this.imperialTree.getChildren( guid );
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        if ( !childNodes.isEmpty() ) {
            List subordinates = this.imperialTree.getSubordinates( guid );
            if ( !subordinates.isEmpty() ) {
                for ( GUID subordinateGuid : subordinates ) {
                    this.purge( subordinateGuid );
                }
            }
            childNodes = this.imperialTree.getChildren( guid );
            for( GUIDImperialTrieNode childNode : childNodes ) {
                List parentNodes = this.imperialTree.fetchParentGuids( childNode.getGuid() );
                if ( parentNodes.size() > 1 ) {
                    this.imperialTree.removeInheritance( childNode.getGuid(), guid );
                }
                else {
                    this.purge( childNode.getGuid() );
                }
            }
        }
        if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() ) || node.getType().getObjectName().equals( GenericClusterElement.class.getName() ) ) {
            this.removeNode( guid );
        }
        else {
            UOI uoi = node.getType();
            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );
            if( metaType == null ) {
                TreeNode newInstance = (TreeNode) uoi.newInstance( new Class[]{ DeployInstrument.class }, this.deployInstrument );
                metaType = newInstance.getMetaType();
            }
            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );
            operator.purge( guid );
        }
    }

    @Override
    public Namespace get( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        GenericNamespace namespace = new GenericNamespace( this.deployInstrument );
        GUIDImperialTrieNode guidDistributedTrieNode = this.imperialTree.getNode( node.getGuid() );
        GUID metaGuid = guidDistributedTrieNode.getNodeMetadataGUID();
        namespace.setDistributedTreeNode( guidDistributedTrieNode );
        namespace.setName( this.namespaceManipulator.getNamespace( guid ).getName() );
        this.applyCommonMeta( namespace, this.nodeMetaManipulator.getNodeCommonMeta( metaGuid ) ); // GUID / MetaGUID difference.
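        // Note: `guid` addresses the namespace node itself in the imperial trie,
        // while `metaGuid` (the trie node's NodeMetadataGUID) addresses its row
        // in the node-meta table; insert() above allocates the two separately.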
namespace.setGuid( guid ); namespace.setMetaGuid( metaGuid ); return namespace; } @Override public Namespace get( GUID guid, int depth ) { return this.get( guid ); } @Override public Namespace getAsRootDepth( GUID guid ) { return this.get( guid ); } @Override public void update( TreeNode nodeWideData ) { GenericNamespace ns = ( GenericNamespace ) nodeWideData; this.namespaceManipulator.update( ns ); this.nodeMetaManipulator.update( ns ); } @Override public void updateName( GUID guid, String name ) { } protected void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath( guid ); this.namespaceManipulator.remove( node.getGuid() ); this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/PhysicalHostElementOperator.java ================================================ package com.pinecone.hydra.deploy.kom.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement; import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public class PhysicalHostElementOperator extends ArchElementOperator implements ElementOperator{ protected PhysicalHostManipulator physicalHostManipulator; public PhysicalHostElementOperator( ElementOperatorFactory factory ) { this( factory.getTaskMasterManipulator(), factory.getServicesTree() ); this.factory = factory; } public PhysicalHostElementOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ){ super( masterManipulator, deployInstrument ); this.physicalHostManipulator = masterManipulator.getPhysicalHostManipulator(); } @Override public GUID insert( TreeNode treeNode ) { GenericPhysicalHostElement physicalHostElement = ( GenericPhysicalHostElement ) treeNode; // Write the element to the database: first the node record into the application-node table. GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator(); GUID taskNodeGUID = guidAllocator.nextGUID(); physicalHostElement.setGuid(taskNodeGUID); this.physicalHostManipulator.insert( physicalHostElement ); // Store the element's meta information in the meta table. this.nodeMetaManipulator.insert( physicalHostElement ); // Store the node record in the master (trie) table. GUIDImperialTrieNode node = new GUIDImperialTrieNode(); node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.
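// Under the merged scheme a single taskNodeGUID keys the element row, the meta row and the trie
// node alike, which is why removeNode() can clean up all three records from one identifier.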
node.setGuid( taskNodeGUID ); node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) ); this.imperialTree.insert( node ); return taskNodeGUID; } @Override public void purge( GUID guid ) { this.removeNode( guid ); } @Override public PhysicalHostElement get(GUID guid ) { GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); PhysicalHostElement physicalHostElement = this.physicalHostManipulator.getPhysicalHostElement( guid, this.deployInstrument); //TODO /* this.applyCommonMeta( physicalHostElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) ); */ physicalHostElement.setDistributedTreeNode(node); physicalHostElement.setGuid( guid ); return physicalHostElement; } @Override public PhysicalHostElement get(GUID guid, int depth ) { return this.get( guid ); } @Override public PhysicalHostElement getAsRootDepth(GUID guid ) { return this.get( guid ); } @Override public void update( TreeNode nodeWideData ) { PhysicalHostElement serviceElement = (PhysicalHostElement) nodeWideData; this.physicalHostManipulator.update( serviceElement ); this.nodeMetaManipulator.update( serviceElement ); } @Override public void updateName( GUID guid, String name ) { } private void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath( guid ); this.physicalHostManipulator.remove( node.getGuid() ); this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/QuickElementOperator.java ================================================ package com.pinecone.hydra.deploy.kom.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.GenericQuickElement; import com.pinecone.hydra.deploy.kom.entity.QuickElement; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.deploy.kom.source.QuickElementManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public class QuickElementOperator extends ArchElementOperator implements ElementOperator{ protected QuickElementManipulator quickElementManipulator; public QuickElementOperator( ElementOperatorFactory factory ) { this( factory.getTaskMasterManipulator(), factory.getServicesTree() ); this.factory = factory; } public QuickElementOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ){ super( masterManipulator, deployInstrument ); this.quickElementManipulator = masterManipulator.getQuickElementManipulator(); } @Override public GUID insert( TreeNode treeNode ) { GenericQuickElement quickElement = ( GenericQuickElement ) treeNode; // Write the element to the database: first the node record into the application-node table. GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator(); GUID taskNodeGUID = guidAllocator.nextGUID(); quickElement.setGuid(taskNodeGUID); this.quickElementManipulator.insert( quickElement ); // Store the element's meta information in the meta table. this.nodeMetaManipulator.insert( quickElement ); // Store the node record in the master (trie) table. GUIDImperialTrieNode node = new GUIDImperialTrieNode(); node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.
node.setGuid( taskNodeGUID ); node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) ); this.imperialTree.insert( node ); return taskNodeGUID; } @Override public void purge( GUID guid ) { this.removeNode( guid ); } @Override public QuickElement get(GUID guid ) { GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); QuickElement quickElement = this.quickElementManipulator.getQuickElement( guid, this.deployInstrument); //TODO /* this.applyCommonMeta( quickElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) ); */ quickElement.setDistributedTreeNode(node); quickElement.setGuid( guid ); return quickElement; } @Override public QuickElement get(GUID guid, int depth ) { return this.get( guid ); } @Override public QuickElement getAsRootDepth(GUID guid ) { return this.get( guid ); } @Override public void update( TreeNode nodeWideData ) { QuickElement quickElement = (QuickElement) nodeWideData; this.quickElementManipulator.update( quickElement ); this.nodeMetaManipulator.update( quickElement ); } @Override public void updateName( GUID guid, String name ) { } private void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath( guid ); this.quickElementManipulator.remove( node.getGuid() ); this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/VirtualMachineElementOperator.java ================================================ package com.pinecone.hydra.deploy.kom.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement; import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator; import com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public class VirtualMachineElementOperator extends ArchElementOperator implements ElementOperator{ protected VirtualMachineManipulator virtualMachineManipulator; public VirtualMachineElementOperator( ElementOperatorFactory factory ) { this( factory.getTaskMasterManipulator(), factory.getServicesTree() ); this.factory = factory; } public VirtualMachineElementOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ){ super( masterManipulator, deployInstrument ); this.virtualMachineManipulator = masterManipulator.getVirtualMachineManipulator(); } @Override public GUID insert( TreeNode treeNode ) { GenericVirtualMachineElement virtualMachineElement = ( GenericVirtualMachineElement ) treeNode; // Write the element to the database: first the node record into the application-node table. GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator(); GUID taskNodeGUID = guidAllocator.nextGUID(); virtualMachineElement.setGuid(taskNodeGUID); this.virtualMachineManipulator.insert( virtualMachineElement ); // Store the element's meta information in the meta table. this.nodeMetaManipulator.insert( virtualMachineElement ); // Store the node record in the master (trie) table. GUIDImperialTrieNode node = new GUIDImperialTrieNode(); node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.
node.setGuid( taskNodeGUID ); node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) ); this.imperialTree.insert( node ); return taskNodeGUID; } @Override public void purge( GUID guid ) { this.removeNode( guid ); } @Override public VirtualMachineElement get(GUID guid ) { GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); VirtualMachineElement virtualMachineElement = this.virtualMachineManipulator.getDeployNode( guid, this.deployInstrument); //TODO /* this.applyCommonMeta( virtualMachineElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) ); */ virtualMachineElement.setDistributedTreeNode(node); virtualMachineElement.setGuid( guid ); return virtualMachineElement; } @Override public VirtualMachineElement get(GUID guid, int depth ) { return this.get( guid ); } @Override public VirtualMachineElement getAsRootDepth(GUID guid ) { return this.get( guid ); } @Override public void update( TreeNode nodeWideData ) { VirtualMachineElement serviceElement = (VirtualMachineElement) nodeWideData; this.virtualMachineManipulator.update( serviceElement ); this.nodeMetaManipulator.update( serviceElement ); } @Override public void updateName( GUID guid, String name ) { } private void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath( guid ); this.virtualMachineManipulator.remove( node.getGuid() ); this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() ); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/ClusterNodeManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.entity.ClusterElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.deploy.kom.DeployInstrument; public interface ClusterNodeManipulator extends GUIDNameManipulator { void insert( ClusterElement clusterElement ); void remove( GUID guid ); ClusterElement getClusterElement( GUID guid, DeployInstrument instrument ); void update( ClusterElement clusterElement ); List fetchJobNodeByName( String name ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/ContainerElementManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.ContainerElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface ContainerElementManipulator extends GUIDNameManipulator { void insert( ContainerElement quickElement ); ContainerElement getContainerElement( GUID guid, DeployInstrument deployInstrument ); void update( ContainerElement serviceElement); void remove( GUID guid ); List< ContainerElement> fetchContainerElementByName( String name ); @Override List getGuidsByName( String name ); @Override List getGuidsByNameID( String name, GUID guid ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployMasterManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import 
com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; public interface DeployMasterManipulator extends KOIMasterManipulator { TrieTreeManipulator getTrieTreeManipulator() ; NodeMetaManipulator getNodeMetaManipulator(); ClusterNodeManipulator getJobNodeManipulator(); DeployNodeManipulator getDeployNodeManipulator(); DeployNamespaceManipulator getNamespaceManipulator(); TireOwnerManipulator getTireOwnerManipulator(); PhysicalHostManipulator getPhysicalHostManipulator(); VirtualMachineManipulator getVirtualMachineManipulator(); QuickElementManipulator getQuickElementManipulator(); ContainerElementManipulator getContainerElementManipulator(); DeployServiceInsMappingManipulator getDeployServiceInsMappingManipulator(); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployNamespaceManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.deploy.kom.entity.Namespace; public interface DeployNamespaceManipulator extends GUIDNameManipulator { void insert( Namespace ns ); void remove( GUID guid ); Namespace getNamespace( GUID guid ); void update( Namespace ns ); List fetchNamespaceNodeByName( String name ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployNodeManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.DeployElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; public interface DeployNodeManipulator extends GUIDNameManipulator { void insert( DeployElement deployElement ); void remove( GUID UUID ); void update( DeployElement taskElement ); List fetchDeployNodeByName( String name ); @Override List getGuidsByName( String name ); @Override List getGuidsByNameID( String name, GUID guid ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployServiceInsMappingManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.entity.DeployInsMapping; public interface DeployServiceInsMappingManipulator extends Pinenut { void insert( DeployInsMapping deployInsMapping ); DeployInsMapping queryDeployInsMappingByInsGuid( GUID insGuid ); DeployInsMapping queryDeployInsMappingByDeployGuid( GUID deployGuid ); void removeByInsGuid( GUID insGuid ); void removeByDeployGuid( GUID deployGuid ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/NodeMetaManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import 
com.pinecone.hydra.deploy.kom.DeployFamilyNode; import com.pinecone.hydra.deploy.kom.entity.CommonMeta; import com.pinecone.hydra.deploy.kom.entity.Namespace; public interface NodeMetaManipulator extends Pinenut { void insert( DeployFamilyNode node ); void insertNS( Namespace node ); void remove( GUID guid ); CommonMeta getNodeCommonMeta( GUID guid ); void update( DeployFamilyNode node ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/PhysicalHostManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface PhysicalHostManipulator extends GUIDNameManipulator { /* void insert(PhysicalHost physicalHost);*/ void insert( PhysicalHostElement physicalHostElement ); PhysicalHostElement getPhysicalHostElement( GUID guid, DeployInstrument deployInstrument ); void update( PhysicalHostElement serviceElement ); void remove( GUID guid ); @Override List getGuidsByName( String name ); @Override List getGuidsByNameID( String name, GUID guid ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/QuickElementManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.DeployElement; import com.pinecone.hydra.deploy.kom.entity.QuickElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface QuickElementManipulator extends GUIDNameManipulator { void insert( QuickElement quickElement ); QuickElement getQuickElement( GUID guid, DeployInstrument deployInstrument ); void update( QuickElement serviceElement ); void remove( GUID guid ); List fetchQuickElementByName( String name ); @Override List getGuidsByName( String name ); @Override List getGuidsByNameID( String name, GUID guid ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/VirtualMachineManipulator.java ================================================ package com.pinecone.hydra.deploy.kom.source; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.kom.DeployInstrument; import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; public interface VirtualMachineManipulator extends GUIDNameManipulator { void insert( VirtualMachineElement virtualMachineElement ); VirtualMachineElement getDeployNode( GUID guid, DeployInstrument instrument ); void update( VirtualMachineElement serviceElement ); void remove( GUID guid ); @Override List getGuidsByName( String name ); @Override List getGuidsByNameID( String name, GUID guid ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/ArchServer.java ================================================ package com.pinecone.hydra.server; import com.pinecone.framework.util.json.JSONObject; public abstract class ArchServer implements 
Server { protected String name; protected String nickName; protected boolean enable; protected String localDomain; protected String wideDomain; protected JSONObject extras; public String getName() { return this.name; } public void setName( String name ) { this.name = name; } public String getNickName() { return this.nickName; } public void setNickName( String nickName ) { this.nickName = nickName; } public boolean isEnable() { return this.enable; } public void setEnable( boolean enable ) { this.enable = enable; } public String getLocalDomain() { return this.localDomain; } public void setLocalDomain( String localDomain ) { this.localDomain = localDomain; } public String getWideDomain() { return this.wideDomain; } public void setWideDomain( String wideDomain ) { this.wideDomain = wideDomain; } public JSONObject getExtras() { return this.extras; } public void setExtras( JSONObject extras ) { this.extras = extras; } public Object get( Object key ) { return this.extras.getMap().get( key ); } @Override public String toJSONString() { return this.getExtras().toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/ArchServersCenter.java ================================================ package com.pinecone.hydra.server; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.ArchSystemCascadeComponent; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.JSONMaptron; import java.util.Map; public abstract class ArchServersCenter extends ArchSystemCascadeComponent implements ServersCenter { protected JSONObject serversConfig; protected JSONObject nameMap; protected JSONObject nickNameMap; public ArchServersCenter( Namespace name, Hydrogen system, HyComponent parent ) { super( name, system, system.getComponentManager(), parent ); this.nameMap = new JSONMaptron(); this.nickNameMap = new JSONMaptron(); this.loadConfig(); } public ArchServersCenter( Hydrogen system, HyComponent parent ) { this( null, system, parent ); } public ArchServersCenter( Hydrogen system ) { this( system, null ); } protected abstract void loadConfig() ; protected abstract Server newServer( JSONObject prototype ) ; protected void fetchAll() { for( Map.Entry skv : this.serversConfig.entrySet() ){ JSONObject seg = (JSONObject) skv.getValue(); for( Map.Entry seg_kv : seg.entrySet() ){ Object v = seg_kv.getValue(); if( v instanceof JSONObject ) { JSONObject archy = (JSONObject) seg_kv.getValue(); archy.put( "Hierarchy", seg_kv.getKey() ); this.addServer( this.newServer( archy ) ); } else if( v instanceof JSONArray ) { JSONArray archy = (JSONArray) seg_kv.getValue(); for ( int i = 0; i < archy.size(); i++ ) { JSONObject each = archy.optJSONObject(i); each.put( "Hierarchy", seg_kv.getKey() ); this.addServer( this.newServer( each ) ); } } } } } @Override public ServersCenter addServer( Server server ) { this.getNameMap().put( server.getName(), server ); this.getNickNameMap().put( server.getNickName(), server ); return this; } @Override public ServersCenter removeServer( Server server ) { this.getNameMap().remove( server.getName() ); this.getNickNameMap().remove( server.getNickName() ); return this; } @Override public JSONObject getNameMap() { return this.nameMap; } @Override public JSONObject getNickNameMap() { return this.nickNameMap; } }
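/*
 * A minimal usage sketch (illustration only, not part of this repository): DemoServersCenter and
 * DemoServer are hypothetical names, and the config is assumed to follow the
 * "segment -> hierarchy -> server prototype" layout that fetchAll() walks above.
 *
 *   public class DemoServersCenter extends ArchServersCenter {
 *       public DemoServersCenter( Hydrogen system ) {
 *           super( system ); // The base constructor invokes loadConfig() itself.
 *       }
 *
 *       @Override
 *       protected void loadConfig() {
 *           // Assumed source: any JSONObject, e.g. parsed from a deployment descriptor.
 *           this.serversConfig = new JSONMaptron();
 *           this.fetchAll(); // Materialize every prototype through newServer(...).
 *       }
 *
 *       @Override
 *       protected Server newServer( JSONObject prototype ) {
 *           DemoServer server = new DemoServer(); // Hypothetical concrete ArchServer subclass.
 *           server.setName( String.valueOf( prototype.getMap().get( "Name" ) ) );
 *           server.setExtras( prototype );
 *           return server;
 *       }
 *   }
 */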
================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/Server.java ================================================ package com.pinecone.hydra.server; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSONObject; public interface Server extends Pinenut { String getName(); void setName( String name ); String getNickName(); void setNickName( String nickName ); boolean isEnable(); void setEnable( boolean enable ); String getLocalDomain(); void setLocalDomain( String localDomain ); String getWideDomain(); void setWideDomain( String wideDomain ); JSONObject getExtras(); void setExtras( JSONObject extras ); Object get( Object key ); } ================================================ FILE: Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/ServersCenter.java ================================================ package com.pinecone.hydra.server; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSONObject; public interface ServersCenter extends Pinenut, HyComponent { JSONObject getNameMap() ; JSONObject getNickNameMap() ; ServersCenter addServer ( Server server ); ServersCenter removeServer( Server server ); Hydrogen getSystem(); } ================================================ FILE: Hydra/hydra-framework-runtime/pom.xml ================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId>
    <artifactId>hydra-framework-runtime</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-architecture-conduct</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.pinecone.slime</groupId> <artifactId>slime</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>8.0.26</version> </dependency>
        <dependency> <groupId>org.javassist</groupId> <artifactId>javassist</artifactId> <version>3.29.0-GA</version> </dependency>
    </dependencies>
</project>
================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/AbortException.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.PineRuntimeException; public class AbortException extends PineRuntimeException { public AbortException () { super(); } public AbortException ( String message ) { super(message); } public AbortException ( String message, Throwable cause ) { super(message, cause); } public AbortException ( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchAutomatron.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.GenericMasterTaskManager; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Processum; public abstract class ArchAutomatron extends ArchProcessum implements Automatron { private ExceptionHandler mExceptionHandler; protected Exception mLastException; protected ArchAutomatron( String szName, Processum parent, ExceptionHandler handler ) { super( szName, parent ); if( handler == null ) { handler = new DeathExceptionHandler( this ); } this.mTaskManager = new GenericMasterTaskManager( this ); this.mExceptionHandler = handler; } protected ArchAutomatron( String
szName, Processum parent ) { this( szName, parent, null ); } protected void handleException( Exception e ) throws ProxyProvokeHandleException, InstantKillException, AbortException, ContinueException { this.mLastException = e; try{ this.getExceptionHandler().handle( e ); } catch ( ContinueException c ) { throw c; } catch ( RuntimeException e1 ) { this.intoEnded(); throw e1; } } protected abstract void intoEnded() ; @Override public Exception getLastException() { return this.mLastException; } @Override public ExceptionHandler getExceptionHandler() { return this.mExceptionHandler; } @Override public Automatron setExceptionHandler( ExceptionHandler handler ) { this.mExceptionHandler = handler; return this; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchInstructation.java ================================================ package com.pinecone.hydra.auto; public abstract class ArchInstructation implements Instructation { protected Exception mLastException; protected ArchInstructation() { } public Exception lastException() { return this.mLastException; } public void setLastException( Exception e ) { this.mLastException = e; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchParallelInstructation.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.executum.ArchThreadum; import com.pinecone.framework.system.executum.Executum; import com.pinecone.framework.system.executum.Processum; public abstract class ArchParallelInstructation extends ArchInstructation implements ParallelInstructation { protected volatile boolean mbEnded ; protected long mStartNano ; protected long mnMaxJoinMillis ; protected Processum mParentPro ; protected Executum mMasterExecutum = new ArchThreadum( null, this.mParentPro ) { @Override public void apoptosis() { this.interrupt(); } }; protected Runnable mMasterRun = new Runnable() { protected ArchParallelInstructation ion = ArchParallelInstructation.this; @Override public void run() { try{ if( Thread.currentThread().isInterrupted() ) { if( this.ion instanceof Suggestation ) { ((Suggestation) this.ion).setIgnoredReason( IgnoredReason.Interrupt ); } return; } this.ion.doExecute(); } catch ( Exception e ) { this.ion.setLastException( e ); if( this.ion instanceof Suggestation ) { if( e instanceof InterruptedException ) { ((Suggestation) this.ion).setIgnoredReason( IgnoredReason.Interrupt ); } else if( e instanceof AbortException || e instanceof ContinueException ) { ((Suggestation) this.ion).setIgnoredReason( IgnoredReason.Abort ); } } } finally { this.ion.mbEnded = true; } } }; protected Thread mMasterThread = new Thread( this.mMasterRun ); protected ArchParallelInstructation( Processum parent, long nMaxJoinMillis ) { super(); this.mbEnded = false ; this.mnMaxJoinMillis = nMaxJoinMillis ; this.mParentPro = parent ; this.mStartNano = System.nanoTime() ; } protected ArchParallelInstructation( Processum parent ) { this( parent, -1 ); } @Override public void terminate() { this.interrupt(); } @Override public void interrupt(){ this.mMasterExecutum.interrupt(); } @Override public void kill(){ this.mMasterExecutum.kill(); } @Override public boolean isEnded() { return this.mbEnded; } @Override public long getStartNano() { return this.mStartNano; } protected abstract void doExecute() throws Exception; @Override public void execute() throws Exception { if( 
this.mbEnded && this.mMasterThread.getState() == Thread.State.TERMINATED ) { this.mMasterThread = new Thread( this.mMasterRun ); this.mbEnded = false; } this.mMasterThread.start(); if( this.mnMaxJoinMillis == 0 ) { this.mMasterThread.join(); } else if( this.mnMaxJoinMillis > 0 ) { this.mMasterThread.join( this.mnMaxJoinMillis ); } } @Override public boolean isDetached() { return this.mnMaxJoinMillis < 0; } @Override public boolean isJoined() { return this.mnMaxJoinMillis >= 0; } @Override public ArchParallelInstructation setDetach() { this.mnMaxJoinMillis = -1; return this; } @Override public ArchParallelInstructation setJoin() { this.mnMaxJoinMillis = 0; return this; } @Override public long getMaxJoinMillis() { return this.mnMaxJoinMillis; } @Override public ParallelInstructation setMaxJoinMillis( long join ) { this.mnMaxJoinMillis = join; return this; } @Override public Thread getMasterThread() { return this.mMasterThread; } @Override public Executum tryGetMasterExecutum() { return this.mMasterExecutum; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchParallelSuggestation.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.executum.Processum; public abstract class ArchParallelSuggestation extends ArchParallelInstructation implements ParallelSuggestation { protected IgnoredReason mIgnoredReason; protected ArchParallelSuggestation( Processum parent ){ super( parent ); } @Override public IgnoredReason getIgnoredReason() { return this.mIgnoredReason; } @Override public void setIgnoredReason( IgnoredReason ignoredReason ) { this.mIgnoredReason = ignoredReason; } @Override public abstract void execute(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchSequentialMarshalling.java ================================================ package com.pinecone.hydra.auto; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Deque; import java.util.concurrent.TimeoutException; public abstract class ArchSequentialMarshalling implements Marshalling { protected List mInstructations; protected Deque mPriorInstructations; protected List mParallelInstructations; protected MationInvoker mMationInvoker; protected ArchSequentialMarshalling( List instructations, List parallelInstructations, Deque priorInstructations, MationInvoker invoker ) { this.mParallelInstructations = parallelInstructations; this.mPriorInstructations = priorInstructations; this.mInstructations = instructations; this.mMationInvoker = invoker; } protected ArchSequentialMarshalling( MationInvoker invoker ) { this( new ArrayList<>(), new ArrayList<>(), new LinkedList<>(), invoker ); } protected ArchSequentialMarshalling() { this( new GenericMationInvoker() ); } @Override public Collection getInstructations() { return this.mInstructations; } @Override public Collection getParallelInstructations() { return this.mParallelInstructations; } @Override public Collection getPriorInstructations() { return this.mPriorInstructations; } @Override public void addLast( Instructation instructation ) { if( instructation instanceof InstantInstructation ) { this.prompt( instructation ); } else { this.mInstructations.add( instructation ); if ( instructation instanceof ParallelInstructation ) { this.mParallelInstructations.add( instructation ); } } } @Override public 
void addFirst( Instructation instructation ) { if( instructation instanceof InstantInstructation ) { this.prompt( instructation ); } else { this.mInstructations.add( 0, instructation ); if ( instructation instanceof ParallelInstructation ) { this.mParallelInstructations.add( 0, instructation ); } } } @Override public void erase( Instructation instructation ) { if( instructation instanceof InstantInstructation ) { this.mPriorInstructations.remove( instructation ); } else { this.mInstructations.remove( instructation ); if ( instructation instanceof ParallelInstructation ) { this.mParallelInstructations.remove( instructation ); } } } @Override public void prompt( Instructation instructation ) { this.mPriorInstructations.addFirst( instructation ); } protected boolean executeKernelInstructations( Instructation instruction ) throws Exception { if( instruction == KernelInstructation.DIE ) { instruction.execute(); return true; } return false; } protected void executePriorInstructations( boolean bOnlyTryCallKernel ) throws Exception { for( Instructation instruction : this.mPriorInstructations ) { if( !this.executeKernelInstructations( instruction ) && !bOnlyTryCallKernel ){ this.mMationInvoker.invoke( instruction ); } } } @Override public void execute() throws Exception { List children = this.mInstructations; try{ this.executePriorInstructations( false ); for( Instructation instruction : children ) { this.executePriorInstructations( true ); if( Thread.currentThread().isInterrupted() ) { throw new InterruptedException( "Interrupt termination." ); } this.mMationInvoker.invoke( instruction ); } } catch ( InstantKillException e ) { this.terminate(); throw e; } this.waitForParallelInstructations(); } @Override public void terminate() { for ( Instructation instruction : this.mParallelInstructations ) { ParallelInstructation parallelInstruction = (ParallelInstructation) instruction; parallelInstruction.terminate(); } } protected void waitForParallelInstructations() throws InterruptedException, TimeoutException { boolean allEnded; do { allEnded = true; for ( Instructation instruction : this.mParallelInstructations ) { if( Thread.currentThread().isInterrupted() ) { this.terminate(); throw new InterruptedException( "Interrupt termination." 
); } ParallelInstructation parallelInstruction = (ParallelInstructation) instruction; if ( !parallelInstruction.isEnded() ) { allEnded = false; try{ this.mMationInvoker.checkTimeout( parallelInstruction ); } catch ( TimeoutException e ) { this.mMationInvoker.terminate( parallelInstruction ); parallelInstruction.setLastException( e ); if( parallelInstruction instanceof ParallelSuggestation ) { ((ParallelSuggestation) parallelInstruction).setIgnoredReason( IgnoredReason.Overtime ); } else { throw e; } } Thread.sleep( 50 ); } } } while ( !allEnded ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchSuggestation.java ================================================ package com.pinecone.hydra.auto; public abstract class ArchSuggestation extends ArchInstructation implements Suggestation { protected IgnoredReason mIgnoredReason; protected ArchSuggestation() { super(); } @Override public IgnoredReason getIgnoredReason() { return this.mIgnoredReason; } @Override public void setIgnoredReason( IgnoredReason ignoredReason ) { this.mIgnoredReason = ignoredReason; } @Override public abstract void execute(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Automaton.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.executum.Processum; import java.util.concurrent.BlockingDeque; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; public class Automaton extends ArchAutomatron implements LifecycleAutomaton { private static final AtomicInteger nextSerialNumber = new AtomicInteger( 0 ); private static int serialNumber() { return Automaton.nextSerialNumber.getAndIncrement(); } private static String name( String name ) { if( name == null ) { return Automaton.class.getSimpleName() + "-" + Automaton.serialNumber(); } return name; } private Thread mMasterThread ; private AtomicLong mMaxLifetimeMillis ; private AtomicLong mHeartbeatTimeoutMillis; protected volatile boolean mRunning ; protected long mnCurrentPipelineWaitingMillis; protected final BlockingDeque mInstructationQueue ; protected Automaton( String szName, Processum parent, BlockingDeque deque, boolean bIsDaemon, long nCurrentPipelineWaitingMillis ) { super( Automaton.name( szName ), parent ); this.mMaxLifetimeMillis = new AtomicLong( 0 ); this.mHeartbeatTimeoutMillis = new AtomicLong( 0 ); this.mnCurrentPipelineWaitingMillis = nCurrentPipelineWaitingMillis; this.mInstructationQueue = deque; this.mMasterThread = new Thread( this::mainLoop ); this.mMasterThread.setDaemon( bIsDaemon ); this.mMasterThread.setName( this.mszName + this.mMasterThread.getName() ); this.setThreadAffinity( this.mMasterThread ); } public Automaton( String szName, Processum parent, boolean bIsDaemon, long nCurrentPipelineWaitingMillis ) { this( szName, parent, new LinkedBlockingDeque<>(), bIsDaemon, nCurrentPipelineWaitingMillis ); } public Automaton( Processum parent, boolean bIsDaemon, long nCurrentPipelineWaitingMillis ) { this( null, parent, bIsDaemon, nCurrentPipelineWaitingMillis ); } public Automaton( String szName, Processum parent, long nCurrentPipelineWaitingMillis ) { this( szName, parent, false, nCurrentPipelineWaitingMillis ); } public Automaton( Processum parent, long nCurrentPipelineWaitingMillis ) { this( null, 
parent, nCurrentPipelineWaitingMillis ); } public Automaton( String szName, Processum parent, boolean bIsDaemon ) { this( szName, parent, bIsDaemon, 50 ); } public Automaton( Processum parent, boolean bIsDaemon ) { this( null, parent, bIsDaemon ); } public Automaton( String szName, Processum parent ) { this( szName, parent, false ); } public Automaton( Processum parent ) { this( null, parent ); } @Override public void start() { this.mRunning = true; this.mMasterThread.start(); } @Override public void join() throws InterruptedException { this.mMasterThread.join(); } @Override public void join( long millis ) throws InterruptedException { this.mMasterThread.join( millis ); } @Override public void command( Instructation instructation ) { this.mInstructationQueue.addLast( instructation ); } @Override public void prompt( Instructation instructation ) { this.mInstructationQueue.addFirst( instructation ); } @Override public void withdraw( Instructation instructation ) { this.mInstructationQueue.remove( instructation ); } @Override public boolean isEnded() { return !this.mRunning; } @Override protected void intoEnded() { this.mRunning = false; } @Override public long getMaxLifetimeMillis() { return this.mMaxLifetimeMillis.get(); } @Override public LifecycleAutomaton setMaxLifetimeMillis( long maxLifetimeMillis ) { this.mMaxLifetimeMillis.getAndSet( maxLifetimeMillis ); return this; } @Override public long getHeartbeatTimeoutMillis() { return this.mHeartbeatTimeoutMillis.get(); } @Override public LifecycleAutomaton setHeartbeatTimeoutMillis( long heartbeatTimeoutMillis ) { this.mHeartbeatTimeoutMillis.getAndSet( heartbeatTimeoutMillis ); return this; } protected void mainLoop() { long startTime = System.currentTimeMillis(); long lastCommandTime = System.currentTimeMillis(); while ( this.mRunning ) { try{ if( Thread.currentThread().isInterrupted() ) { throw new AbortException(); } long currentTime = System.currentTimeMillis(); if ( this.getMaxLifetimeMillis() > 0 && ( currentTime - startTime ) > this.getMaxLifetimeMillis() ) { this.intoEnded(); // Suicide break; } if ( this.getHeartbeatTimeoutMillis() > 0 && ( currentTime - lastCommandTime ) > this.getHeartbeatTimeoutMillis() ) { this.intoEnded(); // Suicide break; } Instructation instructation = this.mInstructationQueue.poll( this.mnCurrentPipelineWaitingMillis, TimeUnit.MILLISECONDS ); if ( instructation != null ) { try { instructation.execute(); } catch ( ContinueException c ) { // Do nothing } lastCommandTime = System.currentTimeMillis(); // Reset heartbeat timeout } } catch ( Exception e ) { try{ this.handleException( e ); } catch ( ContinueException c ) { // Do nothing } catch ( Exception ke ) { break; } } } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Automatron.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.regime.Automatus; public interface Automatron extends Processum, Automatus { void start(); void join() throws InterruptedException; void join( long millis ) throws InterruptedException; // Add to pipeline tail void command ( Instructation instructation ); // Add to pipeline front void prompt ( Instructation instructation ); void withdraw ( Instructation instructation ); default void terminate() { this.prompt( KernelInstructation.DIE ); } boolean isEnded(); Exception getLastException(); ExceptionHandler getExceptionHandler(); 
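// Typical driving sequence (illustration only; `parent` is a placeholder Processum and the
// anonymous instructation is a stand-in for real work):
//
//   Automatron auto = new Automaton( parent );
//   auto.start();
//   auto.command( new ArchInstructation() {      // queued at the pipeline tail
//       @Override public void execute() { /* work */ }
//   } );
//   auto.terminate();                            // prompts KernelInstructation.DIE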
Automatron setExceptionHandler( ExceptionHandler handler ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/AutomatronMationInvoker.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.executum.Executum; public class AutomatronMationInvoker extends GenericMationInvoker { protected Automatron mAutomatron; public AutomatronMationInvoker( long maxExecutionMillis, long maxInterruptMillis, Automatron automatron ) { super( maxExecutionMillis, maxInterruptMillis ); this.mAutomatron = automatron; } public AutomatronMationInvoker( Automatron automatron ) { this( Long.MAX_VALUE, -1, automatron ); } @Override public void invoke ( Instructation instructation ) throws Exception { Executum executum = null; if( instructation instanceof ParallelInstructation ) { executum = ((ParallelInstructation) instructation).tryGetMasterExecutum(); if( executum != null ) { this.mAutomatron.getTaskManager().add( executum ); } } try{ super.invoke( instructation ); } catch ( Exception e ) { if( executum != null ) { this.mAutomatron.getTaskManager().erase( executum ); } throw e; } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Continue.java ================================================ package com.pinecone.hydra.auto; public final class Continue extends ArchInstructation { Continue () { } @Override public void execute() throws Exception { throw new ContinueException(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ContinueException.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.PineRuntimeException; public class ContinueException extends PineRuntimeException { public ContinueException () { super(); } public ContinueException ( String message ) { super(message); } public ContinueException ( String message, Throwable cause ) { super(message, cause); } public ContinueException ( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/DeathExceptionHandler.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.Debug; public class DeathExceptionHandler implements ExceptionHandler { protected Automatron mAutomatron; public DeathExceptionHandler( Automatron automatron ) { this.mAutomatron = automatron; } @Override public void handle( Exception e ) throws ProxyProvokeHandleException, InstantKillException, AbortException, ContinueException { if( e instanceof InstantKillException ) { Debug.info( "[NOTICE] " ); //e.printStackTrace(); throw new InstantKillException( e ) ; } else if( e instanceof ContinueException ) { throw (ContinueException) e; } else if( e instanceof AbortException ) { throw (AbortException) e; } else { e.printStackTrace(); throw new ProxyProvokeHandleException( e ); } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Die.java ================================================ package com.pinecone.hydra.auto; public final class Die extends ArchInstructation implements InstantInstructation { Die () { } @Override public void 
execute() throws Exception { throw new InstantKillException(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ExceptionHandler.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.prototype.Pinenut; public interface ExceptionHandler extends Pinenut { void handle( Exception e ) throws ProxyProvokeHandleException, InstantKillException, AbortException, ContinueException; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/GenericMarshalling.java ================================================ package com.pinecone.hydra.auto; public class GenericMarshalling extends ArchSequentialMarshalling { protected Automatron mAutomatron; public GenericMarshalling( Automatron automatron, MationInvoker invoker ) { super( invoker ); this.mAutomatron = automatron; } public GenericMarshalling( Automatron automatron ) { this( automatron, new AutomatronMationInvoker( automatron ) ); } public GenericMarshalling() { this( null, new GenericMationInvoker() ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/GenericMationInvoker.java ================================================ package com.pinecone.hydra.auto; public class GenericMationInvoker implements MationInvoker { private long mMaxExecutionMillis; private long mMaxInterruptMillis; public GenericMationInvoker( long maxExecutionMillis, long maxInterruptMillis ) { this.mMaxExecutionMillis = maxExecutionMillis; this.mMaxInterruptMillis = maxInterruptMillis; } public GenericMationInvoker() { this( Long.MAX_VALUE, -1 ); } @Override public long getMaxExecutionMillis() { return this.mMaxExecutionMillis; } @Override public long getMaxInterruptMillis() { return this.mMaxInterruptMillis; } @Override public void setMaxExecutionMillis( long maxExecutionMillis ) { this.mMaxExecutionMillis = maxExecutionMillis; } @Override public void setMaxInterruptMillis( long maxInterruptMillis ) { this.mMaxInterruptMillis = maxInterruptMillis; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Heartbeat.java ================================================ package com.pinecone.hydra.auto; public final class Heartbeat extends ArchInstructation implements InstantInstructation { Heartbeat () { } @Override public void execute() throws Exception { } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/IgnoredReason.java ================================================ package com.pinecone.hydra.auto; public enum IgnoredReason { Exception ( "Exception" ), Violation ( "Violation" ), Overtime ( "Overtime" ), Interrupt ( "Interrupt" ), Abort ( "Abort" ); private final String value; IgnoredReason( String value ){ this.value = value; } public String getName(){ return this.value; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/InstantInstructation.java ================================================ package com.pinecone.hydra.auto; public interface InstantInstructation extends Instructation { } ================================================ FILE: 
Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/InstantKillException.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.PineRuntimeException; public class InstantKillException extends PineRuntimeException { public InstantKillException () { super(); } public InstantKillException ( String message ) { super(message); } public InstantKillException ( String message, Throwable cause ) { super(message, cause); } public InstantKillException ( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Instructation.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.functions.Executor; /** * Instruction -mation */ public interface Instructation extends Executor { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/KernelInstructation.java ================================================ package com.pinecone.hydra.auto; public final class KernelInstructation { public static final Die DIE = new Die(); public static final Continue CONTINUE = new Continue(); public static final Heartbeat HEARTBEAT = new Heartbeat(); //public static final Terminate TERMINATE = new Terminate(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/LifecycleAutomaton.java ================================================ package com.pinecone.hydra.auto; public interface LifecycleAutomaton extends Automatron { long getMaxLifetimeMillis(); LifecycleAutomaton setMaxLifetimeMillis( long maxLifetimeMillis ); long getHeartbeatTimeoutMillis(); LifecycleAutomaton setHeartbeatTimeoutMillis( long heartbeatTimeoutMillis ); default LifecycleAutomaton sendHeartbeat() { this.command( KernelInstructation.HEARTBEAT ); return this; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Marshalling.java ================================================ package com.pinecone.hydra.auto; import java.util.Collection; public interface Marshalling extends Instructation { default void add( Instructation instructation ) { this.addLast( instructation ); } void addLast( Instructation instructation ); void addFirst( Instructation instructation ); void erase( Instructation instructation ); void prompt( Instructation instructation ); Collection getInstructations(); Collection getParallelInstructations(); Collection getPriorInstructations(); void terminate(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/MationInvoker.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.prototype.Pinenut; import java.util.concurrent.TimeoutException; public interface MationInvoker extends Pinenut { default boolean isInstructationViolation( Suggestation suggestation ) { return false; } default void invoke ( Instructation instructation ) throws Exception { try { if( instructation instanceof Suggestation ) { if( this.isInstructationViolation( (Suggestation)instructation ) ){ ((Suggestation) instructation).setIgnoredReason( IgnoredReason.Violation ); return; } } instructation.execute(); } catch ( ContinueException c ) { // Just continue. 
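// A ContinueException merely demotes a Suggestation to an ignored result (reason Abort) rather
// than failing the pipeline; for a plain Instructation it is re-thrown so the surrounding
// marshalling loop can decide how to proceed.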
if( instructation instanceof Suggestation ) { Suggestation suggestation = ((Suggestation) instructation); suggestation.setIgnoredReason( IgnoredReason.Abort ); suggestation.setLastException( c ); } else { throw c; } } catch ( Exception e ) { if( instructation instanceof Suggestation ) { Suggestation suggestation = ((Suggestation) instructation); suggestation.setIgnoredReason( IgnoredReason.Exception ); suggestation.setLastException( e ); } else if( instructation instanceof ParallelInstructation ) { ((ParallelInstructation) instructation).setLastException( e ); } else { throw e; } } } default void terminate( ParallelInstructation instructation ) throws InterruptedException { instructation.interrupt(); if( this.getMaxInterruptMillis() > 0 ) { long startApoptosisMillis = System.currentTimeMillis(); long maxApoptosisMillis = this.getMaxInterruptMillis(); while ( System.currentTimeMillis() - startApoptosisMillis < maxApoptosisMillis ) { if (instructation.isEnded()) { return; } Thread.sleep( 50 ); } } // If the instructation is still not ended, kill it if ( !instructation.isEnded() ) { instructation.kill(); } } long getMaxExecutionMillis(); long getMaxInterruptMillis(); void setMaxExecutionMillis( long maxExecutionMillis ); void setMaxInterruptMillis( long maxInterruptMillis ); default void checkTimeout( ParallelInstructation instructation ) throws TimeoutException { if ( instructation.getExecutedMillis() > this.getMaxExecutionMillis() ) { throw new TimeoutException( "Execution exceeded max time limit of " + this.getMaxExecutionMillis() + " milliseconds." ); } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ParallelInstructation.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.executum.Chronum; import com.pinecone.framework.system.executum.Executum; public interface ParallelInstructation extends Instructation, Chronum { boolean isEnded(); Exception lastException(); void setLastException( Exception e ); void terminate() ; void interrupt(); void kill(); boolean isDetached(); boolean isJoined(); ParallelInstructation setDetach(); ParallelInstructation setJoin(); long getMaxJoinMillis(); ParallelInstructation setMaxJoinMillis( long join ); Thread getMasterThread(); default Executum tryGetMasterExecutum() { return null; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ParallelSuggestation.java ================================================ package com.pinecone.hydra.auto; public interface ParallelSuggestation extends ParallelInstructation, Suggestation { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/PeriodicAutomaton.java ================================================ package com.pinecone.hydra.auto; import com.pinecone.framework.system.executum.Processum; import java.util.ArrayList; import java.util.LinkedList; import java.util.Collection; import java.util.Deque; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantReadWriteLock; public class PeriodicAutomaton extends ArchAutomatron implements PeriodicAutomatron { private static final AtomicInteger nextSerialNumber = new AtomicInteger( 0 ); private static int serialNumber() { return PeriodicAutomaton.nextSerialNumber.getAndIncrement(); } private static 
String name( String name ) { if( name == null ) { return PeriodicAutomaton.class.getSimpleName() + "-" + PeriodicAutomaton.serialNumber(); } return name; } private Marshalling mMarshalling; private final AtomicLong mPeriodMillis; private Thread mMasterThread; protected Deque<InstructLine> mInstructationBuffer; protected ReentrantReadWriteLock mBufferLock; protected final Object mNextPeriodLock = new Object(); protected volatile boolean mRunning; protected PeriodicAutomaton( String szName, Processum parent, Marshalling marshalling, Deque<InstructLine> buffer, long nPeriodMillis, boolean bIsDaemon, ExceptionHandler handler ) { super( PeriodicAutomaton.name( szName ), parent, handler ); this.mBufferLock = new ReentrantReadWriteLock(); this.mPeriodMillis = new AtomicLong( nPeriodMillis ); this.mMarshalling = marshalling; this.mInstructationBuffer = buffer; this.mMasterThread = new Thread( this::mainLoop ); this.mMasterThread.setDaemon( bIsDaemon ); this.mMasterThread.setName( this.mszName + this.mMasterThread.getName() ); this.setThreadAffinity( this.mMasterThread ); } public PeriodicAutomaton( String szName, Processum parent, Marshalling marshalling, long nPeriodMillis, boolean bIsDaemon, ExceptionHandler handler ) { this( szName, parent, marshalling, new LinkedList<>(), nPeriodMillis, bIsDaemon, handler ); } public PeriodicAutomaton( String szName, Processum parent, Marshalling marshalling, long nPeriodMillis, boolean bIsDaemon ) { this( szName, parent, marshalling, nPeriodMillis, bIsDaemon, null ); } public PeriodicAutomaton( String szName, Processum parent, ExceptionHandler handler, long nPeriodMillis, boolean bIsDaemon ) { this( szName, parent, new GenericMarshalling(), nPeriodMillis, bIsDaemon, handler ); } public PeriodicAutomaton( String szName, Processum parent, long nPeriodMillis, boolean bIsDaemon ) { this( szName, parent, (ExceptionHandler) null, nPeriodMillis, bIsDaemon ); } public PeriodicAutomaton( Processum parent, long nPeriodMillis, boolean bIsDaemon ) { this( null, parent, (ExceptionHandler) null, nPeriodMillis, bIsDaemon ); } public PeriodicAutomaton( Processum parent, long nPeriodMillis ) { this( null, parent, (ExceptionHandler) null, nPeriodMillis, false ); } @Override public void start() { this.mRunning = true; this.mMasterThread.start(); } @Override public void join() throws InterruptedException { this.mMasterThread.join(); } @Override public void join( long millis ) throws InterruptedException { this.mMasterThread.join( millis ); } @Override public boolean isEnded() { return !this.mRunning; } protected void invokeIfKernelInstructation( Instructation instructation ) throws Exception { if( instructation == KernelInstructation.DIE ) { instructation.execute(); } } @Override protected void intoEnded() { this.mRunning = false; } protected void fetchCacheIntoMarshalling( boolean bLoopMode ) { // Locked writing operations.
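/*
 * [Editor's note] Design: producers never feed the Marshalling directly while the automaton is
 * running. command()/prompt()/withdraw() append InstructLine records to mInstructationBuffer
 * under the write lock; the buffer is drained into the Marshalling here, either at the top of
 * each period by the master thread or eagerly by a producer before it enqueues. Since drains
 * hold the write lock and Marshalling.execute() runs under the read lock, a drain can never
 * overlap a running period.
 */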
this.mBufferLock.writeLock().lock(); try { if ( this.mInstructationBuffer.isEmpty() ) { return; } for( InstructLine line : this.mInstructationBuffer ) { if( line.setRemove ) { if( bLoopMode ) { this.invokeIfKernelInstructation( line.instructation ); } if( line.instructation instanceof InstantInstructation ) { this.mMarshalling.prompt( line.instructation ); } else { if( line.prior ) { this.mMarshalling.addFirst( line.instructation ); } else { this.mMarshalling.addLast( line.instructation ); } } } else { this.mMarshalling.erase( line.instructation ); } } this.mInstructationBuffer.clear(); } catch ( Exception e ) { this.handleException( e ); } finally { this.mBufferLock.writeLock().unlock(); } } protected void mainLoop() { try{ while ( this.mRunning ) { try { long startTime = System.currentTimeMillis(); if( Thread.currentThread().isInterrupted() ) { throw new AbortException(); } this.fetchCacheIntoMarshalling( true ); if( !this.mRunning ) { // Check whether a `death` instruction was given. break; } // Locked against writing operations, between this thread (the consumer, executing all commands) and the others (producers adding new commands). this.mBufferLock.readLock().lock(); try{ try{ this.mMarshalling.execute(); } catch ( ContinueException c ) { // Do nothing } } finally { this.mBufferLock.readLock().unlock(); } //Debug.echo( "\n" ); long endTime = System.currentTimeMillis(); long elapsed = endTime - startTime; long sleepTime = this.mPeriodMillis.get() - elapsed; if ( sleepTime > 0 ) { synchronized ( this.mNextPeriodLock ) { this.mNextPeriodLock.wait( sleepTime ); } } } catch ( Exception e ) { try{ this.handleException( e ); } catch ( ContinueException c ) { // Do nothing } catch ( Exception ke ) { break; } } } } finally { synchronized ( this.mNextPeriodLock ) { this.mNextPeriodLock.notify(); } } } protected boolean tryLockBuffer() { boolean bHeldByCurrentThread = this.mBufferLock.writeLock().isHeldByCurrentThread(); if( !bHeldByCurrentThread ) { bHeldByCurrentThread = Thread.currentThread() == this.mMasterThread; } boolean bOptLocked = true; if( bHeldByCurrentThread ) { bOptLocked = this.mBufferLock.writeLock().tryLock(); } else { this.mBufferLock.writeLock().lock(); } return bOptLocked; } protected void add ( Instructation instructation, boolean bPrior ) { boolean bOptLocked = this.tryLockBuffer(); try{ if ( !this.mRunning ) { // Not running: write directly into the marshalling, under the write lock. this.mMarshalling.add( instructation ); } else { if( Thread.currentThread() != this.mMasterThread ) { // Nested operation this.fetchCacheIntoMarshalling( false ); } InstructLine line = new InstructLine( instructation, bPrior, true ); if( bPrior ) { this.mInstructationBuffer.addFirst( line ); } else { this.mInstructationBuffer.addLast( line ); } } } finally { if( bOptLocked && this.mBufferLock.writeLock().getHoldCount() > 0 ) { this.mBufferLock.writeLock().unlock(); } } } @Override public void command( Instructation instructation ) { this.add( instructation, false ); } @Override public void prompt( Instructation instructation ) { this.add( instructation, true ); } @Override public void withdraw( Instructation instructation ) { boolean bOptLocked = this.tryLockBuffer(); try { InstructLine target = null; for( InstructLine line : this.mInstructationBuffer ) { if( line.instructation.equals( instructation ) ) { target = line; break; } } if( target != null ) { this.mInstructationBuffer.remove( target ); } if ( !this.mRunning ) { // Not running: erase directly from the marshalling, under the write lock.
this.mMarshalling.erase( instructation ); } else { if( Thread.currentThread() != this.mMasterThread ) { // Nested operation this.fetchCacheIntoMarshalling( false ); } InstructLine line = new InstructLine( instructation, false, false ); this.mInstructationBuffer.addLast( line ); } } finally { if( bOptLocked && this.mBufferLock.writeLock().getHoldCount() > 0 ) { this.mBufferLock.writeLock().unlock(); } } } @Override public long getPeriodMillis() { return this.mPeriodMillis.get(); } @Override public void setPeriodMillis( long periodMillis ) { this.mPeriodMillis.getAndSet( periodMillis ); } @Override public Collection<Instructation> getBuffer() { ArrayList<Instructation> list = new ArrayList<>(); for( InstructLine line : this.mInstructationBuffer ) { list.add( line.instructation ); } return list; } @Override public int bufferSize() { return this.mInstructationBuffer.size(); } @Override public Marshalling getMarshalling() { return this.mMarshalling; } @Override public Thread getMasterThread() { return this.mMasterThread; } protected class InstructLine { protected Instructation instructation; protected boolean prior; protected boolean setRemove; // false => remove, true => set protected InstructLine ( Instructation instructation, boolean prior, boolean setRemove ) { this.instructation = instructation; this.prior = prior; this.setRemove = setRemove; } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/PeriodicAutomatron.java ================================================ package com.pinecone.hydra.auto; import java.util.Collection; public interface PeriodicAutomatron extends Automatron { long getPeriodMillis(); void setPeriodMillis( long periodMillis ); Thread getMasterThread(); int bufferSize(); Collection<Instructation> getBuffer(); Marshalling getMarshalling(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Suggestation.java ================================================ package com.pinecone.hydra.auto; public interface Suggestation extends Instructation { @Override void execute(); // No checked exceptions default boolean hasIgnored() { return this.getIgnoredReason() != null; } default boolean hasAccepted() { return this.getIgnoredReason() == null; } IgnoredReason getIgnoredReason(); void setIgnoredReason( IgnoredReason ignoredReason ); Exception lastException(); void setLastException( Exception e ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Terminate.java ================================================ package com.pinecone.hydra.auto; public final class Terminate extends ArchInstructation { Terminate () { } @Override public void execute() throws Exception { throw new InstantKillException(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchExertion.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.util.Debug; public abstract class ArchExertion extends ArchGraphNode implements Exertion { protected String mszName; protected boolean mbDefaultRollback = false; protected IntegrityLevel mIntegrityLevel = IntegrityLevel.Strict; protected ExertionStatus mStatus = ExertionStatus.NEW; protected Exception mLastError = null; protected long mnStartNano; protected ArchExertion() { this.mnStartNano = System.nanoTime(); } @Override public void reset() {
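/*
 * [Editor's note] ArchExertion is a template method over the ExertionStatus DFA: start() moves
 * NEW -> RUNNING -> doStart() -> FINISHED, or ERROR when handleErrorCondition() rejects the
 * failure. A minimal concrete exertion (hypothetical; assumes the three do* hooks are the only
 * abstract members left to implement):
 *
 *   class PrintExertion extends ArchExertion {
 *       @Override protected void doStart()     { System.out.println( this.getName() ); }
 *       @Override protected void doTerminate() { }
 *       @Override protected void doRollback()  { }
 *   }
 *
 * Under the default IntegrityLevel.Strict, an exception in doStart() drives the DFA to ERROR
 * and becomes retrievable via getLastError().
 */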
this.mStatus = ExertionStatus.NEW; } @Override public String getName() { return this.mszName; } @Override public void setName( String name ) { this.mszName = name; } @Override public IntegrityLevel getIntegrityLevel(){ return this.mIntegrityLevel; } @Override public void setIntegrityLevel( IntegrityLevel level ){ this.mIntegrityLevel = level; } @Override public long getStartNano() { return this.mnStartNano; } @Override public void setDefaultRollback( boolean b ){ this.mbDefaultRollback = b; } @Override public boolean isDefaultRollback() { return this.mbDefaultRollback; } @Override public ExertionStatus getStatus() { return this.mStatus; } protected void intoStart() { this.mStatus = ExertionStatus.RUNNING; } protected void intoFinished() { this.mStatus = ExertionStatus.FINISHED; } protected void intoTerminated() { this.mStatus = ExertionStatus.TERMINATED; } protected void intoRollback() { this.mStatus = ExertionStatus.ROLLING; } protected void intoError( Exception e ) { this.mStatus = ExertionStatus.ERROR; this.mLastError = e; } @Override public Exception getLastError() { return this.mLastError; } protected abstract void doStart(); protected abstract void doTerminate(); protected abstract void doRollback(); protected boolean handleErrorCondition( Exception e ) { if( this.mIntegrityLevel != IntegrityLevel.Strict ) { if( this.mIntegrityLevel == IntegrityLevel.Warning ) { Debug.warn( "TODO", e, e.getMessage() ); // TODO e.printStackTrace(); } return true; } return false; } @Override public void start() { this.intoStart(); try{ this.doStart(); this.intoFinished(); } catch ( Exception e ) { if( this.handleErrorCondition( e ) ) { this.intoFinished(); } else { this.intoError( e ); } } } @Override public void terminate() { try{ this.doTerminate(); this.intoTerminated(); } catch ( Exception e ) { if( this.handleErrorCondition( e ) ) { this.intoTerminated(); } else { this.intoError( e ); } } } @Override public void rollback() { this.intoRollback(); try{ this.doRollback(); this.intoFinished(); } catch ( Exception e ) { if( this.handleErrorCondition( e ) ) { this.intoFinished(); } else { this.intoError( e ); } } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchGraphNode.java ================================================ package com.pinecone.hydra.orchestration; public abstract class ArchGraphNode implements GraphNode { protected int mnStratumId; protected GraphNode mParent; protected void setParent( GraphNode parent ) { this.mParent = parent; } @Override public int getStratumId() { return this.mnStratumId; } @Override public GraphNode parent() { return this.mParent; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchIrrevocableController.java ================================================ package com.pinecone.hydra.orchestration; public abstract class ArchIrrevocableController extends ArchExertion implements ProcessController { public ArchIrrevocableController() { super(); } @Override public void doStart() { // Marking state for DFA. } @Override public void doTerminate() { } @Override public void doRollback() { // Do nothing, an 'Irrevocable Controller' (e.g. Break, Continue) can't be directly withdrawn.
} } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchLoop.java ================================================ package com.pinecone.hydra.orchestration; public abstract class ArchLoop extends ArchSequential implements Loop { @Override protected BranchContext init_branch_context() { BranchContext context = new BranchContext(); context.doBreak = false; return context; } @Override protected boolean do_process_controller ( ProcessController controller, BranchContext context ) { context.doBreak = this.invoke_process_controller( controller ); if( context.doBreak ) { return true; } if( controller instanceof BreakController ) { context.doBreak = true; return true; } else if( controller instanceof ContinueController ) { return true; } else if( controller instanceof JumpController ) { try{ JumpController jmp = ((JumpController) controller); context.jmpPoint = this.eval_jump_point( jmp.getJumpPoint() ); return true; } catch ( InstantJumpOutBranchException e ){ context.doBreak = true; return true; } } throw new IllegalArgumentException( "ProcessController for Loop can ONLY be [break, continue, jump]" ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchParallel.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.hydra.orchestration.parallel.ParallelExertion; import java.util.Iterator; import java.util.List; import java.util.concurrent.Phaser; public abstract class ArchParallel extends ArchSequential implements Parallel { protected Phaser activePhaser = new Phaser(1); ArchParallel() { super(); } @Override public void reset() { this.getExertium().reset(); } @Override protected void waiting_exertions_pool_synchronized() { this.activePhaser.arriveAndAwaitAdvance(); List<GraphNode> children = this.getChildren(); Iterator<GraphNode> iter = children.iterator(); while ( iter.hasNext() ) { Exertion exertion = (Exertion) iter.next(); if( !exertion.isFinished() ){ throw new UnfulfilledActionException( "Illegal transaction status, unfulfilled exertion found."
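/*
 * [Editor's note] Phaser bookkeeping: the pool starts with one registered party, the
 * transaction itself (`new Phaser(1)` above). notifyExecuting() registers each launched
 * ParallelExertion and notifyFinished() arrives-and-deregisters it, so the
 * arriveAndAwaitAdvance() call here blocks until every child has checked out, behaving like a
 * CountDownLatch whose membership can grow while the phase is open.
 */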
, exertion ); } } } @Override protected void waiting_for_single_exertion( Exertion exertion ) { if( exertion instanceof ParallelExertion ) { ParallelExertion pe = ( ParallelExertion ) exertion; if( pe.isForceSynchronized() ) { synchronized ( pe.getFinaleLock() ) { if( pe.getMasterExecutum().getAffiliateThread().isAlive() ) { try { long nMax = pe.getMaximumExecutionTime(); if( nMax > 0 ) { pe.getFinaleLock().wait( nMax ); } else { pe.getFinaleLock().wait( ); } } catch ( InterruptedException e ) { throw new UnfulfilledActionException( e ); } } } } } } @Override protected boolean is_dfa_status_finished_check_required( Exertion exertion ) { if( exertion instanceof ParallelExertion ) { return ((ParallelExertion) exertion).isForceSynchronized(); } return true; } @Override public void notifyFinished( Exertion exertion ){ this.activePhaser.arriveAndDeregister(); } @Override public void notifyExecuting ( Exertion exertion ) { this.activePhaser.register(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchSequential.java ================================================ package com.pinecone.hydra.orchestration; import java.util.Iterator; import java.util.List; public abstract class ArchSequential extends ArchTransaction implements Sequential { ArchSequential() { super(); } @Override public void reset() { this.getExertium().reset(); } protected void execute_exertion( Exertion exertion ) { try { if( this.mExertionStartCB != null ) { this.mExertionStartCB.callback( exertion ); } exertion.start(); this.waiting_for_single_exertion( exertion ); if( this.is_dfa_status_finished_check_required( exertion ) && !exertion.isFinished() ) { throw new UnfulfilledActionException( exertion ); } if( this.mExertionEndCB != null ) { this.mExertionEndCB.callback( exertion ); } } catch ( RuntimeException e ) { if( !this.getSeqExceptionNeglector().isNeglectException( e ) ){ if( exertion.getIntegrityLevel() != IntegrityLevel.Strict ) { if( exertion.isDefaultRollback() ) { exertion.rollback(); // TODO: Notice for warning. } else { throw e; } } } } } protected void noticeAll( BranchNoticeException e ) { List<GraphNode> children = this.getChildren(); for( GraphNode node : children ) { if( node instanceof Notifiable ) { ((Notifiable) node).notice( e ); } } } @SuppressWarnings("unchecked") protected Iterator<GraphNode> eval_jump_point( Object at ) throws InstantJumpOutBranchException { if( at instanceof Iterator ) { return (Iterator<GraphNode>) at; } else if( at instanceof Integer ) { int id = (int) at; int i = 0; List<GraphNode> children = this.getChildren(); Iterator<GraphNode> it = children.iterator(); if( id > children.size() ) { throw new IndexOutOfBoundsException( "Jump [Segment:" + id + "] is out of range." ); } else if( id == children.size() ) { // Instant jump-out. throw new InstantJumpOutBranchException(); } while ( it.hasNext() ) { if( i == id ) { return it; } it.next(); ++i; } } return null; } protected boolean invoke_process_controller( ProcessController controller ) { try{ controller.call(); } catch ( BranchNoticeException e ) { if( e.isNoticeAll() ) { // TODO: Notice designed. this.noticeAll( e ); } } catch ( InstantJumpOutBranchException e ){ return true; } catch ( BranchControlException e ){ e.printStackTrace(); // TODO: BranchControlException for finer-grained control granularity.
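/*
 * [Editor's note] Composition sketch (hypothetical `stepA`/`stepB` exertions): controllers are
 * ordinary children of the graph, interpreted in-line by start() via do_process_controller():
 *
 *   SequentialAction seq = new SequentialAction();
 *   seq.add( stepA );                   // child 0: any Exertion
 *   seq.add( new JumpPoint( 3 ) );      // child 1: jump to child 3 (== size() jumps out)
 *   seq.add( stepB );                   // child 2: skipped by the jump above
 *   seq.add( ProcessController.BREAK ); // child 3: leave the sequence
 *   seq.start();
 */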
} return false; } protected boolean do_process_controller ( ProcessController controller, BranchContext context ) { context.doBreak = this.invoke_process_controller( controller ); if( context.doBreak ) { return true; } if( controller instanceof BreakController ) { context.doBreak = true; return true; } else if( controller instanceof JumpController ) { try{ JumpController jmp = ((JumpController) controller); context.jmpPoint = this.eval_jump_point( jmp.getJumpPoint() ); context.doBreak = false; return true; } catch ( InstantJumpOutBranchException e ){ context.doBreak = true; return true; } } throw new IllegalArgumentException( "ProcessController for Sequential can ONLY be [break, jump]" ); } protected BranchContext init_branch_context() { BranchContext context = new BranchContext( true ); return context; } /** * Wait until all exertions in the pool have synchronized. * Should be overridden by Parallel. */ protected void waiting_exertions_pool_synchronized() { } /** * Wait for a single exertion to synchronize. * Should be overridden by Parallel. */ protected void waiting_for_single_exertion( Exertion exertion ) { } protected boolean is_dfa_status_finished_check_required( Exertion exertion ) { return true; } @Override public void start() { this.getExertium().intoStart(); List<GraphNode> children = this.getChildren(); BranchContext context = this.init_branch_context(); while ( true ) { Iterator<GraphNode> iter; if( context.jmpPoint != null ) { iter = context.jmpPoint; context.jmpPoint = null; } else { iter = children.iterator(); } while ( iter.hasNext() ) { Exertion exertion = (Exertion) iter.next(); if( exertion instanceof ProcessController ) { if( this.do_process_controller( (ProcessController) exertion, context ) ){ break; } } else { this.execute_exertion( exertion ); } //Debug.sleep( 100 ); ++context.nIP; } if( context.doBreak ) { break; } } this.waiting_exertions_pool_synchronized(); this.getExertium().intoFinished(); } @Override public void terminate() { } @Override public void rollback() { } protected class BranchContext { public Iterator<GraphNode> jmpPoint; public boolean doBreak; public int nIP; BranchContext( boolean doBreak ) { this.doBreak = doBreak; } BranchContext() { this( true ); } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchStratum.java ================================================ package com.pinecone.hydra.orchestration; import java.util.ArrayList; import java.util.List; public abstract class ArchStratum implements GraphStratum { protected List<GraphNode> mChildren; protected ArchGraphNode mParent; ArchStratum() { this.mChildren = new ArrayList<>(); } @Override public ArchGraphNode parent() { return this.mParent; } @Override public List<GraphNode> getChildren() { return this.mChildren; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchTransaction.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.hydra.orchestration.regulation.NeglectRegulation; import com.pinecone.hydra.orchestration.regulation.RuntimeNeglector; public abstract class ArchTransaction extends ArchStratum implements Transaction { protected Exertium mExertium; // To implement GraphNode and Exertion.
protected NeglectRegulation mSequentialRuntimeNeglector; protected ExertionEventCallback mExertionStartCB; protected ExertionEventCallback mExertionEndCB; protected ArchTransaction() { super(); this.mExertium = new Exertium(); this.mSequentialRuntimeNeglector = new RuntimeNeglector( this ); } @Override public NeglectRegulation getSeqExceptionNeglector() { return this.mSequentialRuntimeNeglector; } @Override public void setSeqExceptionNeglector( NeglectRegulation neglector ) { this.mSequentialRuntimeNeglector = neglector; } protected Exertium getExertium() { return this.mExertium; } @Override public void registerExertionStartCallback( ExertionEventCallback callback ) { this.mExertionStartCB = callback; } @Override public void registerExertionEndCallback( ExertionEventCallback callback ) { this.mExertionEndCB = callback; } @Override public void setDefaultRollback( boolean b ) { this.getExertium().setDefaultRollback( b ); } @Override public boolean isDefaultRollback() { return this.getExertium().isDefaultRollback(); } @Override public String getName(){ return this.getExertium().getName(); } @Override public void setName( String name ) { this.getExertium().setName( name ); } @Override public IntegrityLevel getIntegrityLevel(){ return this.getExertium().getIntegrityLevel(); } @Override public void setIntegrityLevel( IntegrityLevel level ){ this.getExertium().setIntegrityLevel( level ); } @Override public long getStartNano() { return this.getExertium().getStartNano(); } @Override public int getStratumId() { return this.getExertium().getStratumId(); } protected void beforeAdd( Exertion exertion ) { if( exertion instanceof ArchGraphNode ) { ((ArchGraphNode) exertion).setParent( this ); } } @Override public void add( Exertion exertion ) { this.beforeAdd( exertion ); this.getChildren().add( exertion ); } @Override public void addFirst( Exertion exertion ) { this.beforeAdd( exertion ); this.getChildren().add( 0, exertion ); } @Override public ExertionStatus getStatus() { return this.getExertium().getStatus(); } @Override public Exception getLastError() { return this.mExertium.getLastError(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BooleanCondition.java ================================================ package com.pinecone.hydra.orchestration; public interface BooleanCondition extends Condition { boolean result(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BranchControlException.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.system.prototype.Pinenut; public class BranchControlException extends Exception implements Pinenut { public BranchControlException() { super(); } public BranchControlException( String message ) { super( message ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BranchNoticeException.java ================================================ package com.pinecone.hydra.orchestration; public class BranchNoticeException extends BranchControlException { protected Object noticeMsg; protected boolean noticeAll; public BranchNoticeException() { super(); } public BranchNoticeException( String message ) { super( message ); } public BranchNoticeException( Object noticeMsg, boolean noticeAll, String message ) { super( message ); this.noticeMsg = 
noticeMsg; this.noticeAll = noticeAll; } public BranchNoticeException( Object noticeMsg ) { super(); this.noticeMsg = noticeMsg; } public Object getNoticeMsg() { return this.noticeMsg; } public boolean isNoticeAll() { return this.noticeAll; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BreakController.java ================================================ package com.pinecone.hydra.orchestration; public interface BreakController extends ProcessController { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BreakPoint.java ================================================ package com.pinecone.hydra.orchestration; public class BreakPoint extends ArchIrrevocableController implements BreakController { public BreakPoint() { super(); } @Override public void call() throws BranchControlException { this.start(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/CausalBranch.java ================================================ package com.pinecone.hydra.orchestration; public interface CausalBranch extends Transaction { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Condition.java ================================================ package com.pinecone.hydra.orchestration; public interface Condition { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ContinueController.java ================================================ package com.pinecone.hydra.orchestration; public interface ContinueController extends ProcessController { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ContinuePoint.java ================================================ package com.pinecone.hydra.orchestration; public class ContinuePoint extends ArchIrrevocableController implements ContinueController { public ContinuePoint() { super(); } @Override public void call() throws BranchControlException { this.start(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Exertion.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.system.executum.Chronum; import com.pinecone.framework.system.prototype.Pinenut; public interface Exertion extends Pinenut, GraphNode, Chronum { String getName(); void setName( String name ); IntegrityLevel getIntegrityLevel(); void setIntegrityLevel( IntegrityLevel level ); void reset(); void start(); void terminate(); void rollback(); void setDefaultRollback( boolean b ); boolean isDefaultRollback(); ExertionStatus getStatus(); default boolean isFinished(){ return this.getStatus() == ExertionStatus.FINISHED; } default boolean isInterrupted(){ return this.getStatus() == ExertionStatus.INTERRUPTED; } default boolean isTerminated(){ return this.getStatus() == ExertionStatus.TERMINATED; } // The run has ended.
default boolean isEnded() { return this.getStatus().isEnded(); } Exception getLastError(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ExertionEventCallback.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.system.prototype.Pinenut; public interface ExertionEventCallback extends Pinenut { void callback( Exertion exertion ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ExertionStatus.java ================================================ package com.pinecone.hydra.orchestration; public enum ExertionStatus { NEW ( "New" ), RUNNING ( "Running" ), FINISHED ( "Finished" ), // Completed normally. TERMINATED ( "Terminated" ), // Forced termination. ROLLING ( "Rolling" ), INTERRUPTED ( "Interrupted" ), ERROR ( "Error" ); private final String value; ExertionStatus( String value ){ this.value = value; } public String getName(){ return this.value; } public static ExertionStatus queryStatus( String sz ) { return ExertionStatus.valueOf( sz.toUpperCase() ); } public boolean isEnded() { return this == ExertionStatus.FINISHED || this == ExertionStatus.TERMINATED || this == ExertionStatus.ERROR; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Exertium.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.system.NotImplementedException; public class Exertium extends ArchExertion { @Override protected void doStart() { throw new NotImplementedException(); } @Override protected void doTerminate() { throw new NotImplementedException(); } @Override protected void doRollback() { throw new NotImplementedException(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/GraphNode.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.system.prototype.Pinenut; public interface GraphNode extends Pinenut { int getStratumId(); GraphNode parent(); default GraphNode root() { GraphNode p = this.parent(); if( p == null ) { return this; } return p.root(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/GraphStratum.java ================================================ package com.pinecone.hydra.orchestration; import java.util.List; public interface GraphStratum extends GraphNode { List<GraphNode> getChildren(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/InstantJumpOutBranchException.java ================================================ package com.pinecone.hydra.orchestration; public class InstantJumpOutBranchException extends BranchControlException { public InstantJumpOutBranchException() { super(); } public InstantJumpOutBranchException( String message ) { super( message ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/IntegrityLevel.java ================================================ package com.pinecone.hydra.orchestration; public enum IntegrityLevel { // The transaction must execute absolutely and successfully.
// If an error occurs, it stops the whole transaction graph. Strict ("Strict"), // The transaction is invoked without strict responsibility. // If an error occurs, all errors are ignored (with a traced warning) and the following transactions continue. Warning ("Warning"), // The transaction is invoked without any responsibility. // If an error occurs, all errors are ignored (no warning) and the following transactions continue. Irresponsible ("Irresponsible"); private final String value; IntegrityLevel( String value ){ this.value = value; } public String getName(){ return this.value; } public static String queryName( IntegrityLevel type ) { return type.getName(); } public static IntegrityLevel queryIntegrityLevel( String sz ) { return IntegrityLevel.valueOf( sz ); } public static final String ConfIntegrityLevelKey = "IntegrityLevel"; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/JumpController.java ================================================ package com.pinecone.hydra.orchestration; public interface JumpController extends ProcessController { JumpController setJumpPoint( Object iter ); Object getJumpPoint(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/JumpPoint.java ================================================ package com.pinecone.hydra.orchestration; public class JumpPoint extends ArchIrrevocableController implements JumpController { protected Object mJumpPoint; public JumpPoint( Object jumpPoint ) { super(); this.mJumpPoint = jumpPoint; } @Override public JumpPoint setJumpPoint( Object jumpPoint ) { this.mJumpPoint = jumpPoint; return this; } @Override public Object getJumpPoint() { return this.mJumpPoint; } @Override public void call() throws BranchControlException { this.start(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Loop.java ================================================ package com.pinecone.hydra.orchestration; public interface Loop extends Transaction { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/LoopAction.java ================================================ package com.pinecone.hydra.orchestration; public class LoopAction extends ArchLoop { public LoopAction() { super(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Notifiable.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.system.prototype.Pinenut; public interface Notifiable extends Pinenut { void notice( BranchNoticeException e ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Parallel.java ================================================ package com.pinecone.hydra.orchestration; public interface Parallel extends Transaction { void notifyFinished ( Exertion exertion ); void notifyExecuting ( Exertion exertion ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ParallelAction.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.hydra.orchestration.parallel.ParallelExertion; import
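/*
 * [Editor's note] Parallel composition sketch (hypothetical `stepA`/`stepB` exertions):
 * wrap() lifts a plain Exertion into a ParallelExertion that runs on its own master thread:
 *
 *   ParallelAction par = new ParallelAction();
 *   par.add( ParallelAction.wrap( stepA ).setDetach() );
 *   par.add( ParallelAction.wrap( stepB ).setForceSynchronized() );
 *   par.start(); // returns after force-synchronized children signal and the Phaser drains
 */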
com.pinecone.hydra.orchestration.parallel.WrappedMasterParallelium; public class ParallelAction extends ArchParallel { public ParallelAction() { super(); } public static ParallelExertion wrap( Exertion exertion ) { return new WrappedMasterParallelium( exertion ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ProcessController.java ================================================ package com.pinecone.hydra.orchestration; public interface ProcessController extends Exertion { ProcessController BREAK = new BreakPoint(); ProcessController CONTINUE = new ContinuePoint(); void call() throws BranchControlException ; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Sequential.java ================================================ package com.pinecone.hydra.orchestration; public interface Sequential extends Transaction { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/SequentialAction.java ================================================ package com.pinecone.hydra.orchestration; public class SequentialAction extends ArchSequential { public SequentialAction() { super(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Transaction.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.hydra.orchestration.regulation.NeglectRegulation; import com.pinecone.hydra.system.flow.Stage; public interface Transaction extends Exertion, Stage { void add( Exertion exertion ); void addFirst( Exertion exertion ); NeglectRegulation getSeqExceptionNeglector(); void setSeqExceptionNeglector( NeglectRegulation neglector ) ; void registerExertionStartCallback( ExertionEventCallback callback ); void registerExertionEndCallback( ExertionEventCallback callback ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/UnfulfilledActionException.java ================================================ package com.pinecone.hydra.orchestration; import com.pinecone.framework.system.PineRuntimeException; public class UnfulfilledActionException extends PineRuntimeException { protected Exertion exertion; public UnfulfilledActionException( Exertion exertion ) { this( null, "", exertion ); } public UnfulfilledActionException( Throwable cause, String message, Exertion exertion ) { super( message, cause ); this.exertion = exertion; } public UnfulfilledActionException( String message, Exertion exertion ) { this( null, message, exertion ); } public UnfulfilledActionException ( Throwable cause ) { super(cause); } public Exertion getExertion() { return this.exertion; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/parallel/ArchMasterParallelium.java ================================================ package com.pinecone.hydra.orchestration.parallel; import com.pinecone.framework.system.executum.ArchThreadum; import com.pinecone.framework.system.executum.Executum; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.orchestration.ArchExertion; import com.pinecone.hydra.orchestration.GraphNode; import com.pinecone.hydra.orchestration.Parallel; import 
com.pinecone.hydra.orchestration.UnfulfilledActionException; public abstract class ArchMasterParallelium extends ArchExertion implements ParallelExertion { protected boolean mbIsDetachedParallelium = true; protected long mnMaximumExecutionMillis = -1; protected boolean mbIsForceSynchronized = false; protected final Object mFinaleLock = new Object(); protected Processum mParentProcessum = null; protected Executum mMasterExecutum = new ArchThreadum( null, this.mParentProcessum ) { @Override public void apoptosis() { super.apoptosis(); //TODO } }; protected Thread mMasterStartWrapThread = new Thread( new Runnable() { protected ArchMasterParallelium ium = ArchMasterParallelium.this; @Override public void run() { this.ium.invokeMasterSeqStart(); } }); protected Thread mMasterRollbackWrapThread = new Thread( new Runnable() { protected ArchMasterParallelium ium = ArchMasterParallelium.this; @Override public void run() { this.ium.invokeMasterSeqRollback(); } }); public ArchMasterParallelium() { } protected void invokeMasterSeqStart(){ try{ super.start(); } finally { this.after_master_thread_finished(); } } protected void invokeMasterSeqRollback(){ try{ super.rollback(); } finally { this.after_master_thread_finished(); } } protected void before_master_thread_executing() { GraphNode parent = this.parent(); if( parent instanceof Parallel) { ((Parallel) parent).notifyExecuting( this ); } } protected void join_master_thread_if_is() { if( this.isJoined() ) { try{ if( this.mnMaximumExecutionMillis <= 0 ) { this.getMasterExecutum().getAffiliateThread().join(); } else { this.getMasterExecutum().getAffiliateThread().join( this.mnMaximumExecutionMillis ); } } catch ( InterruptedException e ) { throw new UnfulfilledActionException( e ); } } } protected void after_master_thread_finished() { this.releaseFinaleLock(); GraphNode parent = this.parent(); if( parent instanceof Parallel) { ((Parallel) parent).notifyFinished( this ); } } @Override public Executum getMasterExecutum(){ return this.mMasterExecutum; } @Override public void releaseFinaleLock(){ if( this.isForceSynchronized() ) { synchronized ( this.mFinaleLock ) { this.mFinaleLock.notify(); } } } @Override public void start() { this.before_master_thread_executing(); this.mMasterExecutum.setThreadAffinity( this.mMasterStartWrapThread ); this.mMasterExecutum.getAffiliateThread().start(); this.join_master_thread_if_is(); } @Override public boolean isForceSynchronized() { return this.mbIsForceSynchronized; } @Override public void terminate() { super.terminate(); this.mMasterExecutum.kill(); } @Override public void rollback() { this.before_master_thread_executing(); this.mMasterExecutum.setThreadAffinity( this.mMasterRollbackWrapThread ); this.mMasterExecutum.getAffiliateThread().start(); } @Override public Object getFinaleLock(){ return this.mFinaleLock; } @Override public ParallelExertion setForceSynchronized() { this.mbIsForceSynchronized = true; return this; } @Override public ParallelExertion setNoneSynchronized() { this.mbIsForceSynchronized = false; return this; } @Override public boolean isDetached() { return this.mbIsDetachedParallelium; } @Override public boolean isJoined() { return !this.mbIsDetachedParallelium; } @Override public ParallelExertion setDetach() { this.mbIsDetachedParallelium = true; return this; } @Override public ParallelExertion setJoin() { this.mbIsDetachedParallelium = false; return this; } @Override public ParallelExertion setMaximumExecutionTime( long millis ) { this.mnMaximumExecutionMillis = millis; return this; } @Override 
public long getMaximumExecutionTime() { return this.mnMaximumExecutionMillis; } @Override public String nomenclature( Thread that ) { return String.format( "action-%s-%s", this.getName(), that.getName() ).toLowerCase(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/parallel/ParallelExertion.java ================================================ package com.pinecone.hydra.orchestration.parallel; import com.pinecone.framework.system.executum.Executum; import com.pinecone.hydra.orchestration.Exertion; public interface ParallelExertion extends Exertion { Object getFinaleLock(); boolean isForceSynchronized(); ParallelExertion setForceSynchronized(); ParallelExertion setNoneSynchronized(); /** * The FinaleLock controls the final synchronization of the parent sequential action list. * Explicitly releasing this lock at runtime immediately lifts the 'ForceSynchronized' barrier. */ void releaseFinaleLock(); boolean isDetached(); boolean isJoined(); ParallelExertion setDetach(); ParallelExertion setJoin(); Executum getMasterExecutum(); ParallelExertion setMaximumExecutionTime( long millis ); long getMaximumExecutionTime(); String nomenclature ( Thread that ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/parallel/WrappedMasterParallelium.java ================================================ package com.pinecone.hydra.orchestration.parallel; import com.pinecone.hydra.orchestration.Exertion; public class WrappedMasterParallelium extends ArchMasterParallelium { protected Exertion mWrapped; public WrappedMasterParallelium( Exertion exertion ) { this.mWrapped = exertion; } @Override protected void doStart() { this.mWrapped.start(); } @Override protected void doTerminate() { this.mWrapped.terminate(); } @Override protected void doRollback() { this.mWrapped.rollback(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/recorder/ActionTape.java ================================================ package com.pinecone.hydra.orchestration.recorder; public interface ActionTape { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/regulation/NeglectRegulation.java ================================================ package com.pinecone.hydra.orchestration.regulation; public interface NeglectRegulation extends Regulation { boolean isNeglectException( Exception e ); void add( Class<? extends Exception> stereotype ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/regulation/Regulation.java ================================================ package com.pinecone.hydra.orchestration.regulation; import com.pinecone.framework.system.prototype.Pinenut; public interface Regulation extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/regulation/RuntimeNeglector.java ================================================ package com.pinecone.hydra.orchestration.regulation; import com.pinecone.hydra.orchestration.Exertion; import java.util.ArrayList; import java.util.List; public class RuntimeNeglector implements NeglectRegulation { protected List<Class<? extends Exception>> mNeglectExceptions; protected Exertion mParentExertion; public
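/*
 * [Editor's note] Usage sketch (assumes `seq` is a Transaction): a neglect regulation
 * whitelists exception stereotypes that a sequential run may swallow.
 *
 *   NeglectRegulation reg = new RuntimeNeglector( seq );
 *   reg.add( UnfulfilledActionException.class );
 *   seq.setSeqExceptionNeglector( reg );
 *
 * isNeglectException() matches with Class.isInstance, so subclasses are neglected as well.
 */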
RuntimeNeglector( Exertion parent ) { this.mParentExertion = parent; this.mNeglectExceptions = new ArrayList<>(); } public List<Class<? extends Exception>> getNeglectExceptions() { return this.mNeglectExceptions; } public Exertion getParentExertion() { return this.mParentExertion; } @Override public void add( Class<? extends Exception> stereotype ) { this.getNeglectExceptions().add( stereotype ); } @Override public boolean isNeglectException( Exception e ){ List<Class<? extends Exception>> neglectExceptions = this.getNeglectExceptions(); for( Class<? extends Exception> c : neglectExceptions ) { if( c.isInstance( e ) ) { return true; } } return false; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ArchProcessManager.java ================================================ package com.pinecone.hydra.proc; public abstract class ArchProcessManager implements ProcessManager { protected abstract void expunge( UProcess that ); public static void invokeExpunge( ArchProcessManager manager, UProcess that ) { manager.expunge( that ); } public static void invokeExpunge( ProcessManager pm, UProcess that ) { if ( pm instanceof ArchProcessManager ) { ((ArchProcessManager) pm).expunge( that ); } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ArchUProcess.java ================================================ package com.pinecone.hydra.proc; import java.time.LocalDateTime; import java.util.Map; import com.pinecone.framework.system.ApoptosisRejectSignalException; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Executum; import com.pinecone.framework.system.executum.Lifecycle; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.executum.TaskManager; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.entity.ElementNode; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.ns.ProcSpace; import com.pinecone.hydra.proc.tomb.ResurgentTombstone; import com.pinecone.hydra.proc.tomb.RuntimeTombstone; import com.pinecone.hydra.system.ko.entity.ObjectTable; public abstract class ArchUProcess implements UProcess { protected Processum mLocalProcess; protected GUID mProcessID; protected GUID mParentPID; protected ObjectTable mObjectTable; protected ProcSpace mProcSpace; // TODO, Namespace, Hydra V3 protected RuntimeTombstone mRuntimeTombstone; // TODO, Tombstone, Hydra V2.7 protected ProcessActionTape mActionTape; protected ProcessManager mProcessManager; protected ExecutionImage mExecutionImage; protected Map mStartupArgs; protected Map mEnvironmentVars; protected ControllableLevel mControllableLevel; protected LocalDateTime mEndTime; protected LocalDateTime mLastUpdateTime; public ArchUProcess( @Nullable Processum localProcess, GUID guid, String szName, @Nullable UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace, Map startupArgs, Map environmentVars ) { this.mLocalProcess = localProcess; this.mProcessManager = processManager; this.mProcessID = guid; this.mExecutionImage = image; this.mProcSpace = procSpace; this.mRuntimeTombstone = new ResurgentTombstone(); this.mStartupArgs = startupArgs; this.mEnvironmentVars = environmentVars; this.mControllableLevel = image.getControllableLevel(); this.mActionTape = new GenericProcessActionTape(); if ( this.mLocalProcess == null )
{ this.mLocalProcess = new LocalSystemProcess( szName, parent ); } if ( parent != null ) { this.mParentPID = parent.getPID(); } } public ArchUProcess( @Nullable Processum localSystemProc, String szName, @Nullable UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace, Map startupArgs, Map environmentVars ) { this( localSystemProc, processManager.getGuidAllocator().nextGUID(), szName, parent, processManager, image, procSpace, startupArgs, environmentVars ); } public ArchUProcess( Processum localSystemProc, @Nullable UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace, Map startupArgs, Map environmentVars ) { this( localSystemProc, processManager.getGuidAllocator().nextGUID(), localSystemProc.getName(), parent, processManager, image, procSpace, startupArgs, environmentVars ); } @Override public Processum affinityLocalProcess() { return this.mLocalProcess; } @Override public ProcessActionTape actionTape() { return this.mActionTape; } @Override public GUID getGuid() { return this.mProcessID; } @Override public long getLocalPID() { return this.getExecutumId(); } @Override public ElementNode getAccount() { return null; } @Override public UProcess parentProcess() { return (UProcess) this.parentExecutum(); } @Override public GUID actualParentPID() { return this.mParentPID; } @Override public void applyActualParentPID( GUID pid ) { this.mParentPID = pid; } @Override public ProcessManager getOwnedProcessManager() { return this.mProcessManager; } @Override public GUID getParentProcessId() { if ( this.parentProcess() != null ) { return this.parentProcess().getGuid(); } return null; } @Override public long getParentLocalPID() { if ( this.parentProcess() != null ) { return this.parentProcess().getLocalPID(); } return -1; } @Override public ProcSpace getProcNamespace() { return this.mProcSpace; } @Override public RuntimeTombstone getRuntimeTombstone() { return this.mRuntimeTombstone; } @Override public Map getStartupArguments() { return this.mStartupArgs; } @Override public Map getEnvironmentVariables() { return this.mEnvironmentVars; } @Override public ObjectTable getObjectTable() { return this.mObjectTable; } @Override public ExecutionImage getExecutionImage() { return this.mExecutionImage; } @Override public ControllableLevel getControllableLevel() { return this.mControllableLevel; } @Override public LocalDateTime getEndTime() { return this.mEndTime; } @Override public LocalDateTime getLastUpdateTime() { return this.mLastUpdateTime; } @Override public void triggerUpdateTerminationStatus() { if ( this.getState() != Thread.State.TERMINATED ) { throw new IllegalStateException( "Bad time to trigger, I am still alive!" 
); } this.triggerAfterRunnableTerminationStatus(); } @Override public void triggerAfterRunnableTerminationStatus() { this.mLastUpdateTime = LocalDateTime.now(); this.mEndTime = LocalDateTime.now(); } /** Proxied Processum **/ @Override public Map getOwnThreadGroup() { return this.mLocalProcess.getOwnThreadGroup(); } @Override public TaskManager getTaskManager() { return this.mLocalProcess.getTaskManager(); } @Override public LocalDateTime getCreateTime() { return this.mLocalProcess.getCreateTime(); } @Override public LocalDateTime getStartTime() { return this.mLocalProcess.getStartTime(); } @Override public String getName() { return this.mLocalProcess.getName(); } @Override public void setName( String szName ) { this.mLocalProcess.setName( szName ); } @Override public long getExecutumId() { return this.mLocalProcess.getExecutumId(); } @Override public RuntimeSystem parentSystem() { return this.mLocalProcess.parentSystem(); } @Override public RuntimeSystem revealNearestSystem() { return this.mLocalProcess.revealNearestSystem(); } @Override public Executum parentExecutum() { return this.mLocalProcess.parentExecutum(); } @Override public Executum setThreadAffinity( Thread affinity ) { return this.mLocalProcess.setThreadAffinity( affinity ); } @Override public Thread getAffiliateThread() { return this.mLocalProcess.getAffiliateThread(); } @Override public boolean isTerminated() { return this.mLocalProcess.isTerminated(); } @Override public void start() { this.mLocalProcess.start(); } @Override public void apoptosis() throws ApoptosisRejectSignalException { this.mLocalProcess.apoptosis(); } @Override public void kill() { this.mLocalProcess.kill(); } @Override public void interrupt() { this.mLocalProcess.interrupt(); } @Override public void suspend() { this.mLocalProcess.suspend(); } @Override public void resume() { this.mLocalProcess.resume(); } @Override public void entreatLive() { this.mLocalProcess.entreatLive(); } @Override public Thread.State getState() { return this.mLocalProcess.getState(); } @Override public int getExceptionRestartTime() { return this.mLocalProcess.getExceptionRestartTime(); } @Override public Lifecycle applyExceptionRestartTime( int time ) { return this.mLocalProcess.applyExceptionRestartTime( time ); } /** Proxied Processum End **/ static class LocalSystemProcess extends ArchProcessum { LocalSystemProcess ( String szName, Processum parent ) { super( szName, parent ); } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ControllableLevel.java ================================================ package com.pinecone.hydra.proc; public enum ControllableLevel { None ( 0x00, "None" ), Monitor ( 0x01, "Monitor" ), Weak ( 0x02, "Weak" ), Absolute ( 0x03, "Absolute" ), ; private final int code; private final String name; ControllableLevel( int code, String name ) { this.code = code; this.name = name; } public int getCode() { return this.code; } public String getName() { return this.name; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/GenericProcessActionTape.java ================================================ package com.pinecone.hydra.proc; public class GenericProcessActionTape implements ProcessActionTape { protected Throwable mLastError; protected int mExitCode; public GenericProcessActionTape() { } @Override public Throwable getLastError() { return this.mLastError; } @Override public void setLastError( Throwable 
lastError ) { this.mLastError = lastError; } @Override public int getExitCode() { return this.mExitCode; } @Override public void setExitCode( int exitCode ) { this.mExitCode = exitCode; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/InstitutionalProcess.java ================================================ package com.pinecone.hydra.proc; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; /** * Pinecone Ursus For Java, InstitutionalProcess * Author: Harald.E (Dragon King) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Institutional Uniform Process * ***************************************************************************************** * 1). Processum => Local Process, managed under local-first autonomy and jurisdictional control. * 2). UProcess => Uniform Process, * centrally constituted and managed by unified authority, with reserved central control rights. * ***************************************************************************************** */ public interface InstitutionalProcess extends Processum { default Processum ownedLocalProcess() { return this; } UProcess ownedUniformProcess(); default GUID getPID() { return this.ownedUniformProcess().getPID(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/KernelProcess.java ================================================ package com.pinecone.hydra.proc; public interface KernelProcess extends LocalUProcess { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/LineageProcessEnvironmentSection.java ================================================ package com.pinecone.hydra.proc; import java.util.HashMap; import java.util.Map; public class LineageProcessEnvironmentSection implements ProcessEnvironmentSection { protected Map mSystemEnvironments; public LineageProcessEnvironmentSection( Map systemEnvironmentVars ) { this.mSystemEnvironments = systemEnvironmentVars; } @Override public Map getSystemEnvironments() { return this.mSystemEnvironments; } @Override public Map extendsFrom( final Map superiorEnvironmentVars, final Map contextEnvVars ) { Map neo = new HashMap<>( this.mSystemEnvironments ); neo.putAll( superiorEnvironmentVars ); if ( contextEnvVars != null ) { neo.putAll( contextEnvVars ); } return neo; } @Override public Map extendsFrom( UProcess superiorProcess, final Map contextEnvVars ) { return this.extendsFrom( superiorProcess.getEnvironmentVariables(), contextEnvVars ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/LocalHostedProcess.java ================================================ package com.pinecone.hydra.proc; import java.util.Map; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.ns.ProcSpace; public class LocalHostedProcess extends ArchUProcess implements
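/*
 * [Editor's note] Environment-lineage sketch: extendsFrom() layers maps with
 * system < superior process < call-site precedence. Hypothetical values; Map.of (Java 9+)
 * is used only for brevity:
 *
 *   ProcessEnvironmentSection section = new LineageProcessEnvironmentSection( sysEnv );
 *   Map env = section.extendsFrom( parentProc, Map.of( "MODE", "debug" ) );
 *   // context entries override the parent's, which override sysEnv's
 */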
LocalUProcess { public LocalHostedProcess( @Nullable Processum localSystemProc, GUID guid, String szName, UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace, Map startupArgs, Map environmentVars ) { super( localSystemProc, guid, szName, parent, processManager, image, procSpace, startupArgs, environmentVars ); } public LocalHostedProcess( @Nullable Processum localSystemProc, String szName, UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace, Map startupArgs, Map environmentVars ) { this( localSystemProc, processManager.getGuidAllocator().nextGUID(), szName, parent, processManager, image, procSpace, startupArgs, environmentVars ); } public LocalHostedProcess( @Nullable Processum localSystemProc, UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace, Map startupArgs, Map environmentVars ) { this( localSystemProc, processManager.getGuidAllocator().nextGUID(), image.getName(), parent, processManager, image, procSpace, startupArgs, environmentVars ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/LocalUProcess.java ================================================ package com.pinecone.hydra.proc; public interface LocalUProcess extends UProcess { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessActionTape.java ================================================ package com.pinecone.hydra.proc; import com.pinecone.framework.system.prototype.Pinenut; public interface ProcessActionTape extends Pinenut { Throwable getLastError(); void setLastError( Throwable lastError ); int getExitCode(); void setExitCode( int exitCode ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessEnvironmentSection.java ================================================ package com.pinecone.hydra.proc; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; public interface ProcessEnvironmentSection extends Pinenut { Map getSystemEnvironments(); Map extendsFrom( final Map superiorEnvironmentVars, final Map contextEnvVars ); default Map extendsFrom( final Map superiorEnvironmentVars ) { return this.extendsFrom( superiorEnvironmentVars, null ); } Map extendsFrom( UProcess superiorProcess, final Map contextEnvVars ); default Map extendsFrom( UProcess superiorProcess ) { return this.extendsFrom( superiorProcess, null ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessManager.java ================================================ package com.pinecone.hydra.proc; import java.util.Collection; import java.util.Map; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.regime.Regiment; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.image.ImageLoader; import com.pinecone.hydra.proc.image.ImageModifier; import com.pinecone.hydra.system.ko.CascadeKernelObjectInstrument; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.QueryableInstrument; public interface 
ProcessManager extends CascadeKernelObjectInstrument, Regiment, Manager, QueryableInstrument {
    Processum superiorProcess();

    UProcess getRootUProcess();

    ImageLoader getImageLoader();

    ProcessManagerConfig getKernelObjectConfig();

    void applyRootUProcess( UProcess rootUProcess );

    RuntimeSystem superiorSystem();

    void applyGuidAllocator( GuidAllocator guidAllocator );

    long getVitalizeCount();

    long getFatalityCount();

    long processCount();

    Collection<UProcess> fetchProcesses();

    // Process clearance rate, which helps load balancing and dispatch. [e.g. using a priority queue.]
    // e.g. 100 vitalized with 30 expunged => 30 / ( 100 + 30 ) ≈ 0.23.
    default double getClearanceRate() {
        double nFatality = this.getFatalityCount();
        double nVitalize = this.getVitalizeCount();
        return nFatality / ( nVitalize + nFatality );
    }

    void register( UProcess that );

    void erase( UProcess that );

    boolean autopsy( UProcess that );

    LocalUProcess createLocalHostedProcess( ExecutionImage image, UProcess parent, Map startupArgs, Map contextEnvironmentVars );

    UProcess getProcess( GUID pid );

    Collection<UProcess> searchProcessesByName( String procName );

    Collection<UProcess> searchProcessesByNameNoCase( String procName );

    boolean containProcess( GUID pid );

    default LocalUProcess createLocalHostedProcess( ExecutionImage image, UProcess parent, Map startupArgs ) {
        return this.createLocalHostedProcess( image, parent, startupArgs, null );
    }

    ImageModifier getImageModifier();
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessManagerConfig.java
================================================
package com.pinecone.hydra.proc;

import com.pinecone.hydra.system.ko.KernelObjectConfig;

public interface ProcessManagerConfig extends KernelObjectConfig {
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessManagerSystema.java
================================================
package com.pinecone.hydra.proc;

import com.pinecone.hydra.system.Hydrogen;

public interface ProcessManagerSystema extends Hydrogen {
    ProcessManager processManager();
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/RemoteUProcess.java
================================================
package com.pinecone.hydra.proc;

public interface RemoteUProcess extends UProcess {
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/UProcess.java
================================================
package com.pinecone.hydra.proc;

import java.time.LocalDateTime;
import java.util.Map;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.proc.entity.ProcessElement;
import com.pinecone.hydra.proc.image.ExecutionImage;
import com.pinecone.hydra.proc.ns.ProcSpace;
import com.pinecone.hydra.proc.tomb.RuntimeTombstone;
import com.pinecone.hydra.system.ko.entity.ObjectTable;

public interface UProcess extends Processum, ProcessElement {
    ProcessActionTape actionTape();

    UProcess parentProcess();

    GUID actualParentPID();

    void applyActualParentPID( GUID pid );

    ProcessManager getOwnedProcessManager();

    ProcSpace getProcNamespace();

    RuntimeTombstone getRuntimeTombstone();

    ObjectTable getObjectTable();

    ExecutionImage getExecutionImage();

    ControllableLevel getControllableLevel();

    LocalDateTime getEndTime();

    LocalDateTime getLastUpdateTime();

    Map getStartupArguments();

    Map getEnvironmentVariables();

    Processum affinityLocalProcess();
    void triggerUpdateTerminationStatus();

    void triggerAfterRunnableTerminationStatus();
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/UniformProcessConfig.java
================================================
package com.pinecone.hydra.proc;

import com.pinecone.hydra.system.ko.ArchKernelObjectConfig;

import java.util.Map;

public class UniformProcessConfig extends ArchKernelObjectConfig implements ProcessManagerConfig {
    public UniformProcessConfig( Map config ) {
        super( config );
    }

    public UniformProcessConfig() {
        super();
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/UniformProcessManager.java
================================================
package com.pinecone.hydra.proc;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.system.RuntimeSystem;
import com.pinecone.framework.system.executum.ArchProcessum;
import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.framework.util.lang.DynamicFactory;
import com.pinecone.framework.util.lang.GenericDynamicFactory;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.proc.image.ExecutionImage;
import com.pinecone.hydra.proc.image.ImageLoader;
import com.pinecone.hydra.proc.image.ImageModifier;
import com.pinecone.hydra.proc.image.SafeImageModifier;
import com.pinecone.hydra.proc.image.UniformMultiScopeImageLoader;
import com.pinecone.hydra.proc.ns.GenericSegregationSpace;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.centrum.UniformCentralSystem;
import com.pinecone.hydra.system.ko.CascadeInstrument;
import com.pinecone.hydra.system.ko.KernelObjectConfig;
import com.pinecone.hydra.unit.imperium.entity.EntityNode;
import com.pinecone.ulf.util.guid.GUIDs;

public class UniformProcessManager extends ArchProcessManager implements ProcessManager {
    protected long                       mnVitalizeCount = 0;
    protected long                       mnFatalityCount = 0;
    protected String                     mSuperiorPathScope;
    protected Namespace                  mThisNamespace;
    protected GuidAllocator              mGuidAllocator;
    protected Processum                  mSuperiorProcess;
    protected UProcess                   mRootUProcess;
    protected RuntimeSystem              mSuperiorSystem;
    protected CascadeInstrument          mParentInstrument;
    protected KernelObjectConfig         mKernelObjectConfig;
    protected DynamicFactory             mDynamicFactory;
    protected Map<GUID, UProcess>        mProcessMap;
    protected ImageLoader                mImageLoader;
    protected ProcessEnvironmentSection  mProcessEnvironmentSection;
    protected ImageModifier              mImageModifier;

    public UniformProcessManager ( Processum superiorProcess, CascadeInstrument parentInstrument, String name, String superiorPathScope, KernelObjectConfig config, @Nullable ImageLoader imageLoader, @Nullable GuidAllocator guidAllocator ) {
        this.mSuperiorPathScope = superiorPathScope;
        this.mSuperiorProcess   = superiorProcess;
        this.mParentInstrument  = parentInstrument;
        this.mProcessMap        = new ConcurrentHashMap<>();
        this.mKernelObjectConfig = config;
        this.mGuidAllocator     = guidAllocator;
        this.mDynamicFactory    = new GenericDynamicFactory( superiorProcess.getTaskManager().getClassLoader() );
        this.mImageLoader       = imageLoader;
        this.mProcessEnvironmentSection = new LineageProcessEnvironmentSection( this.mSuperiorProcess.parentSystem().getEnvironmentVars() );
        this.mImageModifier = new SafeImageModifier();
        this.setTargetingName( name );
        if ( this.mSuperiorProcess != null ) {
            if ( this.mSuperiorProcess instanceof RuntimeSystem ) {
                this.mSuperiorSystem = (RuntimeSystem) this.mSuperiorProcess;
            }
            else {
                this.mSuperiorSystem = this.mSuperiorProcess.parentSystem();
            }
            if ( this.mSuperiorSystem instanceof UniformCentralSystem ) {
                UniformCentralSystem system = (UniformCentralSystem) this.mSuperiorSystem;
                if ( this.mGuidAllocator == null ) {
                    this.mGuidAllocator = system.getSystemGuidAllocator();
                }
                if ( this.mImageLoader == null ) {
                    this.mImageLoader = (ImageLoader) system.imageLoader();
                }
            }
        }
        if ( this.mGuidAllocator == null ) {
            throw new IllegalArgumentException( "GUIDAllocator is undefined." );
        }
        if ( this.mImageLoader == null ) {
            this.mImageLoader = new UniformMultiScopeImageLoader( (Hydrogen) this.superiorSystem(), (HyComponent) null );
        }
        if ( this.mSuperiorProcess instanceof UProcess ) {
            this.applyRootUProcess( (UProcess) this.mSuperiorProcess );
        }
        else if ( this.mSuperiorProcess instanceof InstitutionalProcess ) {
            this.applyRootUProcess( ( (InstitutionalProcess) this.mSuperiorProcess ).ownedUniformProcess() );
        }
    }

    public UniformProcessManager ( Processum superiorProcess, CascadeInstrument parentInstrument, String name, String superiorPathScope, KernelObjectConfig config ) {
        this( superiorProcess, parentInstrument, name, superiorPathScope, config, null, null );
    }

    @Override
    public ImageModifier getImageModifier() {
        return this.mImageModifier;
    }

    @Override
    public void applyGuidAllocator( GuidAllocator guidAllocator ) {
        this.mGuidAllocator = guidAllocator;
    }

    @Override
    public Processum superiorProcess() {
        return this.mSuperiorProcess;
    }

    @Override
    public UProcess getRootUProcess() {
        return this.mRootUProcess;
    }

    @Override
    public ImageLoader getImageLoader() {
        return this.mImageLoader;
    }

    @Override
    public ProcessManagerConfig getKernelObjectConfig() {
        return (ProcessManagerConfig) this.mKernelObjectConfig;
    }

    @Override
    public void applyRootUProcess( UProcess rootUProcess ) {
        this.mRootUProcess = rootUProcess;
    }

    @Override
    public RuntimeSystem superiorSystem() {
        return this.mSuperiorSystem;
    }

    @Override
    public CascadeInstrument parent() {
        return this.mParentInstrument;
    }

    @Override
    public void setParent( CascadeInstrument parent ) {
        this.mParentInstrument = parent;
    }

    @Override
    public Namespace getTargetingName() {
        return this.mThisNamespace;
    }

    @Override
    public void setTargetingName( Namespace name ) {
        this.mThisNamespace = name;
    }

    @Override
    public String getSuperiorPathScope() {
        return this.mSuperiorPathScope;
    }

    @Override
    public void applySuperiorPathScope( String superiorPathScope ) {
        this.mSuperiorPathScope = superiorPathScope;
    }

    @Override
    public GuidAllocator getGuidAllocator() {
        return this.mGuidAllocator;
    }

    @Override
    public KernelObjectConfig getConfig() {
        return this.mKernelObjectConfig;
    }

    @Override
    public long getVitalizeCount() {
        return this.mnVitalizeCount;
    }

    @Override
    public long getFatalityCount() {
        return this.mnFatalityCount;
    }

    @Override
    public long processCount() {
        return this.mProcessMap.size();
    }

    @Override
    public Collection<UProcess> fetchProcesses() {
        return this.mProcessMap.values();
    }

    @Override
    public void register( UProcess that ) {
        if( !this.autopsy( that ) ) {
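            // Liveness gate: autopsy( that ) reports true only for TERMINATED processes, so only live processes are admitted here.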
            this.mProcessMap.put( that.getPID(), that );
            ++this.mnVitalizeCount;
        }
        else {
            throw new IllegalStateException( "Process is dead." );
        }
    }

    @Override
    public void erase( UProcess that ) {
        if( this.autopsy( that ) ) {
            this.expunge( that );
        }
        else {
            throw new IllegalStateException( "Process is still alive." );
        }
    }

    @Override
    protected void expunge( UProcess that ) {
        this.mProcessMap.remove( that.getPID() );
        ++this.mnFatalityCount;
        that.triggerAfterRunnableTerminationStatus();
    }

    @Override
    public boolean autopsy( UProcess that ) {
        return that.getState() == Thread.State.TERMINATED;
    }

    @Override
    public LocalUProcess createLocalHostedProcess( ExecutionImage image, UProcess parent, Map startupArgs, Map contextEnvironmentVars ) {
        if ( parent == null ) {
            parent = this.mRootUProcess;
        }
        Processum hosted = new ArchProcessum( image.getName(), parent ) {};
        Thread primaryThread = new Thread( image.getEntryPoint(), ( image.getName() + "-main" ).toLowerCase() );
        hosted.setThreadAffinity( primaryThread );
        if ( startupArgs == null ) {
            startupArgs = new HashMap<>();
        }
        LocalUProcess process = new LocalHostedProcess(
            hosted, parent, this, image, new GenericSegregationSpace(), startupArgs,
            this.mProcessEnvironmentSection.extendsFrom( parent, contextEnvironmentVars )
        );
        // Register the process in the entry-point-runnable for process status surveillance purposes.
        image.getEntryPoint().applyOwnedProcess( process );
        this.register( process );
        return process;
    }

    @Override
    public UProcess getProcess( GUID pid ) {
        return this.mProcessMap.get( pid );
    }

    @Override
    public Collection<UProcess> searchProcessesByName( String procName ) {
        Collection<UProcess> li = new ArrayList<>();
        for( UProcess process : this.mProcessMap.values() ) {
            if ( process.getName().equals( procName ) ) {
                li.add( process );
            }
        }
        return li;
    }

    @Override
    public Collection<UProcess> searchProcessesByNameNoCase( String procName ) {
        Collection<UProcess> li = new ArrayList<>();
        for( UProcess process : this.mProcessMap.values() ) {
            if ( process.getName().equalsIgnoreCase( procName ) ) {
                li.add( process );
            }
        }
        return li;
    }

    @Override
    public boolean containProcess( GUID pid ) {
        return this.mProcessMap.containsKey( pid );
    }

    @Override
    public String getPath( GUID objectGuid ) {
        return objectGuid.toString(); // Process PID is the path in this case.
        // /proc/${pid}
    }

    @Override
    public String querySystemKernelObjectPath( GUID objectGuid ) {
        String thisScopePath = this.getPath( objectGuid );
        if ( thisScopePath == null ) {
            return null;
        }
        return this.getSuperiorPathScope() + this.getConfig().getPathNameSeparator() + thisScopePath;
    }

    @Override
    public GUID queryGUIDByPath( String path ) {
        EntityNode node = this.queryNode( path );
        return node == null ? null : node.getGuid();
    }

    @Override
    public EntityNode queryNode( String path ) {
        String pathSeparator = this.getKernelObjectConfig().getPathNameSeparator();
        String[] split = path.split( pathSeparator ); // NOTE: String.split treats the separator as a regex.
        if( split.length == 0 ) {
            return null;
        }
        if( split.length > 1 ) {
            // TODO: Multi-segment path lookup logic to be completed later.
            return null;
        }
        else {
            GUID guid = this.mGuidAllocator.parse( split[0] );
            if ( guid == null ) {
                return null;
            }
            return this.mProcessMap.get( guid );
        }
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/entity/ElementNode.java
================================================
package com.pinecone.hydra.proc.entity;

import com.pinecone.hydra.system.ko.meta.ElementObject;

public interface ElementNode extends ElementObject {
    @Override
    default String objectCategoryName() {
        return "Process";
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/entity/ProcessElement.java
================================================
package com.pinecone.hydra.proc.entity;

import com.pinecone.framework.util.id.GUID;

public interface ProcessElement extends ElementNode {
    String getName();

    long getLocalPID();

    default GUID getPID() {
        return this.getGuid();
    }

    @Override
    GUID getGuid();

    GUID getParentProcessId();

    long getParentLocalPID();

    Thread.State getState();

    ElementNode getAccount();
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/event/ProcessEvent.java
================================================
package com.pinecone.hydra.proc.event;

public enum ProcessEvent {
    Prepare    ( 0x00, "Prepare" ), // Prepare to start, no image loaded.
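    // Typical lifecycle order by event code: Prepare -> Created -> Vitalized -> Terminated | Error;
    // EntryPointRunnable.run() fires Vitalized at startup and Terminated or Error on completion.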
Created ( 0x01, "Created" ), Vitalized ( 0x02, "Vitalized" ), Terminated ( 0x03, "Terminated" ), Error ( 0x04, "Error" ), ; private final int code; private final String name; ProcessEvent( int code, String name ) { this.code = code; this.name = name; } public int getCode() { return this.code; } public String getName() { return this.name; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/event/ProcessEventHandler.java ================================================ package com.pinecone.hydra.proc.event; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.proc.image.EntryPointRunnable; public interface ProcessEventHandler extends Pinenut { void fired( EntryPointRunnable runnable, ProcessEvent event ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/event/ProcessLifecycleHandler.java ================================================ package com.pinecone.hydra.proc.event; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.proc.image.EntryPointRunnable; public interface ProcessLifecycleHandler extends Pinenut { void fired( String imageAddress, EntryPointRunnable runnable, ProcessEvent event ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ArchEntryPointRunnable.java ================================================ package com.pinecone.hydra.proc.image; import java.util.ArrayList; import java.util.List; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEventHandler; public abstract class ArchEntryPointRunnable implements EntryPointRunnable { protected UProcess mOwnedProcess; protected ProcessEventHandler mProcessEventHandler; List mSysProcEventHandlers; public ArchEntryPointRunnable( UProcess ownedProcess, ProcessEventHandler processEventHandler ) { this.mOwnedProcess = ownedProcess; this.mProcessEventHandler = processEventHandler; this.mSysProcEventHandlers = new ArrayList<>(); } public ArchEntryPointRunnable( ProcessEventHandler processEventHandler ) { this( null, processEventHandler ); } public ArchEntryPointRunnable() { this( null, null ); } @Override public ProcessEventHandler processEventHandler() { return this.mProcessEventHandler; } @Override public void applyProcessEventHandler( ProcessEventHandler handler ) { if ( this.mOwnedProcess.getState() != Thread.State.NEW ) { throw new IllegalStateException( "Process event handler can only be set before the process is started." 
        );
        }
        this.mProcessEventHandler = handler;
    }

    @Override
    public UProcess ownedProcess() {
        return this.mOwnedProcess;
    }

    @Override
    public void applyOwnedProcess( UProcess process ) {
        this.mOwnedProcess = process;
    }

    static List<ProcessEventHandler> getSysProcEventHandlers( EntryPointRunnable entryPointRunnable ) {
        if ( entryPointRunnable instanceof ArchEntryPointRunnable ) {
            return ((ArchEntryPointRunnable) entryPointRunnable).mSysProcEventHandlers;
        }
        else {
            return null;
        }
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ArchExecutionImage.java
================================================
package com.pinecone.hydra.proc.image;

import java.net.URI;

import com.pinecone.hydra.proc.ControllableLevel;
import com.pinecone.hydra.proc.UProcess;

public abstract class ArchExecutionImage implements ExecutionImage {
    protected String             mszName;
    protected URI                mResourceURI;
    protected String             mszImageAddress;
    protected Class              mProcessClassType;
    protected ClassLoader        mClassLoader;
    protected EntryPointRunnable mEntryPoint;
    protected ImageLoader        mImageLoader;
    protected boolean            mbReadOnly;
    protected boolean            mbReusable;
    protected String             mszSignature;
    protected ControllableLevel  mControllableLevel;

    protected ArchExecutionImage( String name, EntryPointRunnable entryPoint, Class processClassType, URI resourceURI, ClassLoader classLoader, ImageLoader imageLoader, boolean readOnly, boolean reusable, String signature, ControllableLevel controllableLevel ) {
        this.mszName            = name;
        this.mEntryPoint        = entryPoint;
        this.mProcessClassType  = processClassType;
        this.mResourceURI       = resourceURI;
        this.mClassLoader       = classLoader;
        this.mImageLoader       = imageLoader;
        this.mbReadOnly         = readOnly;
        this.mbReusable         = reusable;
        this.mszSignature       = signature;
        this.mControllableLevel = controllableLevel;
    }

    protected ArchExecutionImage( String name, EntryPointRunnable entryPoint, Class processClassType, URI resourceURI, ImageLoader imageLoader, String signature, ControllableLevel controllableLevel ) {
        this( name, entryPoint, processClassType, resourceURI, imageLoader.getClassLoader(), imageLoader, true, true, signature, controllableLevel );
    }

    @Override
    public String getName() {
        return this.mszName;
    }

    @Override
    public URI getResourceURI() {
        return this.mResourceURI;
    }

    protected void applyImageAddress( String address ) {
        this.mszImageAddress = address;
    }

    @Override
    public String getImageAddress() {
        return this.mszImageAddress;
    }

    @Override
    public Class processClassType() {
        return this.mProcessClassType;
    }

    @Override
    public EntryPointRunnable getEntryPoint() {
        return this.mEntryPoint;
    }

    @Override
    public ClassLoader getClassLoader() {
        return this.mClassLoader;
    }

    @Override
    public ImageLoader getImageLoader() {
        return this.mImageLoader;
    }

    @Override
    public boolean isReadOnly() {
        return this.mbReadOnly;
    }

    @Override
    public boolean isReusable() {
        return this.mbReusable;
    }

    @Override
    public String getSignature() {
        return this.mszSignature;
    }

    @Override
    public ControllableLevel getControllableLevel() {
        return this.mControllableLevel;
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ArchImageLoader.java
================================================
package com.pinecone.hydra.proc.image;

import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.system.ArchSystemCascadeComponent;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;
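/*
 * A minimal usage sketch (illustrative only, not part of the original source):
 * resolving an ExecutionImage through an ImageLoader and spawning a local
 * hosted process from it. `manager` is assumed to be an initialized
 * ProcessManager, and the image path is hypothetical.
 *
 *   ImageLoader loader = manager.getImageLoader();
 *   ExecutionImage image = loader.queryExecutionImage( "/sys/images/example" );
 *   if ( image != null ) {
 *       LocalUProcess proc = manager.createLocalHostedProcess( image, null, null );
 *       GUID pid = proc.getPID(); // GUID-based uniform PID
 *   }
 */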
public abstract class ArchImageLoader extends ArchSystemCascadeComponent implements ImageLoader { protected ClassLoader mClassLoader; public ArchImageLoader( Namespace name, Hydrogen system, HyComponent parent, ClassLoader classLoader ) { super( name, system, system.getComponentManager(), parent ); this.mClassLoader = classLoader; } public ArchImageLoader( Hydrogen system, HyComponent parent ) { this( null, system, parent, system.getGlobalClassLoader() ); } public ArchImageLoader( Hydrogen system ) { this( system, null ); } @Override public ClassLoader getClassLoader() { return this.mClassLoader; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/EntryPointRunnable.java ================================================ package com.pinecone.hydra.proc.image; import java.util.List; import java.util.Map; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.functions.Executor; import com.pinecone.hydra.proc.ArchProcessManager; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.event.ProcessEventHandler; public interface EntryPointRunnable extends Runnable, Executor { UProcess ownedProcess(); void applyOwnedProcess( UProcess process ); ProcessEventHandler processEventHandler(); void applyProcessEventHandler( ProcessEventHandler handler ); int main( Map args ) throws Exception; @Override default void execute() throws Exception { int c = this.main( this.ownedProcess().getStartupArguments() ); this.ownedProcess().actionTape().setExitCode( c ); } /** * Overriding is discouraged; lifecycle supervision is required in principle. 
     */
    @Override
    default void run() {
        ProcessEventHandler processEventHandler = this.processEventHandler();
        List<ProcessEventHandler> sysProcEventHandlers = ArchEntryPointRunnable.getSysProcEventHandlers( this );
        ProcessEvent termEvent = null;
        try {
            ProcessEvent vitalEvent = ProcessEvent.Vitalized;
            if ( processEventHandler != null ) {
                processEventHandler.fired( this, vitalEvent );
            }
            if ( sysProcEventHandlers != null ) {
                for ( ProcessEventHandler sysHandler : sysProcEventHandlers ) {
                    sysHandler.fired( this, vitalEvent );
                }
            }
            int c = this.main( this.ownedProcess().getStartupArguments() );
            this.ownedProcess().actionTape().setExitCode( c );
        }
        catch ( Exception e ) {
            this.ownedProcess().actionTape().setLastError( e );
            termEvent = ProcessEvent.Error;
            throw new ProvokeHandleException( e );
        }
        finally {
            UProcess owned = this.ownedProcess();
            ProcessManager processManager = owned.getOwnedProcessManager();
            if ( processManager instanceof ArchProcessManager ) {
                ArchProcessManager.invokeExpunge( (ArchProcessManager) processManager, owned );
            }
            if ( termEvent == null ) {
                termEvent = ProcessEvent.Terminated;
            }
            if ( processEventHandler != null ) {
                processEventHandler.fired( this, termEvent );
            }
            if ( sysProcEventHandlers != null ) {
                for ( ProcessEventHandler sysHandler : sysProcEventHandlers ) {
                    sysHandler.fired( this, termEvent );
                }
            }
        }
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ExecutionImage.java
================================================
package com.pinecone.hydra.proc.image;

import java.net.URI;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.hydra.proc.ControllableLevel;
import com.pinecone.hydra.proc.UProcess;

public interface ExecutionImage extends Pinenut {
    String getName();

    URI getResourceURI();

    String getImageAddress();

    Class processClassType();

    EntryPointRunnable getEntryPoint();

    ClassLoader getClassLoader();

    ImageLoader getImageLoader();

    boolean isReadOnly();

    boolean isReusable();

    String getSignature();

    ControllableLevel getControllableLevel();
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/FileSystemMappingImageLoader.java
================================================
package com.pinecone.hydra.proc.image;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.system.architecture.CascadeComponent;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.proc.image.kom.ImageElement;
import com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.centrum.UniformCentralSystem;
import com.pinecone.hydra.system.imperium.KernelObjectRootMountPoint;
import com.pinecone.hydra.system.ko.kom.KOMInstrument;
import com.pinecone.hydra.unit.imperium.entity.EntityNode;

public class FileSystemMappingImageLoader extends ArchImageLoader implements ImageLoader {
    protected VirtualExeImageInstrument mVirtualExeImageInstrument;

    public FileSystemMappingImageLoader( Namespace name, Hydrogen system, HyComponent parent, ClassLoader classLoader, @Nullable VirtualExeImageInstrument virtualExeImageInstrument ) {
        super( name, system, parent, classLoader );
        this.mVirtualExeImageInstrument = virtualExeImageInstrument;
        if ( virtualExeImageInstrument == null ) {
            if ( system instanceof UniformCentralSystem ) {
                KOMInstrument e = ((UniformCentralSystem)
system).imperiumPrivy().getExpressInstrument().getMountedInstrument( KernelObjectRootMountPoint.SysImages.getMountPoint() ); if ( e instanceof VirtualExeImageInstrument ) { this.mVirtualExeImageInstrument = (VirtualExeImageInstrument) e; } } } } public FileSystemMappingImageLoader( Hydrogen system, HyComponent parent, @Nullable VirtualExeImageInstrument virtualExeImageInstrument ) { this( null, system, parent, system.getGlobalClassLoader(), virtualExeImageInstrument ); } public FileSystemMappingImageLoader( Hydrogen system, @Nullable VirtualExeImageInstrument virtualExeImageInstrument ) { this( system, null, virtualExeImageInstrument ); } @Override public ExecutionImage queryExecutionImage( String path ) { ExecutionImage image = this.mVirtualExeImageInstrument.queryImage( path ); if ( image != null ) { return image; } if ( this.getSystem() instanceof UniformCentralSystem ) { EntityNode e = ((UniformCentralSystem) this.getSystem()).imperiumPrivy().getExpressInstrument().queryNode( path ); if ( e instanceof ImageElement) { return ((ImageElement) e).getImage(); } } return null; } @Override public void registerLocalScopeExecutionImage( String dirPath, ExecutionImage image ) { this.mVirtualExeImageInstrument.mount( dirPath, image ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/GenericClassImage.java ================================================ package com.pinecone.hydra.proc.image; import java.net.URI; import java.net.URISyntaxException; import com.pinecone.hydra.proc.ControllableLevel; import com.pinecone.hydra.proc.UProcess; public class GenericClassImage extends ArchExecutionImage implements JVMClassExecutionImage { static URI evalClassURI( Class clazz ) throws ImageLoadProcedureException { try { return clazz.getProtectionDomain().getCodeSource().getLocation().toURI(); } catch ( URISyntaxException e ) { throw new ImageLoadProcedureException( e ); } } public GenericClassImage( String name, EntryPointRunnable entryPoint, Class processClassType, ImageLoader imageLoader ) throws ImageLoadProcedureException { super( name, entryPoint, processClassType, evalClassURI( entryPoint.getClass() ), imageLoader, JVMClassExecutionImage.class.getSimpleName(), ControllableLevel.Absolute ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageLoadException.java ================================================ package com.pinecone.hydra.proc.image; import com.pinecone.framework.system.prototype.Pinenut; public class ImageLoadException extends Exception implements Pinenut { public ImageLoadException () { super(); } public ImageLoadException ( String message ) { super(message); } public ImageLoadException ( String message, Throwable cause ) { super(message, cause); } public ImageLoadException ( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageLoadProcedureException.java ================================================ package com.pinecone.hydra.proc.image; import com.pinecone.framework.system.PineRuntimeException; public class ImageLoadProcedureException extends PineRuntimeException { public ImageLoadProcedureException () { super(); } public ImageLoadProcedureException ( String message ) { super(message); } public ImageLoadProcedureException ( String message, Throwable cause ) { super(message, cause); } public 
ImageLoadProcedureException ( Throwable cause ) {
        super( cause );
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageLoader.java
================================================
package com.pinecone.hydra.proc.image;

import com.pinecone.framework.system.architecture.Component;

public interface ImageLoader extends Component {
    ClassLoader getClassLoader();

    ExecutionImage queryExecutionImage( String path );

    void registerLocalScopeExecutionImage( String dirPath, ExecutionImage image );
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageModifier.java
================================================
package com.pinecone.hydra.proc.image;

import java.net.URI;

import com.pinecone.framework.system.Unsafe;
import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.hydra.proc.event.ProcessEventHandler;

public interface ImageModifier extends Pinenut {
    @Unsafe
    void addSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler );

    @Unsafe
    void removeSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler );

    @Unsafe
    int querySystemProcessEventHandlersSize( EntryPointRunnable runnable );

    @Unsafe
    void applyImageAddress( ExecutionImage image, String address );
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/JVMClassExecutionImage.java
================================================
package com.pinecone.hydra.proc.image;

public interface JVMClassExecutionImage extends ExecutionImage {
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/LocalHostedClassImage.java
================================================
package com.pinecone.hydra.proc.image;

import com.pinecone.hydra.proc.LocalHostedProcess;
import com.pinecone.hydra.proc.ProcessManager;

public class LocalHostedClassImage extends GenericClassImage {
    public LocalHostedClassImage( String name, EntryPointRunnable entryPoint, ImageLoader imageLoader ) throws ImageLoadProcedureException {
        super( name, entryPoint, LocalHostedProcess.class, imageLoader );
    }

    public LocalHostedClassImage( String name, EntryPointRunnable entryPoint, ProcessManager manager ) throws ImageLoadProcedureException {
        this( name, entryPoint, manager.getImageLoader() );
    }

    public LocalHostedClassImage( EntryPointRunnable entryPoint, ProcessManager manager ) throws ImageLoadProcedureException {
        this( "", entryPoint, manager.getImageLoader() );
        this.mszName = this.getClass().getName();
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/SafeImageModifier.java
================================================
package com.pinecone.hydra.proc.image;

import java.net.URI;
import java.util.List;

import com.pinecone.framework.system.Unsafe;
import com.pinecone.hydra.proc.event.ProcessEventHandler;

public class SafeImageModifier implements ImageModifier {
    public SafeImageModifier() {
    }

    protected List<ProcessEventHandler> retrieveSysProcEventHandlers( EntryPointRunnable runnable ) {
        List<ProcessEventHandler> those = ArchEntryPointRunnable.getSysProcEventHandlers( runnable );
        if ( those != null ) {
            return those;
        }
        throw new IllegalArgumentException( "EntryPointRunnable has no SystemProcessEventHandlers." );
    }

    @Override
    @Unsafe
    public void addSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler ) {
        List<ProcessEventHandler> those = this.retrieveSysProcEventHandlers( runnable );
        those.add( handler );
    }

    @Override
    @Unsafe
    public void removeSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler ) {
        List<ProcessEventHandler> those = this.retrieveSysProcEventHandlers( runnable );
        those.remove( handler );
    }

    @Override
    @Unsafe
    public int querySystemProcessEventHandlersSize( EntryPointRunnable runnable ) {
        List<ProcessEventHandler> those = this.retrieveSysProcEventHandlers( runnable );
        return those.size();
    }

    @Override
    @Unsafe
    public void applyImageAddress( ExecutionImage image, String address ) {
        if ( image instanceof ArchExecutionImage ) {
            ((ArchExecutionImage) image).applyImageAddress( address );
            return;
        }
        throw new IllegalArgumentException( "Unable to modify `image-uri`." );
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/URLImageLoader.java
================================================
package com.pinecone.hydra.proc.image;

import java.net.URI;

public interface URLImageLoader extends ImageLoader {
    ExecutionImage queryExecutionImage( URI uri );
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/UniformImageLoader.java
================================================
package com.pinecone.hydra.proc.image;

public interface UniformImageLoader extends URLImageLoader {
    ImageLoader localMappingImageLoader();

    void addScope( String protocol, ImageLoader imageLoader );

    ImageLoader getScope( String protocol );
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/UniformMultiScopeImageLoader.java
================================================
package com.pinecone.hydra.proc.image;

import java.net.URI;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.system.architecture.CascadeComponent;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.centrum.UniformCentralSystem;

public class UniformMultiScopeImageLoader extends ArchImageLoader implements UniformImageLoader {
    protected Map<String, ImageLoader> mLoaderScope;
    protected ImageLoader              mLocalMappingImageLoader;

    public UniformMultiScopeImageLoader( Namespace name, Hydrogen system, HyComponent parent, ClassLoader classLoader, @Nullable ImageLoader localMappingImageLoader ) {
        super( name, system, parent, classLoader );
        this.mLoaderScope = new ConcurrentHashMap<>();
        this.mLocalMappingImageLoader = localMappingImageLoader;
        if ( this.mLocalMappingImageLoader == null ) {
            if ( system instanceof UniformCentralSystem ) {
                UniformImageLoader pl = (UniformImageLoader) ( (UniformCentralSystem) system ).imageLoader();
                this.mLocalMappingImageLoader = pl.localMappingImageLoader();
            }
            else {
                throw new IllegalArgumentException( "`UniformMultiScopeImageLoader` must include at least one `localMappingImageLoader`."
); } } this.mLoaderScope.put( "uofs", this.mLocalMappingImageLoader ); } public UniformMultiScopeImageLoader( Hydrogen system, HyComponent parent, @Nullable ImageLoader localMappingImageLoader ) { this( null, system, parent, system.getGlobalClassLoader(), localMappingImageLoader ); } public UniformMultiScopeImageLoader( Hydrogen system, HyComponent parent ) { this( null, system, parent, system.getGlobalClassLoader(), null ); } public UniformMultiScopeImageLoader( Hydrogen system, @Nullable ImageLoader localMappingImageLoader ) { this( system, null, localMappingImageLoader ); } @Override public ImageLoader localMappingImageLoader() { return this.mLocalMappingImageLoader; } @Override public void addScope( String protocol, ImageLoader imageLoader ) { this.mLoaderScope.put( protocol.toLowerCase(), imageLoader ); } @Override public ImageLoader getScope( String protocol ) { return this.mLoaderScope.get( protocol.toLowerCase() ); } @Override public ExecutionImage queryExecutionImage( URI uri ) { String p = uri.getScheme().toLowerCase(); ImageLoader imageLoader = this.mLoaderScope.get( p ); if ( imageLoader == null ) { return null; } if ( imageLoader instanceof URLImageLoader ) { URLImageLoader urlImageLoader = (URLImageLoader) imageLoader; return urlImageLoader.queryExecutionImage( uri ); } ExecutionImage image = imageLoader.queryExecutionImage( uri.getPath() ); if ( image != null ) { return image; } CascadeComponent component = this.parent(); if ( component instanceof URLImageLoader ) { image = ( (URLImageLoader) component ).queryExecutionImage( uri ); if ( image != null ) { return image; } } return null; } @Override public ExecutionImage queryExecutionImage( String path ) { ExecutionImage image = this.mLocalMappingImageLoader.queryExecutionImage( path ); if ( image != null ) { return image; } CascadeComponent component = this.parent(); if ( component instanceof ImageLoader ) { image = ( (ImageLoader) component ).queryExecutionImage( path ); if ( image != null ) { return image; } } return null; } @Override public void registerLocalScopeExecutionImage( String dirPath, ExecutionImage image ) { this.mLocalMappingImageLoader.registerLocalScopeExecutionImage( dirPath, image ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/ElementNode.java ================================================ package com.pinecone.hydra.proc.image.kom; import com.pinecone.hydra.system.ko.meta.ElementObject; public interface ElementNode extends ElementObject { @Override default String objectCategoryName() { return "Process"; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/GenericImageElement.java ================================================ package com.pinecone.hydra.proc.image.kom; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.image.ExecutionImage; public class GenericImageElement implements ImageElement { protected ExecutionImage mImage; protected GUID mGUID; public GenericImageElement() { } public GenericImageElement( ExecutionImage image, GUID guid ) { this.mImage = image; this.mGUID = guid; } @Override public ExecutionImage getImage() { return this.mImage; } @Override public String getName() { return this.mImage.getName(); } @Override public GUID getGuid() { return this.mGUID; } } ================================================ FILE: 
Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/ImageElement.java ================================================ package com.pinecone.hydra.proc.image.kom; import com.pinecone.hydra.proc.image.ExecutionImage; public interface ImageElement extends ElementNode { ExecutionImage getImage(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/VirtualExeImageInstrument.java ================================================ package com.pinecone.hydra.proc.image.kom; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.system.ko.kom.KOMInstrument; public interface VirtualExeImageInstrument extends KOMInstrument { ImageElement mount( String parentPath, ExecutionImage image ) ; ImageElement queryImageElement( String path ) ; ExecutionImage queryImage( String path ) ; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/VirtualMappingExeImageInstrument.java ================================================ package com.pinecone.hydra.proc.image.kom; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.system.centrum.UniformCentralSystem; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.runtime.ArchRuntimeKOMTree; import com.pinecone.hydra.unit.imperium.entity.EntityNode; public class VirtualMappingExeImageInstrument extends ArchRuntimeKOMTree implements VirtualExeImageInstrument { public VirtualMappingExeImageInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, superiorPathScope, kernelObjectConfig, guidAllocator ); } public VirtualMappingExeImageInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig ) { this( superiorProcess, superiorPathScope, kernelObjectConfig, null ); } public VirtualMappingExeImageInstrument( Processum superiorProcess, String superiorPathScope ) { this( superiorProcess, superiorPathScope, UniformCentralSystem.evalCentralSystem(superiorProcess).fundamentalKernelObjectConfig(), null ); } @Override public ImageElement mount( String parentPath, ExecutionImage image ) { ImageElement element = new GenericImageElement( image, this.guidAllocator.nextGUID() ); this.add( parentPath + this.getConfig().getPathNameSeparator() + image.getName(), element ); return element; } @Override public ImageElement queryImageElement( String path ) { EntityNode e = this.queryNode( path ); if ( e instanceof ImageElement ) { return (ImageElement) e; } return null; } @Override public ExecutionImage queryImage( String path ) { ImageElement element = this.queryImageElement( path ); if ( element != null ) { return element.getImage(); } return null; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ns/ControlGroup.java ================================================ package com.pinecone.hydra.proc.ns; import com.pinecone.framework.system.prototype.Pinenut; public interface ControlGroup extends Pinenut { } ================================================ FILE: 
Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ns/GenericSegregationSpace.java ================================================ package com.pinecone.hydra.proc.ns; public class GenericSegregationSpace implements ProcSpace { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ns/ProcSpace.java ================================================ package com.pinecone.hydra.proc.ns; import com.pinecone.framework.system.prototype.Pinenut; public interface ProcSpace extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/signal/Signal.java ================================================ package com.pinecone.hydra.proc.signal; import com.pinecone.framework.system.prototype.Pinenut; public interface Signal extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/tomb/ResurgentTombstone.java ================================================ package com.pinecone.hydra.proc.tomb; public class ResurgentTombstone implements RuntimeTombstone { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/tomb/RuntimeTombstone.java ================================================ package com.pinecone.hydra.proc.tomb; import com.pinecone.framework.system.prototype.Pinenut; public interface RuntimeTombstone extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/tomb/TombCheckpoint.java ================================================ package com.pinecone.hydra.proc.tomb; import com.pinecone.framework.system.prototype.Pinenut; public interface TombCheckpoint extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ActionType.java ================================================ package com.pinecone.hydra.servgram; public enum ActionType { Sequential ("Sequential"), Parallel ("Parallel"), Loop ("Loop"), SequentialActions ("SequentialActions"), ParallelActions ("ParallelActions"), LoopActions ("LoopActions"), Break ("Break"), Continue ("Continue"), Jump ("Jump"),; private final String value; ActionType( String value ){ this.value = value; } public String getName(){ return this.value; } public boolean isActionGroup() { return this == ActionType.SequentialActions || this == ActionType.ParallelActions || this == ActionType.LoopActions; } public ActionType reinterpretActions() { switch ( this ) { case LoopActions: { return ActionType.Loop; } case ParallelActions: { return ActionType.Parallel; } case SequentialActions: { return ActionType.Sequential; } default: { return this; } } } public static String queryName( ActionType type ) { return type.getName(); } public static ActionType queryActionType( String sz ) { return ActionType.valueOf( sz ); } public static final String ConfigActionTypeKey = "Type" ; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchGramFactory.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.executum.TaskManager; import com.pinecone.framework.util.name.Name; import com.pinecone.ulf.util.lang.ArchMultiScopeFactory; import 
java.lang.reflect.InvocationTargetException;
import java.util.List;

public abstract class ArchGramFactory extends ArchMultiScopeFactory implements GramFactory {
    protected ArchGramFactory( TaskManager taskManager, ClassLoader classLoader, MultiGramsLoader gramLoader, GramScope gramScope ) {
        super( taskManager, classLoader, gramLoader, gramScope );
    }

    @Override
    public ClassLoader getClassLoader() {
        return this.mClassLoader;
    }

    @Override
    public GramScope getClassScope() {
        return (GramScope) super.getClassScope();
    }

    @Override
    public MultiGramsLoader getTraitClassLoader() {
        return (MultiGramsLoader) super.getTraitClassLoader();
    }

    public Servgram newInstance( Class that, Class[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        return (Servgram) super.newInstance( that, stereotypes, args );
    }

    @Override
    public Servgram spawn( Name name, Object... args ) throws InvocationTargetException {
        return this.spawn( name, null, args );
    }

    @Override
    public Servgram spawn( Name name, Class[] stereotypes, Object... args ) throws InvocationTargetException {
        return (Servgram) super.spawn( name, stereotypes, args );
    }

    @Override
    public List<Servgram> popping( Name name, Object... args ) {
        return this.popping( name, null, args );
    }

    @Override
    @SuppressWarnings( "unchecked" )
    public List<Servgram> popping( Name name, Class[] stereotypes, Object... args ) {
        return (List<Servgram>) super.popping( name, stereotypes, args );
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchGramLoader.java
================================================
package com.pinecone.hydra.servgram;

import com.pinecone.framework.util.name.Name;
import com.pinecone.hydra.servgram.filters.AnnotationValueFilter;
import com.pinecone.ulf.util.lang.*;
import javassist.ClassPool;
import javassist.bytecode.annotation.Annotation;

/**
 * Notice: TODO, IoC injection implementation.
 */
public abstract class ArchGramLoader extends ArchMultiScopeLoader implements MultiGramsLoader {
    protected AnnotationValueFilter mAnnoValueFilter;

    protected ArchGramLoader( GramScope gramScope, ClassLoader classLoader, ClassPool classPool ) {
        super( gramScope, classLoader, classPool, null, null );
        this.mClassScanner = new PooledClassCandidateScanner( new LocalGramScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool );
        this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool );
    }

    protected ArchGramLoader( GramScope gramScope, ClassLoader classLoader ) {
        this( gramScope, classLoader, ClassPool.getDefault() );
    }

    @Override
    public void setAnnotationValueFilter( AnnotationValueFilter filter ) {
        this.mAnnoValueFilter = filter;
    }

    @Override
    protected boolean isAnnotationQualified( Annotation that, String szName ) {
        return !this.mAnnoValueFilter.match( that, szName );
    }

    @Override
    @SuppressWarnings( "unchecked" )
    public Class load( Name simpleName ) throws ClassNotFoundException {
        return (Class) super.load( simpleName );
    }

    // Directly by its name.
    @Override
    @SuppressWarnings( "unchecked" )
    public Class loadByName( Name simpleName ) throws ClassNotFoundException {
        return (Class) super.loadByName( simpleName );
    }

    // Scanning class's annotations, methods or others.
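    // Illustrative only (hypothetical `loader` and `name` variables):
    //   Class gramClass = loader.loadInClassTrait( name );
    // resolves a Servgram class via trait scanning rather than by its literal name.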
@Override @SuppressWarnings( "unchecked" ) public Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException { return (Class )super.loadInClassTrait( simpleName ); } @Override protected Class loadSingleByFullClassName( String szFullClassName ) { try { Class clazz = this.mClassLoader.loadClass( szFullClassName ); if( this.filter( clazz ) ) { return null; } if ( Servgram.class.isAssignableFrom( clazz ) ) { return clazz.asSubclass( Servgram.class ); } } catch ( ClassNotFoundException e ) { return null; } return null; } @Override public MultiGramsLoader updateScope() { return (MultiGramsLoader)super.updateScope(); } @Override public void clearCache() { this.mLoadedClassesPool.clear(); this.mVisitedClasses.clear(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchGramScopeSet.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.util.lang.ArchClassScopeSet; import com.pinecone.framework.util.lang.ScopedPackage; import java.util.Set; public abstract class ArchGramScopeSet extends ArchClassScopeSet implements GramScope { protected ArchGramScopeSet( Set scope, ClassLoader classLoader ) { super( scope, classLoader ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchServgramOrchestrator.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.GenericMasterTaskManager; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.Pinecore; import com.pinecone.framework.system.executum.Executum; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.name.Name; import com.pinecone.hydra.orchestration.Exertion; import com.pinecone.hydra.orchestration.IntegrityLevel; import com.pinecone.ulf.util.lang.MultiScopeFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; public abstract class ArchServgramOrchestrator extends GenericMasterTaskManager implements ServgramOrchestrator { private Logger logger ; private GramFactory mGramFactory ; private PatriarchalConfig mSectionConfig ; private PatriarchalConfig mOrchestrationConfig ; private List mServgramScopesConf ; private GramTransaction mTransaction ; private Exertion mPrimaryAction ; private ReentrantLock mNotifyLock ; public ArchServgramOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) { super( parent ); this.mSectionConfig = sectionConfig; this.logger = LoggerFactory.getLogger( String.format( "Tracer<%s>", this.className() ) ); this.mOrchestrationConfig = this.mSectionConfig.getChild( AutoOrchestrator.ConfigOrchestrationKey ); this.mServgramScopesConf = (List)((JSONObject) this.mOrchestrationConfig).get( AutoOrchestrator.ConfigServgramScopesKey ); this.mNotifyLock = new ReentrantLock(); this.prepareFactory( factory ); this.mTransaction = transaction; } protected void prepareFactory( @Nullable GramFactory factory ) { if( factory != null ) { this.mGramFactory = factory; for( Object o : this.mServgramScopesConf ) { this.mGramFactory.getClassScope().addScope( o.toString() ); } 
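            // Rebuild the trait loader's scan scope so the servgram scopes added above take effect.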
this.mGramFactory.getTraitClassLoader().updateScope(); } } @Override public Pinecore getSystem() { return (Pinecore) super.getSystem(); } @Override public PatriarchalConfig getSectionConfig() { return this.mSectionConfig; } @Override public PatriarchalConfig getScheme(){ return this.getSectionConfig(); } @Override public PatriarchalConfig getOrchestrationConfig() { return this.mOrchestrationConfig; } @Override public GramTransaction getTransaction() { return this.mTransaction; } @Override public void setTransaction( GramTransaction transaction ) { this.mTransaction = transaction; } @Override public MultiScopeFactory getClassFactory() { return this.mGramFactory; } protected Exertion wrapServgramAction( Servgram servgram, ActionType type ) { switch ( type ) { case Parallel: { return new LocalParallelGramExertium( this, servgram ); } case Sequential:{ return new LocalSequentialGramExertium( this, servgram ); } default:{ throw new IllegalArgumentException( "ServgramAction can only be [Parallel, Sequential]." ); } } } protected void orchestrateTransactionGroup( GramTransaction transaction, GramTransaction.TransactionArgs args, Map protoConf, int stratum ) throws OrchestrateInterruptException { List childrenList = (List) protoConf.get( GramTransaction.ConfigTransactionsListKey ); GramTransaction child = new LocalGramTransaction( args.name, args.type.reinterpretActions(), childrenList, this, this.getParentProcessum() ); this.orchestrateTransaction( child, stratum + 1 ); transaction.add( child ); } protected abstract List popping( String szName ); protected abstract List popping( Name name ); protected void orchestrateServgramium( GramTransaction transaction, GramTransaction.TransactionArgs args, Map protoConf, boolean bPrimary, int stratum ) throws OrchestrateInterruptException { List grams = this.popping( args.name ); if( grams.isEmpty() ) { this.tracer().warn( "[Lifecycle] No servgram instance matched name '{}'.", args.name ); } if( bPrimary ) { if( stratum > 0 && this.mPrimaryAction != null ) { throw new OrchestrateInterruptException( "Primary Servgram can only have one instance in the whole transaction graph." ); } if( grams.size() > 1 ) { throw new OrchestrateInterruptException( "Primary Servgram can only have one instance."
); } else { if( !grams.isEmpty() ){ this.mPrimaryAction = this.wrapServgramAction( grams.get(0), args.type ); this.mPrimaryAction.setIntegrityLevel( args.level ); this.mTransaction.addFirst( this.mPrimaryAction ); } } } else { for( Servgram gram : grams ) { Exertion act = this.wrapServgramAction( gram, args.type ); act.setIntegrityLevel( args.level ); transaction.add( act ); } } } @SuppressWarnings( "unchecked" ) protected void orchestrateTransaction( GramTransaction transaction, int stratum ) throws OrchestrateInterruptException { try{ for( Object o : transaction.getTransactionList() ) { Map jo = (Map) o; GramTransaction.TransactionArgs args = new GramTransaction.TransactionArgs( (String) jo.getOrDefault( GramTransaction.ConfigTransactionNameKey, "" ), ActionType.queryActionType( (String) jo.getOrDefault( ActionType.ConfigActionTypeKey, ActionType.Sequential.getName() ) ), IntegrityLevel.queryIntegrityLevel( jo.getOrDefault( IntegrityLevel.ConfIntegrityLevelKey, IntegrityLevel.Warning ).toString() ) ); boolean bPrimary = (boolean) jo.getOrDefault( GramTransaction.ConfigPrimaryNameKey, false ); if( args.type.isActionGroup() ) { this.orchestrateTransactionGroup( transaction, args, jo, stratum ); } else { this.orchestrateServgramium( transaction, args, jo, bPrimary, stratum ); } } } catch ( Exception e ) { throw new OrchestrateInterruptException( "Orchestration has been interrupted; transaction compromised.", e ); } } public void onlyOrchestrateTransaction() throws OrchestrateInterruptException { this.orchestrateTransaction( this.mTransaction, 0 ); } @Override public void orchestrate() throws OrchestrateInterruptException { this.infoLifecycle( "Executing designed orchestration sequence" ); this.onlyOrchestrateTransaction(); this.startTransaction(); } /** * startTransaction * A transaction should be joined or sequential, with an atomic trait. * The default transaction sets its affinity to the parent thread, with a sequential trait. * If you wish to set the affinity to a master thread, override this method.
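* A minimal override sketch (editorial; {@code masterThread} is a hypothetical
* reference to the desired master thread):
* <pre>{@code
* public void startTransaction() throws OrchestrateInterruptException {
*     GramTransaction tx = this.getTransaction();
*     if ( tx instanceof Executum ) {
*         ((Executum) tx).setThreadAffinity( masterThread );
*     }
*     tx.start();
* }
* }</pre>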
*/ @Override public void startTransaction() throws OrchestrateInterruptException { if( this.mTransaction instanceof Executum ) { this.add( (Executum) this.mTransaction ); ((Executum) this.mTransaction).setThreadAffinity( Thread.currentThread() ); } this.mTransaction.start(); if( this.mTransaction instanceof Executum ) { this.erase( (Executum) this.mTransaction ); } } @Override public List preloads( String szName ) { return this.popping( szName ); } @Override public List preloads( Name name ) { return this.popping( name ); } @Override public Logger tracer() { return this.logger; } protected ArchServgramOrchestrator infoLifecycle( String szWhat, String szStateOrExtra ) { this.tracer().info( "[Lifecycle] [{}] <{}>", szWhat, szStateOrExtra ); return this; } protected ArchServgramOrchestrator infoLifecycle( String szStateOrExtra ) { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(), szStateOrExtra ); } @Override public void notifyFinished ( Executum that ) { this.mNotifyLock.lock(); try{ this.erase( that ); } finally { this.mNotifyLock.unlock(); } super.notifyFinished( that ); } @Override public void notifyExecuting ( Executum that ) { this.mNotifyLock.lock(); try{ this.add( that ); } finally { this.mNotifyLock.unlock(); } super.notifyExecuting( that ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchServgramium.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.system.GenericMasterTaskManager; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.system.component.Slf4jTraceable; import org.slf4j.Logger; import com.pinecone.hydra.system.Hydrogen; import java.io.IOException; import java.nio.file.Path; public abstract class ArchServgramium extends ArchProcessum implements Servgramium, Slf4jTraceable { protected Logger mLogger; protected String mszGramName; protected PatriarchalConfig mServgramList; protected PatriarchalConfig mServgramConf; protected boolean mbTraceLifecycle; public ArchServgramium( String szGramName, Processum parent ) { super( szGramName, parent ); this.mszGramName = szGramName; this.mTaskManager = new GenericMasterTaskManager( this ); this.mLogger = this.parentSystem().getTracerScope().newLogger( this.className() ); this.loadConfig(); this.infoLifecycle( "MeeseekSpawned", "I'm Mr.Meeseek[" + this.className() + "], look at me !" 
); } protected ArchServgramium( Servgramium shared, boolean bs ) { super( shared.getName(), (Processum) shared.parentExecutum() ); ArchServgramium that = (ArchServgramium) shared; this.mszGramName = that.mszGramName; this.mTaskManager = that.mTaskManager; this.mLogger = that.mLogger; this.mServgramList = that.mServgramList; this.mServgramConf = that.mServgramConf; } @Override public Logger getLogger() { return this.mLogger; } @Override public ArchServgramium infoLifecycle( String szWhat, String szStateOrExtra ) { this.getLogger().info( "[Lifecycle] [{}] <{}>", szWhat, szStateOrExtra ); return this; } @Override public ArchServgramium infoLifecycle( String szStateOrExtra ) { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(), szStateOrExtra ); } protected void loadConfig() { this.mServgramList = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey ); Object dyServgramConf = this.mServgramList.get( this.gramName() ); if( dyServgramConf instanceof String ) { try{ this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) ); } catch ( IOException e ) { this.parentSystem().handleKillException( e ); } } else { this.mServgramConf = this.mServgramList.getChild( this.gramName() ); } //Debug.trace( this.mMeeseekConf ); } public boolean isTraceLifecycle() { return this.mbTraceLifecycle; } @Override public ServgramOrchestrator getAttachedOrchestrator() { return (ServgramOrchestrator) this.parentExecutum().getTaskManager(); } @Override public String gramName() { return this.mszGramName; } @Override public Hydrogen parentSystem() { return (Hydrogen) super.parentSystem(); } @Override public PatriarchalConfig getConfig() { return this.mServgramConf; } @Override public abstract void execute() throws Exception; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/AutoOrchestrator.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.executum.EventedTaskManager; import com.pinecone.framework.util.config.Config; import com.pinecone.framework.util.name.Name; import com.pinecone.framework.system.regime.Orchestrator; import java.util.List; public interface AutoOrchestrator extends EventedTaskManager, Orchestrator { String ConfigOrchestrationKey = "Orchestration" ; String ConfigServgramScopesKey = "ServgramScopes" ; void orchestrate() throws OrchestrateInterruptException ; List preloads( String szName ) ; List preloads( Name name ) ; Config getScheme(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Gram.java ================================================ package com.pinecone.hydra.servgram; import java.lang.annotation.*; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Gram { String ValueKey = "value"; String value() default ""; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramFactory.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.util.name.Name; import com.pinecone.framework.util.name.ScopeName; import com.pinecone.ulf.util.lang.MultiScopeFactory; import java.lang.reflect.InvocationTargetException; import java.util.List; 
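/**
 * Editorial usage sketch (hypothetical names; the scope package and the
 * trailing spawn/popping arguments are assumptions, mirroring
 * LocalServgramOrchestrator, which passes the gram name and the parent
 * system as constructor args):
 * <pre>{@code
 * GramFactory factory = new LocalGramFactory( taskManager );
 * factory.getClassScope().addScope( "com.example.grams" );
 * factory.getTraitClassLoader().updateScope();
 *
 * Servgram one = factory.spawn( "HelloGram", "HelloGram", system );    // single instance
 * List grams   = factory.popping( "HelloGram", "HelloGram", system );  // all scope matches
 * }</pre>
 */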
public interface GramFactory extends MultiScopeFactory { @Override ClassLoader getClassLoader(); @Override GramScope getClassScope(); @Override MultiGramsLoader getTraitClassLoader(); @Override default Servgram spawn( String name, Class[] stereotypes, Object... args ) throws InvocationTargetException { return this.spawn( new ScopeName(name), stereotypes, args ); } @Override Servgram spawn( Name name, Class[] stereotypes, Object... args ) throws InvocationTargetException; @Override default Servgram spawn( String name, Object... args ) throws InvocationTargetException { return this.spawn( new ScopeName(name), args ); } @Override Servgram spawn( Name name, Object... args ) throws InvocationTargetException; @Override default List popping( String name, Class[] stereotypes, Object... args ) { return this.popping( new ScopeName(name), stereotypes, args ); } @Override List popping( Name name, Class[] stereotypes, Object... args ); @Override default List popping( String name, Object... args ) { return this.popping( new ScopeName(name), args ); } @Override List popping( Name name, Object... args ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramLoader.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.util.name.Name; import com.pinecone.hydra.servgram.filters.AnnotationValueFilter; import com.pinecone.ulf.util.lang.TraitClassLoader; public interface GramLoader extends TraitClassLoader { @Override Class load( Name simpleName ) throws ClassNotFoundException ; // Directly by its name. @Override Class loadByName( Name simpleName ) throws ClassNotFoundException ;
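/*
 * Editorial note (assumption; the wiring names are hypothetical): loadByName
 * resolves strictly by the simple class name within the registered scopes,
 * whereas loadInClassTrait (below) may also match on class traits such as
 * the value of a @Gram annotation; see GramAnnotationValueFilter.
 *
 *   GramLoader loader = new LocalGramLoader( factory );
 *   loader.setAnnotationValueFilter( new GramAnnotationValueFilter() );
 *   Class byName  = loader.loadByName( new ScopeName( "HelloServgram" ) );
 *   Class byTrait = loader.loadInClassTrait( new ScopeName( "HelloGram" ) );
 */
// Scanning a class's annotations, methods, or other traits.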
@Override Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException ; @Override GramLoader updateScope(); void setAnnotationValueFilter( AnnotationValueFilter filter ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramScope.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.util.lang.ClassScope; public interface GramScope extends ClassScope { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramTransaction.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.hydra.orchestration.GraphStratum; import com.pinecone.hydra.orchestration.IntegrityLevel; import com.pinecone.hydra.orchestration.Transaction; import java.util.List; public interface GramTransaction extends Transaction, GraphStratum { String ConfigTransactionsListKey = "Transactions" ; String ConfigTransactionNameKey = "Name" ; String ConfigPrimaryNameKey = "Primary" ; GramTransaction loadActionsFromConfig(); List getTransactionList(); class TransactionArgs { String name ; ActionType type ; IntegrityLevel level ; TransactionArgs( String name, ActionType type, IntegrityLevel level ) { this.name = name; this.type = type; this.level = level; } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramFactory.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.executum.TaskManager; public class LocalGramFactory extends ArchGramFactory { public LocalGramFactory( TaskManager taskManager, ClassLoader classLoader, MultiGramsLoader gramLoader, GramScope gramScope ) { super( taskManager, classLoader, gramLoader, gramScope ); } public LocalGramFactory( TaskManager taskManager ) { this( taskManager, taskManager.getClassLoader(), null, null ); this.mClassScope = new LocalGramScopeSet( this ); this.mTraitClassLoader = new LocalGramLoader( this ); } public LocalGramFactory( TaskManager taskManager, GramScope gramScope ) { this( taskManager, taskManager.getClassLoader(), null, gramScope ); this.mTraitClassLoader = new LocalGramLoader( this ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramLoader.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.hydra.servgram.filters.ExcludeGramFilters; import com.pinecone.hydra.servgram.filters.GramAnnotationValueFilter; public class LocalGramLoader extends ArchGramLoader { public LocalGramLoader( GramScope gramScope, ClassLoader classLoader ) { super( gramScope, classLoader ); this.mClassScanner.addExcludeFilter( new ExcludeGramFilters( this.mClassInspector ) ); this.setAnnotationValueFilter( new GramAnnotationValueFilter() ); } public LocalGramLoader( GramFactory factory ) { this( factory.getClassScope(), factory.getClassLoader() ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramScopeSet.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.unit.LinkedTreeSet; import com.pinecone.framework.util.lang.ScopedPackage; import java.util.Set; public class 
LocalGramScopeSet extends ArchGramScopeSet { public LocalGramScopeSet( Set scope, ClassLoader classLoader ) { super( scope, classLoader ); } public LocalGramScopeSet( ClassLoader classLoader ) { super( new LinkedTreeSet<>(), classLoader ); } public LocalGramScopeSet( GramFactory factory ) { super( new LinkedTreeSet<>(), factory.getClassLoader() ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramTransaction.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.ApoptosisRejectSignalException; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.orchestration.*; import com.pinecone.hydra.orchestration.regulation.NeglectRegulation; import java.util.List; public class LocalGramTransaction extends ArchProcessum implements GramTransaction, Processum { private List mActionList ; private ActionType mActionType ; protected ServgramOrchestrator mOrchestrator ; protected Transaction mTransaction ; public LocalGramTransaction( String name, ServgramOrchestrator orchestrator, Processum parent ) { super( name, parent ); this.mOrchestrator = orchestrator; this.mActionType = ActionType.queryActionType( this.mOrchestrator.getOrchestrationConfig().get( ActionType.ConfigActionTypeKey ).toString() ); this.mActionList = (List)((JSONObject) orchestrator.getOrchestrationConfig()).get( GramTransaction.ConfigTransactionsListKey ); this.prepareTransactionByType(); this.setName( name ); orchestrator.tracer().info( String.format( "[Lifecycle] [%s, %s] ", name, this.mActionType ) ); } public LocalGramTransaction( ServgramOrchestrator orchestrator, Processum parent ) { this( orchestrator.getOrchestrationConfig().getOrDefault( GramTransaction.ConfigTransactionNameKey, "Anonymous" ).toString(), orchestrator, parent ); } // Children Transaction public LocalGramTransaction( String name, ActionType actionType, List actionList, ServgramOrchestrator orchestrator, Processum parent ) { super( name, parent ); this.mOrchestrator = orchestrator; this.mActionType = actionType; this.mActionList = actionList; this.prepareTransactionByType(); this.setName( name ); } protected void prepareTransactionByType() { switch ( this.mActionType ) { case Loop: { this.mTransaction = new LoopAction(); break; } case Parallel:{ this.mTransaction = new ParallelAction(); break; } case Sequential:{ this.mTransaction = new SequentialAction(); break; } default: { throw new IllegalArgumentException( "MasterTransaction can only be [Loop, Parallel, Sequential]." 
); } } } @Override public void apoptosis() throws ApoptosisRejectSignalException { this.terminate(); } @Override public void kill() { this.terminate(); if( !this.isEnded() ) { super.kill(); } } @Override public GramTransaction loadActionsFromConfig() { return this; } @Override public List getTransactionList() { return this.mActionList; } @Override public void add( Exertion exertion ) { this.mTransaction.add(exertion); } @Override public void addFirst( Exertion exertion ) { this.mTransaction.addFirst(exertion); } @Override public void reset() { this.mTransaction.reset(); } @Override public void start() { this.mTransaction.start(); } @Override public void terminate() { this.mTransaction.terminate(); } @Override public void rollback() { this.mTransaction.rollback(); } @Override public NeglectRegulation getSeqExceptionNeglector(){ return this.mTransaction.getSeqExceptionNeglector(); } @Override public void setSeqExceptionNeglector( NeglectRegulation neglector ) { this.mTransaction.setSeqExceptionNeglector( neglector ); } @Override public ExertionStatus getStatus() { return this.mTransaction.getStatus(); } @Override public String getName(){ return this.mTransaction.getName(); } @Override public void setName( String name ){ this.mTransaction.setName( name ); } @Override public IntegrityLevel getIntegrityLevel(){ return this.mTransaction.getIntegrityLevel(); } @Override public void setIntegrityLevel( IntegrityLevel level ){ this.mTransaction.setIntegrityLevel( level ); } @Override public long getStartNano() { return this.mTransaction.getStartNano(); } @Override public void setDefaultRollback( boolean b ){ this.mTransaction.setDefaultRollback( b ); } @Override public boolean isDefaultRollback(){ return this.mTransaction.isDefaultRollback(); } @Override public int getStratumId(){ return this.mTransaction.getStratumId(); } @Override public ArchGraphNode parent(){ return (ArchGraphNode)this.mTransaction.parent(); } @Override public List getChildren() { return ( (GraphStratum)this.mTransaction ).getChildren(); } @Override public Exception getLastError() { return this.mTransaction.getLastError(); } @Override public void registerExertionStartCallback( ExertionEventCallback callback ) { this.mTransaction.registerExertionStartCallback( callback ); } @Override public void registerExertionEndCallback( ExertionEventCallback callback ) { this.mTransaction.registerExertionEndCallback( callback ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalParallelGramExertium.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.NotImplementedException; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.Executum; import com.pinecone.hydra.orchestration.parallel.ArchMasterParallelium; public class LocalParallelGramExertium extends ArchMasterParallelium { protected Servgram mWrapServgram; protected ServgramOrchestrator mOrchestrator; public LocalParallelGramExertium( ServgramOrchestrator orchestrator, Servgram servgram ) { this.mWrapServgram = servgram; this.mOrchestrator = orchestrator; this.setName( servgram.getName() ); } @Override protected void doStart() { try{ Thread thisThread = this.getMasterExecutum().getAffiliateThread(); thisThread.setName( this.nomenclature( thisThread ) ); this.mWrapServgram.execute(); } catch ( Exception e ) { throw new ProvokeHandleException( e ); } } @Override protected void 
doTerminate() { LocalSequentialGramExertium.terminate( this.mWrapServgram ); } @Override protected void doRollback() { throw new NotImplementedException(); } @Override protected void intoStart() { super.intoStart(); this.notifyExecuting(); } @Override protected void intoFinished() { super.intoFinished(); this.notifyFinished(); } @Override protected void intoTerminated() { super.intoTerminated(); this.notifyFinished(); } @Override protected void intoRollback() { super.intoRollback(); this.notifyExecuting(); } @Override protected void intoError( Exception e ) { super.intoError( e ); this.notifyFinished(); } protected void notifyExecuting() { if( this.mWrapServgram instanceof Executum ) { this.mOrchestrator.notifyExecuting( (Executum)this.mWrapServgram ); } } protected void notifyFinished() { if( this.mWrapServgram instanceof Executum ) { this.mOrchestrator.notifyFinished( (Executum)this.mWrapServgram ); } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalSequentialGramExertium.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.ApoptosisRejectSignalException; import com.pinecone.framework.system.NotImplementedException; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.Executum; import com.pinecone.hydra.orchestration.ArchExertion; public class LocalSequentialGramExertium extends ArchExertion { protected Servgram mWrapServgram; protected ServgramOrchestrator mOrchestrator; public LocalSequentialGramExertium( ServgramOrchestrator orchestrator, Servgram servgram ) { this.mWrapServgram = servgram; this.mOrchestrator = orchestrator; this.setName( servgram.getName() ); } @Override protected void doStart() { try{ this.mWrapServgram.execute(); } catch ( Exception e ) { throw new ProxyProvokeHandleException( e ); } } @Override protected void doTerminate() { LocalSequentialGramExertium.terminate( this.mWrapServgram ); } @Override protected void doRollback() { throw new NotImplementedException(); } protected static void terminate( Servgram servgram ) throws ProxyProvokeHandleException { try{ try{ servgram.terminate(); } catch ( ApoptosisRejectSignalException e ) { if( servgram instanceof Servgramium ) { ((Servgramium) servgram).kill(); } } } catch ( Exception e ) { throw new ProxyProvokeHandleException( e ); } } @Override protected void intoStart() { super.intoStart(); this.notifyExecuting(); } @Override protected void intoFinished() { super.intoFinished(); this.notifyFinished(); } @Override protected void intoTerminated() { super.intoTerminated(); this.notifyFinished(); } @Override protected void intoRollback() { super.intoRollback(); this.notifyExecuting(); } @Override protected void intoError( Exception e ) { super.intoError( e ); this.notifyFinished(); } protected void notifyExecuting() { if( this.mWrapServgram instanceof Executum ) { this.mOrchestrator.notifyExecuting( (Executum)this.mWrapServgram ); } } protected void notifyFinished() { if( this.mWrapServgram instanceof Executum ) { this.mOrchestrator.notifyFinished( (Executum)this.mWrapServgram ); } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalServgramOrchestrator.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.Nullable; import 
com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.name.Name; import java.util.List; public class LocalServgramOrchestrator extends ArchServgramOrchestrator { public LocalServgramOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) { super( parent, sectionConfig, factory, transaction ); } public LocalServgramOrchestrator( Processum parent, PatriarchalConfig sectionConfig ) { super( parent, sectionConfig, null, null ); this.prepareFactory( new LocalGramFactory( this ) ); this.setTransaction( new LocalGramTransaction( this, parent ) ); } @Override protected List popping( String szName ) { return ( (GramFactory)this.getClassFactory() ).popping( szName, szName, this.getSystem() ); } @Override protected List popping( Name name ) { return ( (GramFactory)this.getClassFactory() ).popping( name, name.getName(), this.getSystem() ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/MultiGramsLoader.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.util.lang.MultiClassScopeLoader; import com.pinecone.framework.util.name.Name; import com.pinecone.ulf.util.lang.MultiTraitClassLoader; import java.util.List; public interface MultiGramsLoader extends GramLoader, MultiClassScopeLoader, MultiTraitClassLoader { @Override List<Class<?>> loads( Name name ) ; @Override List<Class<?>> loadsByName( Name simpleName ); @Override List<Class<?>> loadsInClassTrait( Name simpleName ) ; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/OrchestrateInterruptException.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.prototype.Pinenut; public class OrchestrateInterruptException extends Exception implements Pinenut { public OrchestrateInterruptException () { super(); } public OrchestrateInterruptException ( String message ) { super(message); } public OrchestrateInterruptException ( String message, Throwable cause ) { super(message, cause); } public OrchestrateInterruptException ( Throwable cause ) { super(cause); } protected OrchestrateInterruptException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Servgram.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.config.PatriarchalConfig; public interface Servgram extends Pinenut { String getName(); default String gramName(){ return this.className(); } PatriarchalConfig getConfig(); RuntimeSystem parentSystem(); void terminate() ; void execute() throws Exception ; // Who summoned me ?
ServgramOrchestrator getAttachedOrchestrator(); String ConfigServgramsKey = "Servgrams"; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ServgramOrchestrator.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.ulf.util.lang.MultiScopeFactory; import org.slf4j.Logger; public interface ServgramOrchestrator extends AutoOrchestrator { PatriarchalConfig getSectionConfig(); PatriarchalConfig getOrchestrationConfig(); GramTransaction getTransaction(); void setTransaction( GramTransaction transaction ); void onlyOrchestrateTransaction() throws OrchestrateInterruptException ; void startTransaction() throws OrchestrateInterruptException ; Logger tracer(); MultiScopeFactory getClassFactory(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Servgramium.java ================================================ package com.pinecone.hydra.servgram; import com.pinecone.framework.system.executum.Processum; public interface Servgramium extends Servgram, Processum { @Override default void terminate() { this.apoptosis(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Servgramlet.java ================================================ package com.pinecone.hydra.servgram; public interface Servgramlet extends Servgram { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/filters/AnnotationValueFilter.java ================================================ package com.pinecone.hydra.servgram.filters; import com.pinecone.framework.system.prototype.Pinenut; import javassist.bytecode.annotation.Annotation; public interface AnnotationValueFilter extends Pinenut { boolean match( Annotation that, String destinationName ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/filters/ExcludeGramFilters.java ================================================ package com.pinecone.hydra.servgram.filters; import com.pinecone.framework.util.lang.TypeFilter; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import javassist.ClassPool; import javassist.CtClass; import javassist.NotFoundException; import java.io.IOException; public class ExcludeGramFilters implements TypeFilter { protected HierarchyClassInspector mClassInspector; public ExcludeGramFilters( HierarchyClassInspector inspector ) { this.mClassInspector = inspector; } @Override public boolean match( String szClassName, Object pool ) throws IOException { try{ CtClass clz = ( (ClassPool) pool ).get( szClassName ); if( clz.isInterface() ) { return true; } if( this.mClassInspector.isImplemented( clz, com.pinecone.hydra.servgram.Servgram.class ) ) { return false; } return !this.mClassInspector.hasOwnAnnotation( clz, com.pinecone.hydra.servgram.Gram.class ) ; } catch ( NotFoundException e ) { return true; } } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/filters/GramAnnotationValueFilter.java ================================================ package com.pinecone.hydra.servgram.filters; import com.pinecone.hydra.servgram.Gram; import javassist.bytecode.annotation.Annotation; public 
class GramAnnotationValueFilter implements AnnotationValueFilter { public boolean match( Annotation that, String destinationName ) { if( that.getTypeName().equals( com.pinecone.hydra.servgram.Gram.class.getName() ) ) { String szAN = that.getMemberValue( Gram.ValueKey ).toString(); if( szAN.startsWith( "\"" ) ){ return !szAN.equals("\"" + destinationName + "\""); } return !szAN.equals( destinationName ); } return true; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/App.java ================================================ package com.pinecone.hydra.task; import com.pinecone.framework.util.id.GUID; public interface App extends TaskFamilyMeta { long getEnumId(); GUID getGuid(); String getName(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchInstanceMeta.java ================================================ package com.pinecone.hydra.task; import java.time.LocalDateTime; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; public abstract class ArchInstanceMeta implements TaskInstanceMeta { protected GUID guid; protected GUID taskGuid; protected String instanceName; protected String taskName; protected LocalDateTime businessTime; protected short priority; protected String imagePath; protected short actuallyPriority; protected TaskInstanceStatus instanceStatus; protected String taskType; protected int runCount; protected int sequenceCnt; protected int retryCnt; protected String errorCause; protected boolean dryRun; protected TaskScheduleCycle scheduleCycle; protected TaskScheduleType scheduleType; protected LocalDateTime lastStartTime; protected LocalDateTime lastEndTime; protected LocalDateTime expectTime; protected LocalDateTime fireTime; protected LocalDateTime startTime; protected LocalDateTime finishTime; protected LocalDateTime scheduleHostTime; protected LocalDateTime submitTime; protected LocalDateTime scheduleTime; protected String processorName; protected LocalDateTime createTime; protected LocalDateTime updateTime; @Override public GUID getGuid() { return this.guid; } @Override public GUID getTaskGuid() { return this.taskGuid; } @Override public String getInstanceName() { return this.instanceName; } @Override public String getTaskName() { return this.taskName; } @Override public LocalDateTime getBusinessTime() { return this.businessTime; } @Override public short getPriority() { return this.priority; } @Override public String getImagePath() { return this.imagePath; } @Override public short getActuallyPriority() { return this.actuallyPriority; } @Override public TaskInstanceStatus getInstanceStatus() { return this.instanceStatus; } @Override public String getTaskType() { return this.taskType; } @Override public int getRunCount() { return this.runCount; } @Override public int getSequenceCnt() { return this.sequenceCnt; } @Override public int getRetryCnt() { return this.retryCnt; } @Override public String getErrorCause() { return this.errorCause; } @Override public boolean isDryRun() { return this.dryRun; } @Override public TaskScheduleCycle getKernelScheduleCycle() { return this.scheduleCycle; } @Override public TaskScheduleType getKernelScheduleType() { return this.scheduleType; } @Override public LocalDateTime getLastStartTime() { return this.lastStartTime; } @Override public LocalDateTime getLastEndTime() { return 
this.lastEndTime; } @Override public LocalDateTime getExpectTime() { return this.expectTime; } @Override public LocalDateTime getFireTime() { return this.fireTime; } @Override public LocalDateTime getStartTime() { return this.startTime; } @Override public LocalDateTime getFinishTime() { return this.finishTime; } @Override public LocalDateTime getScheduleHostTime() { return this.scheduleHostTime; } @Override public LocalDateTime getSubmitTime() { return this.submitTime; } @Override public LocalDateTime getScheduleTime() { return this.scheduleTime; } @Override public void setExpectTime(LocalDateTime expectTime) { this.expectTime = expectTime; } @Override public void setFireTime(LocalDateTime fireTime) { this.fireTime = fireTime; } @Override public void setStartTime(LocalDateTime startTime) { this.startTime = startTime; } @Override public void setFinishTime(LocalDateTime finishTime) { this.finishTime = finishTime; } @Override public void setScheduleHostTime(LocalDateTime scheduleHostTime) { this.scheduleHostTime = scheduleHostTime; } @Override public void setSubmitTime(LocalDateTime submitTime) { this.submitTime = submitTime; } @Override public void setScheduleTime(LocalDateTime scheduleTime) { this.scheduleTime = scheduleTime; } @Override public String getProcessorName() { return this.processorName; } @Override public void setProcessorName(String processorName) { this.processorName = processorName; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchTask.java ================================================ package com.pinecone.hydra.task; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.task.kom.entity.TaskElement; public abstract class ArchTask implements Task { protected Identification mServiceId; protected TaskElement mTaskElement; protected Map mMetaDataScope; public ArchTask( Identification serviceId, TaskElement serviceElement, Map metaDataScope ){ this.mServiceId = serviceId; this.mTaskElement = serviceElement; this.mMetaDataScope = metaDataScope; } public ArchTask( Identification serviceId, TaskElement serviceElement ){ this( serviceId, serviceElement, null ); } public TaskElement getTaskElement() { return this.mTaskElement; } @Override public String getName() { return this.mTaskElement.getName(); } @Override public String getDisplayName() { return this.mTaskElement.getName(); } @Override public String getFullName() { return this.mTaskElement.getKomPath(); } public GUID getGuid() { return this.mTaskElement.getGuid(); } @Override public Identification getId() { return this.getGuid(); } @Override public String getScenario() { return this.mTaskElement.getScenario(); } @Override public String getMarshallingArchitecture() { return this.mTaskElement.getMarshallingArchitecture(); } @Override public String getExtraInformation() { return this.mTaskElement.getExtraInformation(); } @Override public short getPriority() { return this.mTaskElement.getPriority(); } @Override public short getActuallyPriority() { return this.mTaskElement.getActuallyPriority(); } @Override public boolean isDryRun() { return this.mTaskElement.isDryRun(); } @Override public boolean isEnable() { return this.mTaskElement.isEnable(); } 
@Override public String getDescription() { return this.mTaskElement.getDescription(); } @Override public String getProcessorName() { return this.mTaskElement.getProcessorName(); } @Override public Map getMetaDataScope() { return this.mMetaDataScope; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchTaskFamilyMeta.java ================================================ package com.pinecone.hydra.task; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.ulf.util.guid.GUIDs; public abstract class ArchTaskFamilyMeta implements TaskFamilyMeta { protected GUID guid; protected String name; protected String scenario; protected String marshallingArchitecture; protected String extraInformation; protected String szElementaryConfig; protected Map elementaryConfig; protected String description; public ArchTaskFamilyMeta() { } public ArchTaskFamilyMeta( Map joEntity ) { this.apply( joEntity ); } protected ArchTaskFamilyMeta apply( Map joEntity ) { String szGuid = (String) joEntity.get( "guid" ); if( szGuid != null ) { this.guid = GUIDs.GUID128( (String) joEntity.get( "guid" ) ); } BeanMapDecoder.BasicDecoder.decode( this, joEntity ); return this; } public GUID getGuid() { return this.guid; } @Override public Identification getId() { return this.getGuid(); } @Override public String getName() { return this.name; } @Override public String getScenario() { return this.scenario; } @Override public String getMarshallingArchitecture() { return this.marshallingArchitecture; } @Override public String getExtraInformation() { return this.extraInformation; } @Override public String getDescription() { return this.description; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchTaskInstance.java ================================================ package com.pinecone.hydra.task; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.instance.InstanceEntry; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; import java.time.LocalDateTime; public abstract class ArchTaskInstance implements TaskInstance { protected InstanceEntry mInstanceEntry; protected Task mOwnedTask; public ArchTaskInstance( InstanceEntry instanceEntry, Task ownedTask ) { this.mInstanceEntry = instanceEntry; this.mOwnedTask = ownedTask; } @Override public Task getOwnedTask() { return this.mOwnedTask; } @Override public TaskInstrument getTaskInstrument() { return this.mInstanceEntry.getTaskInstrument(); } @Override public String getRunStatus() { return this.mInstanceEntry.getRunStatus(); } @Override public GUID getGuid() { return this.mInstanceEntry.getGuid(); } @Override public GUID getTaskGuid() { return this.mInstanceEntry.getTaskGuid(); } @Override public String getInstanceName() { return this.mInstanceEntry.getInstanceName(); 
} @Override public LocalDateTime getBusinessTime() { return this.mInstanceEntry.getBusinessTime(); } @Override public short getPriority() { return this.mInstanceEntry.getPriority(); } @Override public String getImagePath() { return this.mInstanceEntry.getImagePath(); } @Override public short getActuallyPriority() { return this.mInstanceEntry.getActuallyPriority(); } @Override public TaskInstanceStatus getInstanceStatus() { return this.mInstanceEntry.getInstanceStatus(); } @Override public String getTaskType() { return this.mInstanceEntry.getTaskType(); } @Override public int getRunCount() { return this.mInstanceEntry.getRunCount(); } @Override public int getRetryCnt() { return this.mInstanceEntry.getRetryCnt(); } @Override public int getSequenceCnt() { return this.mInstanceEntry.getSequenceCnt(); } @Override public String getErrorCause() { return this.mInstanceEntry.getErrorCause(); } @Override public boolean isDryRun() { return this.mInstanceEntry.isDryRun(); } @Override public TaskScheduleCycle getKernelScheduleCycle() { return this.mInstanceEntry.getKernelScheduleCycle(); } @Override public TaskScheduleType getKernelScheduleType() { return this.mInstanceEntry.getKernelScheduleType(); } @Override public LocalDateTime getLastStartTime() { return this.mInstanceEntry.getLastStartTime(); } @Override public LocalDateTime getLastEndTime() { return this.mInstanceEntry.getLastEndTime(); } @Override public LocalDateTime getCreateTime() { return this.mInstanceEntry.getCreateTime(); } @Override public LocalDateTime getUpdateTime() { return this.mInstanceEntry.getUpdateTime(); } @Override public InstanceEntry getInstanceEntry() { return this.mInstanceEntry; } @Override public LocalDateTime getExpectTime() { return this.mInstanceEntry.getExpectTime(); } @Override public LocalDateTime getFireTime() { return this.mInstanceEntry.getFireTime(); } @Override public LocalDateTime getStartTime() { return this.mInstanceEntry.getStartTime(); } @Override public LocalDateTime getFinishTime() { return this.mInstanceEntry.getFinishTime(); } @Override public LocalDateTime getScheduleHostTime() { return this.mInstanceEntry.getScheduleHostTime(); } @Override public LocalDateTime getSubmitTime() { return this.mInstanceEntry.getSubmitTime(); } @Override public LocalDateTime getScheduleTime() { return this.mInstanceEntry.getScheduleTime(); } @Override public String getProcessorName() { return this.mInstanceEntry.getProcessorName(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/InstanceEventType.java ================================================ package com.pinecone.hydra.task; public enum InstanceEventType { TaskSubmit ( "TaskSubmit" ), TaskTimeReady ( "TaskTimeReady" ), CheckDependencyReady ( "CheckDependencyReady" ), DepartureReady ( "DepartureReady" ), // Instance departure complete; entered the worker node, awaiting formal remote execution. TaskRun ( "TaskRun" ), // Instance startup complete; entered the worker node, remote process now running. TaskExecSuccess ( "TaskExecSuccess" ), TaskExecFail ( "TaskExecFail" ), TaskSuccess ( "TaskSuccess" ), TaskFail ( "TaskFail" ), AuditPost ( "AuditPost" ), AuditSuccess ( "AuditSuccess" ), AuditFail ( "AuditFail" ), TaskKilled ( "TaskKilled" ); private final String name; InstanceEventType( String name ) { this.name = name; } public String getName() { return this.name; } public static InstanceEventType valueOfName( String name ) { try { return InstanceEventType.valueOf(name); } catch (IllegalArgumentException e) { return null; } } } ================================================ FILE:
Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ProcApp.java ================================================ package com.pinecone.hydra.task; public interface ProcApp extends App { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/Task.java ================================================ package com.pinecone.hydra.task; import java.util.Map; import com.pinecone.hydra.task.kom.entity.TaskElement; public interface Task extends TaskFamilyMeta { TaskElement getTaskElement(); String getName(); // Service Name, e.g. WpnService String getDisplayName(); // Service Long Name, e.g. Windows Push Notification System Service String getDescription(); String getFullName(); String getScenario() ; String getMarshallingArchitecture() ; String getExtraInformation() ; short getPriority(); short getActuallyPriority(); boolean isDryRun() ; boolean isEnable() ; String getProcessorName(); Map getMetaDataScope(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskApp.java ================================================ package com.pinecone.hydra.task; public interface TaskApp extends App { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskExtraMeta.java ================================================ package com.pinecone.hydra.task; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface TaskExtraMeta extends Pinenut { TaskFamilyMeta getKernelMeta(); GUID getGuid() ; String getTaskName(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskFamilyMeta.java ================================================ package com.pinecone.hydra.task; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; public interface TaskFamilyMeta extends Pinenut { //long getEnumId(); //GUID getGuid(); Identification getId() ; String getName(); String getScenario(); String getMarshallingArchitecture(); String getExtraInformation(); String getDescription(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstance.java ================================================ package com.pinecone.hydra.task; import java.time.LocalDateTime; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.instance.InstanceEntry; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; public interface TaskInstance extends Pinenut { default Identification getId() { return this.getGuid(); } Object getProcessObject(); Task getOwnedTask(); TaskInstrument getTaskInstrument(); String getRunStatus (); GUID getGuid(); GUID getTaskGuid(); String getInstanceName(); LocalDateTime getBusinessTime (); short getPriority(); String getImagePath(); short getActuallyPriority(); TaskInstanceStatus getInstanceStatus (); String getTaskType (); int getRunCount (); int getSequenceCnt(); int getRetryCnt(); boolean isDryRun() ; String getErrorCause(); TaskScheduleCycle getKernelScheduleCycle (); TaskScheduleType 
getKernelScheduleType (); LocalDateTime getLastStartTime (); LocalDateTime getLastEndTime (); LocalDateTime getCreateTime (); LocalDateTime getUpdateTime (); InstanceEntry getInstanceEntry(); LocalDateTime getExpectTime(); LocalDateTime getFireTime(); LocalDateTime getStartTime(); LocalDateTime getFinishTime(); LocalDateTime getScheduleHostTime(); LocalDateTime getSubmitTime(); LocalDateTime getScheduleTime(); String getProcessorName(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstanceExecState.java ================================================ package com.pinecone.hydra.task; public enum TaskInstanceExecState { Success ( "Success" ), Fail ( "Fail" ), Running ( "Running" ), Submitted ( "Submitted" ), Killed ( "Killed" ), ; private final String name; TaskInstanceExecState( String name ) { this.name = name; } public String getName() { return this.name; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstanceMeta.java ================================================ package com.pinecone.hydra.task; import java.time.LocalDateTime; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; public interface TaskInstanceMeta extends Pinenut { GUID getGuid(); GUID getTaskGuid(); String getInstanceName(); String getTaskName(); LocalDateTime getBusinessTime (); short getPriority(); String getImagePath(); short getActuallyPriority(); TaskInstanceStatus getInstanceStatus (); String getTaskType (); int getRunCount (); int getSequenceCnt(); int getRetryCnt(); String getErrorCause(); boolean isDryRun() ; TaskScheduleCycle getKernelScheduleCycle (); TaskScheduleType getKernelScheduleType (); LocalDateTime getLastStartTime (); LocalDateTime getLastEndTime (); LocalDateTime getCreateTime (); LocalDateTime getUpdateTime (); LocalDateTime getExpectTime(); LocalDateTime getFireTime(); LocalDateTime getStartTime(); LocalDateTime getFinishTime(); LocalDateTime getScheduleHostTime(); LocalDateTime getSubmitTime(); LocalDateTime getScheduleTime(); String getProcessorName(); void setExpectTime(LocalDateTime expectTime); void setFireTime(LocalDateTime fireTime); void setStartTime(LocalDateTime startTime); void setFinishTime(LocalDateTime finishTime); void setScheduleHostTime(LocalDateTime scheduleHostTime); void setSubmitTime(LocalDateTime submitTime); void setScheduleTime(LocalDateTime scheduleTime); void setProcessorName(String processorName); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstanceStatus.java ================================================ package com.pinecone.hydra.task; public enum TaskInstanceStatus { New ( 0x1000, "WAIT" , "New" ), // Instance created, pending for starting and scheduling. DependencyWait ( 0x1001, "WAIT" , "DependencyWait" ), // DAG dependency wait. ResourceWait ( 0x1002, "WAIT" , "ResourceWait" ), // Waiting for resource allocation. DepartureStandby ( 0x1003, "WAIT" , "DepartureStandby" ), // Ready to launch. ProcessStandby ( 0x1004, "WAIT" , "ProcessStandby" ), // Process spawned. Running ( 0x2000, "RUNNING" , "Running" ), // Running. Audit ( 0x2001, "RUNNING" , "Audit" ), // Auditing and checking. [e.g. DQC check, Event check, etc.] 
Suspended ( 0x3000, "SUSPENDED", "Suspended" ), // Process suspended. Finished ( 0x4000, "SUCCESS" , "Finished" ), // Finished termination. Killed ( 0x5000, "KILLED" , "Killed" ), // Forced termination. Error ( 0x6000, "FAIL" , "Error" ), // Error termination. AuditFailed ( 0x6001, "FAIL" , "AuditFailed" ), // Auditing failed. [e.g. DQC failed etc.] ; private final int code; private final String name; private final String group; TaskInstanceStatus( int code, String group, String name ) { this.code = code; this.group = group; this.name = name; } public String getName(){ return this.name; } public int getCode() { return this.code; } public String getGroup() { return this.group; } public static TaskInstanceStatus getByName(String name ) { for ( TaskInstanceStatus status : TaskInstanceStatus.values() ) { if ( status.name.equals(name) ) { return status; } } return null; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/Taskiom.java ================================================ package com.pinecone.hydra.task; public interface Taskiom extends Task { } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/Taskium.java ================================================ package com.pinecone.hydra.task; import com.pinecone.framework.system.executum.Processum; public interface Taskium extends TaskInstance { @Override Processum getProcessObject(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/KernelTaskConfig.java ================================================ package com.pinecone.hydra.task.kom; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; public class KernelTaskConfig extends ArchKernelObjectConfig implements TaskConfig { protected String mszInstanceTitleTimeFormat = TaskMetaConstants.InstanceTitleTimeFormat; @Override public String getInstanceTitleTimeFormat() { return this.mszInstanceTitleTimeFormat; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskConfig.java ================================================ package com.pinecone.hydra.task.kom; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface TaskConfig extends KernelObjectConfig { String getInstanceTitleTimeFormat(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskFamilyNode.java ================================================ package com.pinecone.hydra.task.kom; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.task.TaskFamilyMeta; public interface TaskFamilyNode extends TaskFamilyMeta { long getEnumId(); void setEnumId(long id); void setName(String name); GUID getGuid(); void setGuid(GUID guid); @Override default Identification getId() { return this.getGuid(); } String getScenario(); void setScenario( String scenario ); String getMarshallingArchitecture(); void setMarshallingArchitecture( String marshallingArchitecture ); String getExtraInformation(); void setExtraInformation( String extraInformation ); String getDescription(); void setDescription( String description ); TaskFamilyNode apply( Map joEntity ) ; } ================================================ FILE: 
Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskInstrument.java ================================================ package com.pinecone.hydra.task.kom; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.kom.entity.AppElement; import com.pinecone.hydra.task.kom.entity.ElementNode; import com.pinecone.hydra.task.kom.entity.Namespace; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.system.ko.kom.ReparseKOMTree; import com.pinecone.hydra.task.kom.instance.InstanceInstrument; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface TaskInstrument extends ReparseKOMTree { TaskConfig KernelServiceConfig = new KernelTaskConfig(); AppElement affirmJob(String path ); Namespace affirmNamespace( String path ); TaskElement affirmTask( String path ,TaskElement metaInfos ); ElementNode queryElement( String path ); boolean containsChild( GUID parentGuid, String childName ); void update( TreeNode treeNode ); InstanceInstrument getInstanceInstrument(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskMetaConstants.java ================================================ package com.pinecone.hydra.task.kom; public final class TaskMetaConstants { public static final String InstanceTitleTimeFormat = "yyyyMMdd_HHmmss"; } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskPathSelector.java ================================================ package com.pinecone.hydra.task.kom; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.kom.StandardPathSelector; import com.pinecone.hydra.unit.imperium.ImperialTree; public class TaskPathSelector extends StandardPathSelector { public TaskPathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, dirMan, fileMans ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/UniformTaskInstrument.java ================================================ package com.pinecone.hydra.task.kom; import java.util.ArrayList; import java.util.List; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.task.kom.entity.AppElement; import com.pinecone.hydra.task.kom.entity.ElementNode; import com.pinecone.hydra.task.kom.entity.GenericAppElement; import com.pinecone.hydra.task.kom.entity.GenericNamespace; import com.pinecone.hydra.task.kom.entity.GenericTaskElement; import com.pinecone.hydra.task.kom.entity.Namespace; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.kom.entity.TaskTreeNode; import com.pinecone.hydra.task.kom.instance.InstanceInstrument; import com.pinecone.hydra.task.kom.instance.KernelInstanceInstrument; import com.pinecone.hydra.task.kom.operator.GenericElementOperatorFactory; import com.pinecone.hydra.task.kom.source.AppNodeManipulator; import com.pinecone.hydra.task.kom.source.TaskMasterManipulator; import com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator; import 
com.pinecone.hydra.task.kom.source.TaskNodeManipulator; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree; import com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition; import com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.RegimentedImperialTree; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.ulf.util.guid.GUIDs; import com.pinecone.ulf.util.guid.i128.GuidAllocator128V7; public class UniformTaskInstrument extends ArchReparseKOMTree implements TaskInstrument { //GenericDistributedScopeTree protected ImperialTree imperialTree; protected TaskMasterManipulator taskMasterManipulator; protected TaskNamespaceManipulator taskNamespaceManipulator; protected AppNodeManipulator appNodeManipulator; protected TaskNodeManipulator taskNodeManipulator; protected List folderManipulators; protected List fileManipulators; protected InstanceInstrument instanceInstrument; public UniformTaskInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, TaskInstrument parent, String name, KernelObjectConfig config, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, masterManipulator, TaskInstrument.KernelServiceConfig, parent, name, guidAllocator ); this.taskMasterManipulator = (TaskMasterManipulator) masterManipulator; this.taskNamespaceManipulator = this.taskMasterManipulator.getNamespaceManipulator(); this.appNodeManipulator = this.taskMasterManipulator.getAppNodeManipulator(); this.taskNodeManipulator = this.taskMasterManipulator.getTaskNodeManipulator(); KOISkeletonMasterManipulator skeletonMasterManipulator = this.taskMasterManipulator.getSkeletonMasterManipulator(); TreeMasterManipulator treeMasterManipulator = (TreeMasterManipulator) skeletonMasterManipulator; this.imperialTree = new RegimentedImperialTree(treeMasterManipulator); this.operatorFactory = new GenericElementOperatorFactory(this,(TaskMasterManipulator) masterManipulator); this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); // TODO for customize service tree architecture. 
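// Namespace and app nodes are registered below as folder-like containers, while app and task nodes are registered as file-like leaves. App nodes appear in both families, so the MultiFolderPathSelector can resolve them either as an intermediate folder or as a terminal path segment.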
this.folderManipulators = new ArrayList<>( List.of( this.taskNamespaceManipulator, this.appNodeManipulator) ); this.fileManipulators = new ArrayList<>( List.of( this.appNodeManipulator, this.taskNodeManipulator) ); this.pathSelector = new MultiFolderPathSelector( this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} ) ); this.mReparseKOM = new GenericReparseKOMTreeAddition( this ); this.instanceInstrument = new KernelInstanceInstrument( this, this.taskMasterManipulator.getInstanceNodeManipulator() ); this.kernelObjectConfig = config; } public UniformTaskInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, KernelObjectConfig config ) { this( superiorProcess, masterManipulator, null, TaskInstrument.class.getSimpleName(), config, new GuidAllocator128V7()); } // public UniformTaskInstrument( Hydrogen hydrogen ) { // this.hydrogen = hydrogen; // } public UniformTaskInstrument( KOIMappingDriver driver, KernelObjectConfig config ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), config ); } public UniformTaskInstrument( KOIMappingDriver driver, TaskInstrument parent, String name, KernelObjectConfig config ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, config, null ); } protected TaskTreeNode affirmTreeNodeByPath( String path, Class cnSup, Class nsSup ) { String[] parts = this.pathResolver.segmentPathParts( path ); String currentPath = ""; GUID parentGuid = GUIDs.Dummy128(); TaskTreeNode node = this.queryElement(path); if ( node != null ){ return node; } TaskTreeNode ret = null; for( int i = 0; i < parts.length; ++i ){ currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : "" ) + parts[ i ]; node = this.queryElement( currentPath ); if ( node == null){ if ( i == parts.length - 1 && cnSup != null ){ ElementNode en = (ElementNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } ); en.setName( parts[i] ); GUID guid = this.put( en ); this.affirmOwnedNode( parentGuid, guid ); return en; } else { Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } ); namespace.setName( parts[i] ); GUID guid = this.put( namespace ); if ( i != 0 ){ this.affirmOwnedNode( parentGuid, guid ); parentGuid = guid; } else { parentGuid = guid; } ret = namespace; } } else { parentGuid = node.getGuid(); } } return ret; } @Override public InstanceInstrument getInstanceInstrument() { return this.instanceInstrument; } @Override public AppElement affirmJob(String path ) { return (AppElement) this.affirmTreeNodeByPath( path, GenericAppElement.class, GenericNamespace.class ); } @Override public TaskElement affirmTask( String path ,TaskElement metaInfos) { TaskElement taskElement = (TaskElement) this.affirmTreeNodeByPath( path, GenericTaskElement.class, GenericNamespace.class ); taskElement.setActuallyPriority( metaInfos.getActuallyPriority() ); taskElement.setDeploymentMethod( metaInfos.getDeploymentMethod() ); taskElement.setEnable( metaInfos.isEnable()); taskElement.setDryRun( metaInfos.isDryRun() ); taskElement.setPriority( metaInfos.getPriority() ); taskElement.setResourceType( metaInfos.getResourceType() ); taskElement.setScheduleCycle( metaInfos.getScheduleCycle() ); taskElement.setScheduleType( metaInfos.getScheduleType() ); taskElement.setType( metaInfos.getType() ); taskElement.setImagePath( metaInfos.getImagePath() ); taskElement.setName( metaInfos.getName() 
); taskElement.setGuid( metaInfos.getGuid() ); return taskElement; } @Override public ElementNode queryElement( String path ) { GUID guid = this.queryGUIDByPath( path ); if( guid != null ) { return this.get( guid ).evinceElementNode(); } return null; } @Override public Namespace affirmNamespace( String path ) { return ( Namespace ) this.affirmTreeNodeByPath( path, null, GenericNamespace.class ); } protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) { List guids = manipulator.getGuidsByName( childName ); for( GUID guid : guids ) { List ps = this.imperialTree.fetchParentGuids( guid ); if( ps.contains( parentGuid ) ){ return true; } } return false; } @Override public boolean containsChild( GUID parentGuid, String childName ) { for( GUIDNameManipulator manipulator : this.fileManipulators ) { if( this.containsChild( manipulator, parentGuid, childName ) ) { return true; } } for( GUIDNameManipulator manipulator : this.folderManipulators ) { if( this.containsChild( manipulator, parentGuid, childName ) ) { return true; } } return false; } /** * Affirm that the path exists in the cache, if the concrete implementation requires it. * For GenericDistributedScopeTree::getPath, the cache is written automatically by default, so getPath can be used here to guarantee that the cached path always exists. * @param guid target guid. */ protected void affirmPathExist( GUID guid ) { this.imperialTree.getCachePath( guid ); } @Override public TaskTreeNode get( GUID guid ){ return (TaskTreeNode) super.get( guid ); } @Override public void update( TreeNode treeNode ) { TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() ); operator.update( treeNode ); } @Override public void remove( GUID guid ) { super.remove( guid ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/AppElement.java ================================================ package com.pinecone.hydra.task.kom.entity; import com.pinecone.hydra.task.kom.TaskFamilyNode; public interface AppElement extends FolderElement, TaskFamilyNode { @Override default AppElement evinceAppElement() { return this; } String getType(); void setType( String type ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/ArchElementNode.java ================================================ package com.pinecone.hydra.task.kom.entity; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.task.ArchTaskFamilyMeta; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public abstract class ArchElementNode extends ArchTaskFamilyMeta implements ElementNode { protected long enumId; protected GUID metaGuid; protected GUIDImperialTrieNode distributedTreeNode; protected TaskInstrument taskInstrument; protected LocalDateTime createTime; protected LocalDateTime updateTime; public ArchElementNode() { super(); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public ArchElementNode( Map joEntity ) { super( joEntity
); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public ArchElementNode( Map joEntity, TaskInstrument taskInstrument) { super( joEntity ); this.apply(taskInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public ArchElementNode( TaskInstrument taskInstrument ) { this.apply(taskInstrument); } public void apply( TaskInstrument taskInstrument ) { this.taskInstrument = taskInstrument; if ( this.getGuid() == null ) { GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); } if ( this.createTime == null ) { this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } } @Override public ArchElementNode apply( Map joEntity ) { super.apply( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); return this; } @Override public String getKomPath() { return this.taskInstrument.getPath( this.getGuid() ); } @Override public String getSystemKernelObjectPath() { return this.taskInstrument.querySystemKernelObjectPath( this.getGuid() ); } @Override public GUID getMetaGuid() { return this.metaGuid; } @Override public void setMetaGuid( GUID metaGuid ) { this.metaGuid = metaGuid; } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId( long enumId ) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid( GUID guid ) { this.guid = guid; } @Override public void setName( String name ) { this.name = name; } @Override public void setScenario( String scenario ) { this.scenario = scenario; } @Override public void setMarshallingArchitecture( String marshallingArchitecture ) { this.marshallingArchitecture = marshallingArchitecture; } @Override public void setExtraInformation( String extraInformation ) { this.extraInformation = extraInformation; } @Override public void setDescription( String description ) { this.description = description; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; } @Override public GUIDImperialTrieNode getDistributedTreeNode() { return this.distributedTreeNode; } @Override public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) { this.distributedTreeNode = distributedTreeNode; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } protected Collection fetchChildren() { Collection guids = this.fetchChildrenGuids(); List elementNodes = new ArrayList<>(); for( GUID guid : guids ){ ElementNode elementNode = (ElementNode) this.taskInstrument.get( guid ); elementNodes.add( elementNode ); } return elementNodes; } protected Collection fetchChildrenGuids() { return this.taskInstrument.fetchChildrenGuids( this.getGuid() ); } protected void addChild( ElementNode child ) { GUID childId; boolean bContainsChild = this.containsChild( child.getName() ); if( bContainsChild ) { return; } else { childId = this.taskInstrument.put( child ); } this.taskInstrument.affirmOwnedNode( this.guid, childId ); } protected boolean containsChild( String childName ) { return this.taskInstrument.containsChild( this.guid, childName 
); } @Override public JSONObject toJSONObject() { return BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/ElementNode.java ================================================ package com.pinecone.hydra.task.kom.entity; import java.time.LocalDateTime; import java.util.Set; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.system.ko.meta.ElementObject; import com.pinecone.hydra.task.kom.TaskFamilyNode; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public interface ElementNode extends TaskTreeNode, TaskFamilyNode, ElementObject { Set UnbeanifiedKeys = Set.of( "distributedTreeNode" ); @Override default String objectCategoryName() { return "Task"; } default Namespace evinceNamespace() { return null; } default AppElement evinceAppElement() { return null; } default TaskElement evinceTaskElement() { return null; } GUIDImperialTrieNode getDistributedTreeNode(); void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ); JSONObject toJSONObject(); @Override default ElementNode evinceElementNode(){ return this; } GUID getMetaGuid(); void setMetaGuid( GUID metaGuid ); String getKomPath(); String getSystemKernelObjectPath(); String getName(); void setName( String name ); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/EntryNode.java ================================================ package com.pinecone.hydra.task.kom.entity; import com.pinecone.hydra.unit.imperium.entity.MetaEntryNode; import com.pinecone.slime.entity.EnumIndexableEntity; public interface EntryNode extends MetaEntryNode, EnumIndexableEntity { @Override default EntryNode evinceEntryNode() { return this; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/FolderElement.java ================================================ package com.pinecone.hydra.task.kom.entity; import java.util.Collection; import com.pinecone.framework.util.id.GUID; public interface FolderElement extends ElementNode { Collection fetchChildren(); Collection fetchChildrenGuids(); void addChild( ElementNode child ); boolean containsChild( String childName ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/GenericAppElement.java ================================================ package com.pinecone.hydra.task.kom.entity; import java.util.Collection; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.task.kom.TaskInstrument; public class GenericAppElement extends ArchElementNode implements AppElement { protected String taskType; public GenericAppElement() { super(); } public GenericAppElement( Map joEntity ) { super( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericAppElement( Map joEntity, TaskInstrument 
taskInstrument ) { super( joEntity, taskInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericAppElement( TaskInstrument taskInstrument ) { super(taskInstrument); } @Override public String getType() { return this.taskType; } @Override public void setType( String taskType ) { this.taskType = taskType; } @Override public Collection fetchChildren() { return super.fetchChildren(); } @Override public Collection fetchChildrenGuids() { return super.fetchChildrenGuids(); } @Override public void addChild( ElementNode child ) { if( child instanceof FolderElement ) { throw new IllegalArgumentException( "Foisting `FolderElement` into application node is not accepted." ); } super.addChild( child ); } @Override public boolean containsChild( String childName ) { return super.containsChild( childName ); } @Override public JSONObject toJSONObject() { Collection children = this.fetchChildren(); JSONObject jo = BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys ); JSONObject joChildren = new JSONMaptron(); for( ElementNode node : children ) { joChildren.put( node.getName(), node.toJSONObject() ); } jo.put( "tasks", joChildren ); return jo; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/GenericNamespace.java ================================================ package com.pinecone.hydra.task.kom.entity; import java.util.Collection; import java.util.Map; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public class GenericNamespace extends ArchElementNode implements Namespace { protected GUID metaGuid; protected GUIDImperialTrieNode distributedTreeNode; protected TaskNamespaceManipulator namespaceManipulator; public GenericNamespace() { super(); } public GenericNamespace( Map joEntity ) { super( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericNamespace( Map joEntity, TaskInstrument taskInstrument) { super( joEntity, taskInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericNamespace( TaskInstrument taskInstrument) { super(taskInstrument); } public GenericNamespace(TaskInstrument taskInstrument, TaskNamespaceManipulator namespaceManipulator ) { this(taskInstrument); this.namespaceManipulator = namespaceManipulator; } @Override public GUIDImperialTrieNode getDistributedTreeNode() { return this.distributedTreeNode; } @Override public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) { this.distributedTreeNode = distributedTreeNode; } @Override public GUID getMetaGuid() { return this.metaGuid; } @Override public void setMetaGuid( GUID metaGuid ) { this.metaGuid = metaGuid; } @Override public JSONObject toJSONObject() { Collection children = this.fetchChildren(); JSONObject jo = new JSONMaptron(); for( ElementNode node : children ) { jo.put( node.getName(), node.toJSONObject() ); } return jo; } @Override public JSONObject toJSONDetails() { return BeanColonist.DirectColonist.populate( this, 
ElementNode.UnbeanifiedKeys ); } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "guid" , this.getGuid() ), new KeyValue<>( "name" , this.getName() ) } ); } @Override public String toString() { return this.name; } @Override public Collection fetchChildren() { return super.fetchChildren(); } @Override public Collection fetchChildrenGuids() { return super.fetchChildrenGuids(); } @Override public void addChild( ElementNode child ) { super.addChild( child ); } @Override public boolean containsChild( String childName ) { return super.containsChild( childName ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/GenericTaskElement.java ================================================ package com.pinecone.hydra.task.kom.entity; import java.time.LocalDateTime; import java.util.Map; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.task.TaskExtraMeta; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; public class GenericTaskElement extends ArchElementNode implements TaskElement { protected String taskType; protected String imagePath; protected String resourceType; protected String deploymentMethod; protected short priority; protected short actuallyPriority; protected boolean dryRun; protected String scheduleCron; protected TaskScheduleCycle scheduleCycle; protected TaskScheduleType scheduleType; protected boolean enable; protected LocalDateTime scheduleStartTime; protected LocalDateTime scheduleEndTime; protected LocalDateTime nextScheduleTime; protected String processorName; private void initSelf( Map joEntity ) { BeanMapDecoder.BasicDecoder.decode( this, joEntity ); if ( this.szElementaryConfig != null ) { this.elementaryConfig = (JSONObject)JSON.parse( this.szElementaryConfig ); } } public GenericTaskElement() { super(); } public GenericTaskElement( Map joEntity ) { super( joEntity ); this.initSelf( joEntity ); } public GenericTaskElement( Map joEntity, TaskInstrument taskInstrument ) { super( joEntity, taskInstrument); this.initSelf( joEntity ); } public GenericTaskElement( TaskInstrument taskInstrument ) { super(taskInstrument); } @Override public String getType() { return this.taskType; } @Override public void setType( String taskType ) { this.taskType = taskType; } @Override public String getImagePath() { return this.imagePath; } @Override public void setImagePath( String imagePath ) { this.imagePath = imagePath; } @Override public String getResourceType() { return this.resourceType; } @Override public void setResourceType( String resourceType ) { this.resourceType = resourceType; } @Override public short getPriority() { return this.priority; } @Override public void setPriority( int priority ) { this.priority = (short) priority; } @Override public short getActuallyPriority() { return this.actuallyPriority; } @Override public void setActuallyPriority( int actuallyPriority ) { this.actuallyPriority = (short) actuallyPriority; } @Override public String getDeploymentMethod() { return this.deploymentMethod; } @Override public void setDeploymentMethod( String deploymentMethod ) { this.deploymentMethod = deploymentMethod; } @Override public boolean isDryRun() { return this.dryRun; } @Override public 
void setDryRun( boolean dryRun ) { this.dryRun = dryRun; } @Override public String getScheduleCron() { return this.scheduleCron; } @Override public void setScheduleCron( String scheduleCron ) { this.scheduleCron = scheduleCron; } @Override public LocalDateTime getNextScheduleTime() { return this.nextScheduleTime; } @Override public void setNextScheduleTime( LocalDateTime nextScheduleTime ) { this.nextScheduleTime = nextScheduleTime; } @Override public void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) { this.scheduleCycle = kernelScheduleCycle; } @Override public TaskScheduleCycle getScheduleCycle() { return this.scheduleCycle; } @Override public void setScheduleType ( TaskScheduleType kernelScheduleType ) { this.scheduleType = kernelScheduleType; } @Override public TaskScheduleType getScheduleType() { return this.scheduleType; } @Override public boolean isEnable() { return this.enable; } @Override public void setEnable( boolean enable ) { this.enable = enable; } @Override public LocalDateTime getScheduleStartTime() { return this.scheduleStartTime; } @Override public void setScheduleStartTime( LocalDateTime scheduleStartTime ) { this.scheduleStartTime = scheduleStartTime; } @Override public LocalDateTime getScheduleEndTime() { return this.scheduleEndTime; } @Override public void setScheduleEndTime( LocalDateTime scheduleEndTime ) { this.scheduleEndTime = scheduleEndTime; } @Override public String getProcessorName() { return this.processorName; } @Override public void setProcessorName( String processorName ) { this.processorName = processorName; } @Override public TaskExtraMeta getExtraMeta() { return null; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/Namespace.java ================================================ package com.pinecone.hydra.task.kom.entity; import java.util.Set; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public interface Namespace extends FolderElement { Set UnbeanifiedKeys = Set.of( "distributedTreeNode", "classificationRules" ); long getEnumId(); void setEnumId(long id); GUID getGuid(); void setGuid(GUID guid); GUID getMetaGuid(); void setMetaGuid(GUID metaGuid); String getName(); void setName(String name); GUIDImperialTrieNode getDistributedTreeNode(); void setDistributedTreeNode(GUIDImperialTrieNode distributedTreeNode); @Override default Namespace evinceNamespace() { return this; } JSONObject toJSONDetails(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/TaskElement.java ================================================ package com.pinecone.hydra.task.kom.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.TaskExtraMeta; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; import java.time.LocalDateTime; public interface TaskElement extends ElementNode { @Override default TaskElement evinceTaskElement() { return this; } String getImagePath(); void setImagePath( String path ); String getType(); void setType( String type ); String getDeploymentMethod(); void setDeploymentMethod( String deploymentMethod ); String getResourceType(); void setResourceType( String resourceType ); short getPriority(); void setPriority( int priority ); short getActuallyPriority(); void 
setActuallyPriority( int priority ); TaskScheduleCycle getScheduleCycle(); void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) ; TaskScheduleType getScheduleType(); void setScheduleType ( TaskScheduleType kernelScheduleType ) ; boolean isDryRun() ; void setDryRun( boolean dryRun ) ; String getScheduleCron(); void setScheduleCron( String scheduleCron ) ; boolean isEnable() ; void setEnable( boolean enable ) ; LocalDateTime getScheduleStartTime(); void setScheduleStartTime( LocalDateTime scheduleStartTime ); LocalDateTime getScheduleEndTime(); void setScheduleEndTime( LocalDateTime scheduleEndTime ); LocalDateTime getNextScheduleTime(); void setNextScheduleTime( LocalDateTime nextScheduleTime ); String getProcessorName(); void setProcessorName( String processorName ); TaskExtraMeta getExtraMeta(); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/TaskTreeNode.java ================================================ package com.pinecone.hydra.task.kom.entity; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface TaskTreeNode extends TreeNode { String getName(); default String getMetaType() { return this.className().replace("Generic",""); } default TaskTreeNode evinceTreeNode(){ return this; } default ElementNode evinceElementNode(){ return null; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/GenericInstanceEntry.java ================================================ package com.pinecone.hydra.task.kom.instance; import java.time.LocalDateTime; import java.util.Map; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.task.ArchInstanceMeta; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.EntryNode; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; public class GenericInstanceEntry extends ArchInstanceMeta implements InstanceEntry, EntryNode { protected long enumId; protected TaskInstrument taskInstrument; protected TaskElement taskElement; public GenericInstanceEntry() { super(); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public GenericInstanceEntry( Map joEntity ) { this(); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericInstanceEntry( Map joEntity, TaskInstrument taskInstrument ) { this.apply(taskInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericInstanceEntry( TaskInstrument taskInstrument ) { this( taskInstrument, null ); } public GenericInstanceEntry( TaskInstrument taskInstrument, @Nullable TaskElement taskElement ) { this.taskElement = taskElement; this.apply(taskInstrument); } public void apply( TaskInstrument taskInstrument ) { this.taskInstrument = taskInstrument; if ( this.getGuid() == null ) { GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); } if ( this.createTime == null ) { this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } if ( 
this.taskElement == null && this.getTaskGuid() != null ) { this.taskElement = (TaskElement) this.taskInstrument.get( this.getTaskGuid() ); } } @Override public long getEnumId() { return this.enumId; } @Override public String getTaskName() { if ( this.taskName == null ) { if ( this.taskElement != null ) { this.taskName = this.taskElement.getName(); } } return this.taskName; } @Override public TaskElement taskElement() { return this.taskElement; } @Override public void setGuid ( GUID guid ) { this.guid = guid; } @Override public void setTaskGuid ( GUID taskGuid ) { this.taskGuid = taskGuid; } @Override public void setInstanceName ( String instanceName ) { this.instanceName = instanceName; } @Override public void setTaskName( String taskName ) { this.taskName = taskName; } @Override public void setBusinessTime ( LocalDateTime businessTime ) { this.businessTime = businessTime; } @Override public void setPriority ( int priority ) { this.priority = (short) priority; } @Override public void setImagePath( String imagePath ) { this.imagePath = imagePath; } @Override public void setActuallyPriority ( int actuallyPriority ) { this.actuallyPriority = (short) actuallyPriority; } @Override public void setInstanceStatus ( TaskInstanceStatus instanceStatus ) { this.instanceStatus = instanceStatus; } @Override public void setTaskType ( String taskType ) { this.taskType = taskType; } @Override public void setRunCount ( int runCount ) { this.runCount = runCount; } @Override public void setSequenceCnt( int sequenceCnt ) { this.sequenceCnt = sequenceCnt; } @Override public void setRetryCnt( int retryCnt ) { this.retryCnt = retryCnt; } @Override public void setDryRun ( boolean dryRun ) { this.dryRun = dryRun; } @Override public void setErrorCause( String errorCause ) { this.errorCause = errorCause; } @Override public void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) { this.scheduleCycle = kernelScheduleCycle; } @Override public void setScheduleType ( TaskScheduleType kernelScheduleType ) { this.scheduleType = kernelScheduleType; } @Override public void setLastStartTime ( LocalDateTime lastStartTime ) { this.lastStartTime = lastStartTime; } @Override public void setLastEndTime ( LocalDateTime lastEndTime ) { this.lastEndTime = lastEndTime; } @Override public void setCreateTime ( LocalDateTime createTime ) { this.createTime = createTime; } @Override public void setUpdateTime ( LocalDateTime updateTime ) { this.updateTime = updateTime; } @Override public TaskInstrument getTaskInstrument() { return this.taskInstrument; } @Override public String getRunStatus() { if ( this.instanceStatus == null ) { return null; } return this.instanceStatus.getName(); } @Override public void setRunStatus ( String status ) { this.instanceStatus = TaskInstanceStatus.getByName( status ); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/InstanceEntry.java ================================================ package com.pinecone.hydra.task.kom.instance; import java.time.LocalDateTime; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.TaskInstanceMeta; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.EntryNode; import 
com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.task.marshal.TaskScheduleType; public interface InstanceEntry extends TaskInstanceMeta, EntryNode { @Override default String getName() { return this.getInstanceName(); } @Override String getTaskName(); TaskElement taskElement(); void setGuid ( GUID guid ); void setTaskGuid ( GUID taskGuid ); void setInstanceName ( String instanceName ); void setBusinessTime ( LocalDateTime businessTime ); void setTaskName ( String taskName ); void setPriority ( int priority ); void setImagePath( String imagePath ); void setActuallyPriority ( int actuallyPriority ); void setInstanceStatus ( TaskInstanceStatus instanceStatus ); void setTaskType ( String taskType ); void setRunCount ( int runCount ); void setSequenceCnt( int sequenceCnt ); void setRetryCnt( int retryCnt ); void setRunStatus( String runStatus ); void setDryRun ( boolean dryRun ); void setErrorCause( String errorCause ); void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) ; void setScheduleType ( TaskScheduleType kernelScheduleType ) ; void setLastStartTime ( LocalDateTime lastStartTime ); void setLastEndTime ( LocalDateTime lastEndTime ); void setCreateTime ( LocalDateTime createTime ); void setUpdateTime ( LocalDateTime updateTime ); TaskInstrument getTaskInstrument(); String getRunStatus (); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/InstanceInstrument.java ================================================ package com.pinecone.hydra.task.kom.instance; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.regime.Instrument; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.MetaPersistenceException; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.slime.meta.TableIndexMeta; import java.time.LocalDateTime; import java.util.Collection; import java.util.List; public interface InstanceInstrument extends Instrument { void addInstance( InstanceEntry instanceEntry ); void addInstance( GUID taskGuid, InstanceEntry instanceEntry ); void updateInstance( InstanceEntry instanceEntry ) throws MetaPersistenceException; InstanceEntry getInstanceEntry( GUID insGuid ); List queryInstances( String taskTreePath, long offset, long pageSize ); long countInstanceByGuid( GUID taskGuid ); default List queryInstances( String taskTreePath ) { return this.queryInstances( this.getTaskInstrument().queryGUIDByPath( taskTreePath ) ); } default List queryInstances( GUID taskGuid ) { return this.queryInstances( taskGuid, 0, this.countInstanceByGuid( taskGuid ) ); } List queryInstances( GUID taskGuid, long offset, long pageSize ); TaskInstrument getTaskInstrument(); InstanceEntry makeInstanceEntry( GUID taskGuid, @Nullable String insName, @Nullable LocalDateTime bizTime ); default InstanceEntry makeInstanceEntry( GUID taskGuid ) { return this.makeInstanceEntry( taskGuid, null, null ); } void removeInstance( GUID insGuid ); InstanceEntry findLastExecuted( GUID taskGuid, String bizTime ); TableIndexMeta querySchedulableIdRange( Collection runStatuses, LocalDateTime targetTime ); List fetchSchedulableInstances( long idMin, long idMax, Collection runStatuses, LocalDateTime targetTime ); TableIndexMeta querySchedulableIdRange( Collection runStatuses, LocalDateTime targetTime, short actuallyPriority ); List 
fetchSchedulableInstances( long idMin, long idMax, Collection runStatuses, LocalDateTime targetTime, short actuallyPriority ); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/KernelInstanceInstrument.java ================================================ package com.pinecone.hydra.task.kom.instance; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.MetaPersistenceException; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.slime.meta.TableIndexMeta; import java.time.LocalDateTime; import java.util.Collection; import java.util.List; public class KernelInstanceInstrument implements InstanceInstrument { protected InstanceNodeManipulator mInstanceManipulator; protected TaskInstrument mTaskInstrument; public KernelInstanceInstrument( TaskInstrument instrument, InstanceNodeManipulator manipulator ) { this.mTaskInstrument = instrument; this.mInstanceManipulator = manipulator; } @Override public TaskInstrument getTaskInstrument() { return this.mTaskInstrument; } @Override public void addInstance( InstanceEntry instanceEntry ) { this.mInstanceManipulator.insert( instanceEntry ); } @Override public void addInstance( GUID taskGuid, InstanceEntry instanceEntry ) { instanceEntry.setTaskGuid( taskGuid ); if ( instanceEntry.getGuid() == null ) { instanceEntry.setGuid( this.mTaskInstrument.getGuidAllocator().nextGUID() ); } this.addInstance( instanceEntry ); } @Override public void updateInstance( InstanceEntry instanceEntry ) throws MetaPersistenceException { try { this.mInstanceManipulator.update( instanceEntry ); } catch ( Exception e ) { throw new MetaPersistenceException( e ); } } @Override public InstanceEntry getInstanceEntry( GUID insGuid ) { return this.mInstanceManipulator.queryByGuid( insGuid, this.mTaskInstrument ); } @Override public List queryInstances( String taskPath, long offset, long pageSize ) { GUID guid = this.mTaskInstrument.queryGUIDByPath( taskPath ); if ( guid == null ) { return null; } return this.mInstanceManipulator.queryByTaskGuid( this.mTaskInstrument, guid, offset, pageSize ); } @Override public List queryInstances( GUID taskGuid, long offset, long pageSize ) { return this.mInstanceManipulator.queryByTaskGuid( this.mTaskInstrument, taskGuid, offset, pageSize ); } @Override public long countInstanceByGuid( GUID taskGuid ) { return this.mInstanceManipulator.countInstanceByTaskGuid( taskGuid ); } @Override public InstanceEntry makeInstanceEntry( GUID taskGuid, @Nullable String insName, @Nullable LocalDateTime bizTime ) { TreeNode tn = this.mTaskInstrument.get( taskGuid ); if ( tn instanceof TaskElement ) { TaskElement taskElement = (TaskElement) tn; InstanceEntry instanceEntry = new GenericInstanceEntry( this.mTaskInstrument, taskElement ); instanceEntry.setTaskGuid( taskGuid ); instanceEntry.setGuid( this.mTaskInstrument.getGuidAllocator().nextGUID() ); instanceEntry.setPriority( taskElement.getPriority() ); instanceEntry.setActuallyPriority( taskElement.getPriority() ); instanceEntry.setTaskType( taskElement.getType() ); // instanceEntry.setInstanceName( taskElement.getName() ); // instanceEntry.setBusinessTime( taskElement.getBusinessTime() ); // 
instanceEntry.setScheduleCycleCode( taskElement.getScheduleCycleCode() ); instanceEntry.setScheduleCycle( taskElement.getScheduleCycle() ); instanceEntry.setScheduleType( taskElement.getScheduleType() ); instanceEntry.setRunCount( 0 ); instanceEntry.setDryRun( taskElement.isDryRun() ); instanceEntry.setInstanceStatus( TaskInstanceStatus.New ); return instanceEntry; } return null; } @Override public void removeInstance( GUID insGuid ) { this.mInstanceManipulator.remove( insGuid ); } @Override public InstanceEntry findLastExecuted( GUID taskGuid, String bizTime ) { return this.mInstanceManipulator.findLastExecuted( taskGuid, this.mTaskInstrument, bizTime ); } @Override public TableIndexMeta querySchedulableIdRange( Collection runStatuses, LocalDateTime targetTime ) { return this.mInstanceManipulator.selectSchedulableIdRange( runStatuses, targetTime, null ); } @Override public List fetchSchedulableInstances( long idMin, long idMax, Collection runStatuses, LocalDateTime targetTime ) { return this.mInstanceManipulator.fetchSchedulableInstances( this.mTaskInstrument, idMin, idMax, runStatuses, targetTime, null ); } @Override public TableIndexMeta querySchedulableIdRange( Collection runStatuses, LocalDateTime targetTime, short actuallyPriority ) { return this.mInstanceManipulator.selectSchedulableIdRange( runStatuses, targetTime, actuallyPriority ); } @Override public List fetchSchedulableInstances( long idMin, long idMax, Collection runStatuses, LocalDateTime targetTime, short actuallyPriority ) { return this.mInstanceManipulator.fetchSchedulableInstances( this.mTaskInstrument, idMin, idMax, runStatuses, targetTime, actuallyPriority ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/source/InstanceNodeManipulator.java ================================================ package com.pinecone.hydra.task.kom.instance.source; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.instance.InstanceEntry; import com.pinecone.slime.meta.TableIndexMeta; import java.time.LocalDateTime; import java.util.Collection; import java.util.List; public interface InstanceNodeManipulator extends Pinenut { void insert( InstanceEntry instanceEntry ); void update( InstanceEntry instanceEntry ); InstanceEntry queryByGuid( GUID guid, TaskInstrument instrument ); int countInstance(); long countInstanceByName( String name ); List fetchInstances( TaskInstrument instrument, long offset, long pageSize ); default List fetchInstances( TaskInstrument instrument ) { return this.fetchInstances( instrument, 0, this.countInstance() ); } List queryByTaskGuid( TaskInstrument instrument, GUID taskGuid, long offset, long pageSize ); long countInstanceByTaskGuid( GUID taskGuid ); void remove( GUID guid ); InstanceEntry findLastExecuted( GUID taskGuid, TaskInstrument instrument, String bizTime ); TableIndexMeta selectSchedulableIdRange( Collection runStatuses, LocalDateTime targetTime, @Nullable Short actuallyPriority ); List fetchSchedulableInstances( TaskInstrument instrument, long idMin, long idMax, Collection runStatuses, LocalDateTime targetTime, @Nullable Short actuallyPriority ); } ================================================ FILE: 
Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskInstrumentDecoder.java ================================================ package com.pinecone.hydra.task.kom.marshaling; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.kom.entity.ElementNode; public interface TaskInstrumentDecoder extends Pinenut { default ElementNode decode( Object val, GUID parentGUID ) { if ( val instanceof Map ) { Map map = (Map) val; if( map.isEmpty() ) { return null; } else if( map.size() > 1 ) { throw new IllegalArgumentException( "Root element map should have exactly one entry." ); } Map.Entry kv = (Map.Entry) map.entrySet().iterator().next(); return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID ); } return null; } ElementNode decode( String key, Object val, GUID parentGUID ); default ElementNode decode( Map.Entry kv, GUID parentGUID ) { return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID ); } default ElementNode decode( Object val ) { return this.decode( val, null ); } default ElementNode decode( String key, Object val ) { return this.decode( key, val, null ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskInstrumentEncoder.java ================================================ package com.pinecone.hydra.task.kom.marshaling; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.kom.entity.ElementNode; public interface TaskInstrumentEncoder extends Pinenut { Object encode(ElementNode node); } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskJSONDecoder.java ================================================ package com.pinecone.hydra.task.kom.marshaling; import java.util.Collection; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.AppElement; import com.pinecone.hydra.task.kom.entity.ElementNode; import com.pinecone.hydra.task.kom.entity.FolderElement; import com.pinecone.hydra.task.kom.entity.GenericAppElement; import com.pinecone.hydra.task.kom.entity.GenericNamespace; import com.pinecone.hydra.task.kom.entity.GenericTaskElement; import com.pinecone.hydra.task.kom.entity.Namespace; import com.pinecone.hydra.task.kom.entity.TaskElement; public class TaskJSONDecoder implements TaskInstrumentDecoder { protected TaskInstrument instrument; public TaskJSONDecoder( TaskInstrument instrument ) { this.instrument = instrument; } @Override @SuppressWarnings( "unchecked" ) public ElementNode decode( String szName, Object o, GUID parentGuid ) { if ( o instanceof Map ) { return (ElementNode) this.instrument.get( this.decodeJSONObject( szName, (Map) o, parentGuid ).getGuid() ); } throw new IllegalArgumentException( "Elements of `TaskInstrument` should all be objects." ); }
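// Decoding contract (see decodeJSONObject below): an element map whose "metaType" is absent or equal to "Namespace" is affirmed as a namespace; "AppElement" and "TaskElement" maps are affirmed as app/task nodes, with an app node's children nested under its "tasks" key; unknown meta types are delegated to decodeExternalElements, which subclasses may override.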
protected Namespace newNamespace( String szName, Map jo ) { Namespace ns = new GenericNamespace( jo, this.instrument ); ns.setName( szName ); return ns; } protected Object[] affirmNSExisted( String szName, GUID parentGuid, Map jo ) { Namespace ns = null; if( parentGuid == null ) { ElementNode rootE = this.instrument.queryElement( szName ); if( rootE != null ) { if( rootE.evinceNamespace() == null ) { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be a namespace.", szName ) ); } ns = rootE.evinceNamespace(); } } else { ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid ); if( parentNode instanceof Namespace ) { Collection destChildren = parentNode.evinceNamespace().fetchChildren(); for( ElementNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof Namespace ) { ns = (Namespace) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be a namespace.", szName ) ); } } } } } GUID currentGuid; if( ns == null ) { ns = this.newNamespace( szName, jo ); currentGuid = this.instrument.put( ns ); this.instrument.affirmOwnedNode( parentGuid, currentGuid ); } else { currentGuid = ns.getGuid(); } return new Object[] { ns, currentGuid }; } protected Object[] affirmAppExisted( String szName, GUID parentGuid, Map jo ) { AppElement job = null; if( parentGuid == null ) { ElementNode rootE = this.instrument.queryElement( szName ); if( rootE != null ) { if( rootE.evinceAppElement() == null ) { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be an `AppElement`.", szName ) ); } job = rootE.evinceAppElement(); } } else { ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid ); if( parentNode instanceof Namespace ) { Collection destChildren = parentNode.evinceNamespace().fetchChildren(); for( ElementNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof AppElement) { job = (AppElement) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be an `AppElement`.", szName ) ); } } } } } AppElement neo ; if( job == null ) { neo = new GenericAppElement( jo, this.instrument ); neo.setName( szName ); } else { neo = job; } return new Object[] { job, neo }; } protected Object[] affirmTasExisted( String szName, GUID parentGuid, Map jo ) { TaskElement task = null; if( parentGuid == null ) { ElementNode rootE = this.instrument.queryElement( szName ); if( rootE != null ) { if( rootE.evinceTaskElement() == null ) { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be a `TaskElement`.", szName ) ); } task = rootE.evinceTaskElement(); } } else { ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid ); Collection destChildren; if( parentNode instanceof FolderElement ) { destChildren = ( (FolderElement) parentNode ).fetchChildren(); for( ElementNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof TaskElement ) { task = (TaskElement) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be a `TaskElement`.", szName ) ); } } } } else { throw new IllegalStateException( String.format( "Parent of `TaskElement` [%s] should be `FolderElement`.", szName ) ); } } TaskElement neo ; if( task == null ) { neo = new GenericTaskElement( jo, this.instrument ); neo.setName( szName ); } else { neo = task; } return
new Object[] { task, neo }; } protected Object[] decodeExternalElements( String szMetaType, String szName, GUID parentGuid, Map jo ) throws IllegalArgumentException { throw new IllegalArgumentException( "Unknown metaType '" + szMetaType + "'." ); } protected void decodeChildren ( Map jo, GUID currentGuid ) { for ( Object o : jo.entrySet() ) { Map.Entry kv = (Map.Entry) o; Object val = kv.getValue(); if( val instanceof Map ) { this.decode( kv.getKey().toString(), val, currentGuid ); } } } protected ElementNode decodeJSONObject( String szName, Map jo, GUID parentGuid ) { String szMetaType = (String) jo.get( "metaType" ); boolean isNamespace = szMetaType == null || szMetaType.equals( Namespace.class.getSimpleName() ); ElementNode elementNode; GUID currentGuid; if ( isNamespace ) { Object[] pair = this.affirmNSExisted( szName, parentGuid, jo ); Namespace ns = (Namespace) pair[ 0 ]; currentGuid = (GUID) pair[ 1 ]; this.decodeChildren( jo, currentGuid ); elementNode = ns; } else { Object[] pair; boolean bIsFolderElement = false; if( szMetaType.equals( AppElement.class.getSimpleName() ) ) { pair = this.affirmAppExisted( szName, parentGuid, jo ); bIsFolderElement = true; } else if( szMetaType.equals( TaskElement.class.getSimpleName() ) ) { pair = this.affirmTasExisted( szName, parentGuid, jo ); } else { try{ pair = this.decodeExternalElements( szMetaType, szName, parentGuid, jo ); } catch ( RuntimeException e ) { throw new IllegalArgumentException( e ); } } ElementNode arc = (ElementNode) pair[ 0 ]; ElementNode neo = (ElementNode) pair[ 1 ]; if( arc == null ) { currentGuid = this.instrument.put( neo ); this.instrument.affirmOwnedNode( parentGuid, currentGuid ); } else { currentGuid = arc.getGuid(); this.instrument.update( neo ); } if( bIsFolderElement ) { Object subs = jo.get( "tasks" ); if( subs instanceof Map ) { Map joSer = (Map) subs; this.decodeChildren( joSer, currentGuid ); } } elementNode = neo; } return elementNode; } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskJSONEncoder.java ================================================ package com.pinecone.hydra.task.kom.marshaling; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.ElementNode; public class TaskJSONEncoder implements TaskInstrumentEncoder { protected TaskInstrument instrument; public TaskJSONEncoder(TaskInstrument instrument ) { this.instrument = instrument; } @Override public Object encode( ElementNode node ) { return node.toJSONObject(); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/AppElementOperator.java ================================================ package com.pinecone.hydra.task.kom.operator; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.GenericAppElement; import com.pinecone.hydra.task.kom.entity.AppElement; import com.pinecone.hydra.task.kom.entity.GenericNamespace; import com.pinecone.hydra.task.kom.source.AppNodeManipulator; import com.pinecone.hydra.task.kom.source.TaskMasterManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public class 
================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/AppElementOperator.java
================================================
package com.pinecone.hydra.task.kom.operator;

import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.GenericAppElement;
import com.pinecone.hydra.task.kom.entity.AppElement;
import com.pinecone.hydra.task.kom.entity.GenericNamespace;
import com.pinecone.hydra.task.kom.source.AppNodeManipulator;
import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
import com.pinecone.hydra.system.ko.UOIUtils;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public class AppElementOperator extends ArchElementOperator implements ElementOperator {
    protected AppNodeManipulator appNodeManipulator;

    public AppElementOperator( ElementOperatorFactory factory ) {
        this( factory.getTaskMasterManipulator(), factory.taskInstrument() );
        this.factory = factory;
    }

    public AppElementOperator( TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument ) {
        super( masterManipulator, taskInstrument );
        this.appNodeManipulator = masterManipulator.getAppNodeManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        GenericAppElement jobElement = (GenericAppElement) treeNode;
        GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator();
        GUID jobNodeGUID = guidAllocator.nextGUID();
        jobElement.setGuid( jobNodeGUID );
        this.appNodeManipulator.insert( jobElement );

        // Store the node record into the master trie table.
        GUIDImperialTrieNode node = new GUIDImperialTrieNode();
        node.setNodeMetadataGUID( jobNodeGUID );
        node.setGuid( jobNodeGUID );
        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );
        this.imperialTree.insert( node );
        return jobNodeGUID;
    }

    @Override
    public void purge( GUID guid ) {
        // Recursively delete owned nodes; referenced nodes are only cleaned up
        // once no other parent still references them.
        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren( guid );
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        if ( !childNodes.isEmpty() ) {
            List<GUID> subordinates = this.imperialTree.getSubordinates( guid );
            if ( !subordinates.isEmpty() ) {
                for ( GUID subordinateGuid : subordinates ) {
                    this.purge( subordinateGuid );
                }
            }
            childNodes = this.imperialTree.getChildren( guid );
            for( GUIDImperialTrieNode childNode : childNodes ) {
                List<GUID> parentNodes = this.imperialTree.fetchParentGuids( childNode.getGuid() );
                if ( parentNodes.size() > 1 ) {
                    this.imperialTree.removeInheritance( childNode.getGuid(), guid );
                }
                else {
                    this.purge( childNode.getGuid() );
                }
            }
        }

        if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() ) ) {
            this.removeNode( guid );
        }
        else {
            UOI uoi = node.getType();
            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );
            if( metaType == null ) {
                TreeNode newInstance = (TreeNode) uoi.newInstance( new Class[]{ TaskInstrument.class }, this.taskInstrument );
                metaType = newInstance.getMetaType();
            }
            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );
            operator.purge( guid );
        }
    }

    @Override
    public AppElement get( GUID guid ) {
        AppElement appElement = this.appNodeManipulator.getAppElement( guid, this.taskInstrument );
        appElement.setGuid( guid );
        return appElement;
    }

    @Override
    public AppElement get( GUID guid, int depth ) {
        return this.get( guid );
    }

    @Override
    public AppElement getAsRootDepth( GUID guid ) {
        return this.get( guid );
    }

    @Override
    public void update( TreeNode treeNode ) {
        GenericAppElement applicationElement = (GenericAppElement) treeNode;
        this.appNodeManipulator.update( applicationElement );
    }

    @Override
    public void updateName( GUID guid, String name ) {
    }

    protected void removeNode( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.appNodeManipulator.remove( node.getGuid() );
    }
}
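The `purge` above is a reference-counted cascade delete; whether a child is destroyed or merely unlinked depends on how many parents still reference it. A sketch of the per-child decision (node names hypothetical):

// Given:  ns ──┬── childA   (ns is childA's only parent)
//              └── childB   (childB is also owned by otherNs)
//
// purge( ns ) fully purges childA (single parent, recursive delete), but only
// calls removeInheritance( childB, ns ), leaving childB reachable from otherNs.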
================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/ArchElementOperator.java
================================================
package com.pinecone.hydra.task.kom.operator;

import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
import com.pinecone.hydra.unit.imperium.ImperialTree;

public abstract class ArchElementOperator implements ElementOperator {
    protected TaskInstrument          taskInstrument;
    protected ImperialTree            imperialTree;
    protected TaskMasterManipulator   taskMasterManipulator;
    protected ElementOperatorFactory  factory;

    public ArchElementOperator( ElementOperatorFactory factory ) {
        this( factory.getTaskMasterManipulator(), factory.taskInstrument() );
        this.factory = factory;
    }

    public ArchElementOperator( TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument ) {
        this.imperialTree          = taskInstrument.getMasterTrieTree();
        this.taskInstrument        = taskInstrument;
        this.taskMasterManipulator = masterManipulator;
        //this.factory = new GenericServiceOperatorFactory(servicesTree,masterManipulator);
    }

    public ElementOperatorFactory getOperatorFactory() {
        return this.factory;
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/ElementOperator.java
================================================
package com.pinecone.hydra.task.kom.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.kom.entity.ElementNode;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public interface ElementOperator extends TreeNodeOperator {
    @Override
    ElementNode get( GUID guid );

    @Override
    ElementNode get( GUID guid, int depth );

    @Override
    ElementNode getAsRootDepth( GUID guid );
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/ElementOperatorFactory.java
================================================
package com.pinecone.hydra.task.kom.operator;

import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.AppElement;
import com.pinecone.hydra.task.kom.entity.Namespace;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
import com.pinecone.hydra.unit.imperium.operator.OperatorFactory;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public interface ElementOperatorFactory extends OperatorFactory {
    String DefaultServiceNode     = TaskElement.class.getSimpleName();
    String DefaultNamespace       = Namespace.class.getSimpleName();
    String DefaultApplicationNode = AppElement.class.getSimpleName();

    void register( String typeName, TreeNodeOperator functionalNodeOperation );

    void registerMetaType( Class clazz, String metaType );

    void registerMetaType( String classFullName, String metaType );

    String getMetaType( String classFullName );

    ElementOperator getOperator( String typeName );

    TaskInstrument taskInstrument();

    TaskMasterManipulator getTaskMasterManipulator();
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/GenericElementOperatorFactory.java
================================================
package com.pinecone.hydra.task.kom.operator;

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.GenericAppElement;
import com.pinecone.hydra.task.kom.entity.GenericNamespace;
import com.pinecone.hydra.task.kom.entity.GenericTaskElement;
import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public class GenericElementOperatorFactory implements ElementOperatorFactory {
    protected TaskMasterManipulator taskMasterManipulator;
    protected TaskInstrument        taskInstrument;
    protected Map<String, TreeNodeOperator> registerer  = new HashMap<>();
    protected Map<String, String>           metaTypeMap = new TreeMap<>();

    protected void registerDefaultMetaType( Class genericType ) {
        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace( "Generic", "" ) );
    }

    protected void registerDefaultMetaTypes() {
        this.registerDefaultMetaType( GenericNamespace.class );
        this.registerDefaultMetaType( GenericTaskElement.class );
        this.registerDefaultMetaType( GenericAppElement.class );
    }

    public GenericElementOperatorFactory( TaskInstrument taskInstrument, TaskMasterManipulator taskMasterManipulator ) {
        this.taskInstrument        = taskInstrument;
        this.taskMasterManipulator = taskMasterManipulator;
        this.registerer.put( ElementOperatorFactory.DefaultServiceNode,     new TaskElementOperator( this ) );
        this.registerer.put( ElementOperatorFactory.DefaultApplicationNode, new AppElementOperator( this ) );
        this.registerer.put( ElementOperatorFactory.DefaultNamespace,       new NamespaceOperator( this ) );
        this.registerDefaultMetaTypes();
    }

    @Override
    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {
        this.registerer.put( typeName, functionalNodeOperation );
    }

    @Override
    public void registerMetaType( Class clazz, String metaType ) {
        this.registerMetaType( clazz.getName(), metaType );
    }

    @Override
    public void registerMetaType( String classFullName, String metaType ) {
        this.metaTypeMap.put( classFullName, metaType );
    }

    @Override
    public TaskInstrument taskInstrument() {
        return this.taskInstrument;
    }

    @Override
    public TaskMasterManipulator getTaskMasterManipulator() {
        return this.taskMasterManipulator;
    }

    @Override
    public String getMetaType( String classFullName ) {
        return this.metaTypeMap.get( classFullName );
    }

    @Override
    public ElementOperator getOperator( String typeName ) {
        //Debug.trace( this.registerer.toString() );
        return (ElementOperator) this.registerer.get( typeName );
    }
}
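The factory above pre-binds the three default operators and metaTypes; external element kinds hook in through `register` and `registerMetaType`. A minimal wiring sketch, assuming `instrument` and `manipulator` are built elsewhere; `CronTaskElement` and `CronTaskOperator` are hypothetical extensions, not part of this source:

public class OperatorWiringSketch {
    public static ElementOperator wire( TaskInstrument instrument, TaskMasterManipulator manipulator ) {
        GenericElementOperatorFactory factory = new GenericElementOperatorFactory( instrument, manipulator );
        // Map the concrete class name to the metaType used for dispatch during purge...
        factory.registerMetaType( CronTaskElement.class, "CronTaskElement" );
        // ...and bind an operator instance under that metaType.
        factory.register( "CronTaskElement", new CronTaskOperator( factory ) );
        return factory.getOperator( "CronTaskElement" );
    }
}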
================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/NamespaceOperator.java
================================================
package com.pinecone.hydra.task.kom.operator;

import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.GenericAppElement;
import com.pinecone.hydra.task.kom.entity.GenericNamespace;
import com.pinecone.hydra.task.kom.entity.Namespace;
import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
import com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator;
import com.pinecone.hydra.system.ko.UOIUtils;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public class NamespaceOperator extends ArchElementOperator implements ElementOperator {
    protected TaskNamespaceManipulator namespaceManipulator;

    public NamespaceOperator( ElementOperatorFactory factory ) {
        this( factory.getTaskMasterManipulator(), factory.taskInstrument() );
        this.factory = factory;
    }

    public NamespaceOperator( TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument ) {
        super( masterManipulator, taskInstrument );
        this.namespaceManipulator = masterManipulator.getNamespaceManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        GenericNamespace ns = (GenericNamespace) treeNode;

        // Persist the basic node record.
        GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator();
        GUID namespaceGuid = guidAllocator.nextGUID();
        ns.setGuid( namespaceGuid );
        this.namespaceManipulator.insert( ns );

        // Persist the metadata record.
        GUID metadataGUID = guidAllocator.nextGUID();
        ns.setMetaGuid( metadataGUID );
        //this.nodeMetaManipulator.insertNS( ns );

        GUIDImperialTrieNode node = new GUIDImperialTrieNode();
        node.setGuid( namespaceGuid );
        node.setNodeMetadataGUID( metadataGUID );
        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );
        this.imperialTree.insert( node );
        return namespaceGuid;
    }

    @Override
    public void purge( GUID guid ) {
        // Recursively delete owned nodes; referenced nodes are only cleaned up
        // once no other parent still references them.
        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren( guid );
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        if ( !childNodes.isEmpty() ) {
            List<GUID> subordinates = this.imperialTree.getSubordinates( guid );
            if ( !subordinates.isEmpty() ) {
                for ( GUID subordinateGuid : subordinates ) {
                    this.purge( subordinateGuid );
                }
            }
            childNodes = this.imperialTree.getChildren( guid );
            for( GUIDImperialTrieNode childNode : childNodes ) {
                List<GUID> parentNodes = this.imperialTree.fetchParentGuids( childNode.getGuid() );
                if ( parentNodes.size() > 1 ) {
                    this.imperialTree.removeInheritance( childNode.getGuid(), guid );
                }
                else {
                    this.purge( childNode.getGuid() );
                }
            }
        }

        if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() )
          || node.getType().getObjectName().equals( GenericAppElement.class.getName() ) ) {
            this.removeNode( guid );
        }
        else {
            UOI uoi = node.getType();
            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );
            if( metaType == null ) {
                TreeNode newInstance = (TreeNode) uoi.newInstance( new Class[]{ TaskInstrument.class }, this.taskInstrument );
                metaType = newInstance.getMetaType();
            }
            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );
            operator.purge( guid );
        }
    }

    @Override
    public Namespace get( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        GenericNamespace namespace = new GenericNamespace( this.taskInstrument );
        GUID metaGuid = node.getNodeMetadataGUID();
        namespace.setDistributedTreeNode( node );
        namespace.setName( this.namespaceManipulator.getNamespace( guid ).getName() );
        namespace.setGuid( guid );
        namespace.setMetaGuid( metaGuid );
        return namespace;
    }

    @Override
    public Namespace get( GUID guid, int depth ) {
        return this.get( guid );
    }

    @Override
    public Namespace getAsRootDepth( GUID guid ) {
        return this.get( guid );
    }

    @Override
    public void update( TreeNode nodeWideData ) {
        GenericNamespace ns = (GenericNamespace) nodeWideData;
        this.namespaceManipulator.update( ns );
    }

    @Override
    public void updateName( GUID guid, String name ) {
    }

    protected void removeNode( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.namespaceManipulator.remove( node.getGuid() );
    }
}
================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/TaskElementOperator.java
================================================
package com.pinecone.hydra.task.kom.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.GenericTaskElement;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
import com.pinecone.hydra.task.kom.source.TaskNodeManipulator;
import com.pinecone.hydra.system.ko.UOIUtils;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

public class TaskElementOperator extends ArchElementOperator implements ElementOperator {
    protected TaskNodeManipulator taskNodeManipulator;

    public TaskElementOperator( ElementOperatorFactory factory ) {
        this( factory.getTaskMasterManipulator(), factory.taskInstrument() );
        this.factory = factory;
    }

    public TaskElementOperator( TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument ) {
        super( masterManipulator, taskInstrument );
        this.taskNodeManipulator = masterManipulator.getTaskNodeManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        GenericTaskElement taskElement = (GenericTaskElement) treeNode;

        // Persist the element: store the node record into the task node table.
        GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator();
        GUID taskNodeGUID = guidAllocator.nextGUID();
        taskElement.setGuid( taskNodeGUID );
        this.taskNodeManipulator.insert( taskElement );

        // Store the node record into the master trie table.
        GUIDImperialTrieNode node = new GUIDImperialTrieNode();
        node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.
        node.setGuid( taskNodeGUID );
        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );
        this.imperialTree.insert( node );
        return taskNodeGUID;
    }

    @Override
    public void purge( GUID guid ) {
        this.removeNode( guid );
    }

    @Override
    public TaskElement get( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        TaskElement taskElement = this.taskNodeManipulator.getTaskNode( guid, this.taskInstrument );
        taskElement.setDistributedTreeNode( node );
        taskElement.setGuid( guid );
        return taskElement;
    }

    @Override
    public TaskElement get( GUID guid, int depth ) {
        return this.get( guid );
    }

    @Override
    public TaskElement getAsRootDepth( GUID guid ) {
        return this.get( guid );
    }

    @Override
    public void update( TreeNode nodeWideData ) {
        TaskElement serviceElement = (TaskElement) nodeWideData;
        this.taskNodeManipulator.update( serviceElement );
        this.imperialTree.removeCachePath( serviceElement.getGuid() );
    }

    @Override
    public void updateName( GUID guid, String name ) {
    }

    private void removeNode( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.taskNodeManipulator.remove( node.getGuid() );
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/AppNodeManipulator.java
================================================
package com.pinecone.hydra.task.kom.source;

import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.AppElement;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;

public interface AppNodeManipulator extends GUIDNameManipulator {
    void insert( AppElement appElement );

    void remove( GUID guid );

    AppElement getAppElement( GUID guid, TaskInstrument instrument );

    void update( AppElement appElement );

    List fetchJobNodeByName( String name );
}
================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/TaskMasterManipulator.java
================================================
package com.pinecone.hydra.task.kom.source;

import com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator;
import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;

public interface TaskMasterManipulator extends KOIMasterManipulator {
    TrieTreeManipulator getTrieTreeManipulator();

    AppNodeManipulator getAppNodeManipulator();

    TaskNodeManipulator getTaskNodeManipulator();

    TaskNamespaceManipulator getNamespaceManipulator();

    TireOwnerManipulator getTireOwnerManipulator();

    InstanceNodeManipulator getInstanceNodeManipulator();
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/TaskNamespaceManipulator.java
================================================
package com.pinecone.hydra.task.kom.source;

import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.kom.entity.Namespace;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;

public interface TaskNamespaceManipulator extends GUIDNameManipulator {
    void insert( Namespace ns );

    void remove( GUID guid );

    Namespace getNamespace( GUID guid );

    void update( Namespace ns );

    List fetchNamespaceNodeByName( String name );
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/TaskNodeManipulator.java
================================================
package com.pinecone.hydra.task.kom.source;

import java.time.LocalDateTime;
import java.util.Collection;
import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.GenericTaskElement;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;
import com.pinecone.hydra.task.marshal.TaskScheduleCycle;
import com.pinecone.slime.meta.TableIndex64Meta;

public interface TaskNodeManipulator extends GUIDNameManipulator {
    void insert( TaskElement taskElement );

    void remove( GUID guid );

    TaskElement getTaskNode( GUID guid, TaskInstrument instrument );

    void update( TaskElement taskElement );

    List fetchTaskNodeByName( String name );

    @Override
    List getGuidsByName( String name );

    @Override
    List getGuidsByNameID( String name, GUID guid );

    TableIndex64Meta selectSchedulableIdRange( Collection cycles, LocalDateTime targetTime );

    List fetchSchedulableTasksInRange( long idMin, long idMax, Collection cycles, LocalDateTime targetTime );

    List listPage( int offset, int pageSize );
}
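The two range methods above imply a two-phase scan: first resolve the schedulable id window for a set of cycles, then fetch the concrete task rows inside that window. A minimal sketch, assuming a `TaskNodeManipulator` implementation is at hand; the `getIdMin()`/`getIdMax()` accessors on `TableIndex64Meta` are assumptions, not confirmed by this source:

import java.time.LocalDateTime;
import java.util.EnumSet;
import java.util.List;

public class SchedulableScanSketch {
    public static List fetchDueTasks( TaskNodeManipulator manipulator ) {
        EnumSet<TaskScheduleCycle> cycles = EnumSet.of( TaskScheduleCycle.Minute, TaskScheduleCycle.Hour );
        LocalDateTime now = LocalDateTime.now();
        // Phase 1: bound the id range that can contain schedulable tasks.
        TableIndex64Meta range = manipulator.selectSchedulableIdRange( cycles, now );
        // Phase 2: fetch the rows within that range (accessors assumed, see above).
        return manipulator.fetchSchedulableTasksInRange( range.getIdMin(), range.getIdMax(), cycles, now );
    }
}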
================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/marshal/TaskPriority.java
================================================
package com.pinecone.hydra.task.marshal;

import java.util.Arrays;

public enum TaskPriority {
    MAX(999), UNLIMITED(500), L0(50), L1(40), L2(30), L3(20), L4(10), L5(0);

    private final int value;

    private TaskPriority( int value ) {
        this.value = value;
    }

    public Integer getValue() {
        return this.value;
    }

    public static Integer byName( String name ) {
        try {
            TaskPriority taskPriority = valueOf( name );
            return taskPriority.getValue();
        }
        catch ( IllegalArgumentException e ) {
            return null;
        }
    }

    public static TaskPriority of( int value ) {
        return Arrays.stream( values() ).filter( (it) -> it.value == value ).findFirst().orElse( null );
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/marshal/TaskScheduleCycle.java
================================================
package com.pinecone.hydra.task.marshal;

public enum TaskScheduleCycle {
    Undefined  ( 0x00, "Undefined"  ),
    Minute     ( 0x01, "Minute"     ),
    Hour       ( 0x02, "Hour"       ),
    Day        ( 0x03, "Day"        ),
    Week       ( 0x04, "Week"       ),
    Month      ( 0x05, "Month"      ),

    // Sub-second level scheduling: unable to use the regular scheduling channels;
    // requires client-side cached state (instances are minute-level at minimum;
    // second-level instances cannot be generated).
    TickSecond ( 0xC0, "TickSecond" ),
    TickMills  ( 0xC1, "TickMills"  ),
    ;

    private final int code;
    private final String name;

    TaskScheduleCycle( int code, String name ) {
        this.code = code;
        this.name = name;
    }

    public String getName() {
        return this.name;
    }

    public int getCode() {
        return this.code;
    }

    public static TaskScheduleCycle getByCode( int code ) {
        for ( TaskScheduleCycle cycle : TaskScheduleCycle.values() ) {
            if ( cycle.code == code ) {
                return cycle;
            }
        }
        return null;
    }

    public static TaskScheduleCycle getByName( String name ) {
        for ( TaskScheduleCycle cycle : TaskScheduleCycle.values() ) {
            if ( cycle.name.equals( name ) ) {
                return cycle;
            }
        }
        return null;
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/marshal/TaskScheduleType.java
================================================
package com.pinecone.hydra.task.marshal;

public enum TaskScheduleType {
    Undefined ( 0x00, "Undefined" ),
    Cycle     ( 0x01, "Cycle"     ),
    Manual    ( 0x02, "Manual"    ),
    Triggered ( 0x03, "Triggered" ),
    Resident  ( 0x04, "Resident"  ),
    ;

    private final int code;
    private final String name;

    TaskScheduleType( int code, String name ) {
        this.code = code;
        this.name = name;
    }

    public String getName() {
        return this.name;
    }

    public int getCode() {
        return this.code;
    }

    public static TaskScheduleType getByCode( int code ) {
        for ( TaskScheduleType type : TaskScheduleType.values() ) {
            if ( type.code == code ) {
                return type;
            }
        }
        return null;
    }
}

================================================
FILE: Hydra/hydra-framework-runtime/src/test/java/com/ioc/SystemTestIoC.java
================================================
package com.ioc;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.construction.*;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.hydra.auto.ArchInstructation;
import com.pinecone.hydra.auto.Instructation;
import com.pinecone.hydra.auto.PeriodicAutomaton;
import com.pinecone.hydra.auto.PeriodicAutomatron;

class SpawnInstruct extends ArchInstructation {
    @Structure( cycle = ReuseCycle.Disposable )
    Actor npc;

    public SpawnInstruct() {
        super();
    }

    @Override
    public void execute() throws Exception {
        Debug.trace( this.npc.name +" spawned !"
); } } public class SystemTestIoC { public static void testUnifyStructureInjector_MobSpawnner( ) throws Exception { PeriodicAutomatron modSpawnner = new PeriodicAutomaton( null, 500 ); modSpawnner.start(); StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser(); UnifyStructureInjector injector = new UnifyStructureInjector( SpawnInstruct.class, dispenser ); Thread elderBrain = new Thread(()->{ for ( int i = 0; i < 100; i++ ) { Debug.sleep( 50 ); modSpawnner.command( (Instructation) injector.inject( new JSONMaptron( "{npc:{ name:NPC"+i+", hp:9999, species: Hydra, weapon:{ name:SuperDragon, damage:9999, attachedMagic:fire } }}" ), new SpawnInstruct() ) ); } }); elderBrain.start(); elderBrain.join(); modSpawnner.join(); } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ //TestIoC.testInstancePool(); //TestIoC.testUnifyStructureInjector_Simple(); //TestIoC.testUnifyStructureInjector_List(); SystemTestIoC.testUnifyStructureInjector_MobSpawnner(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/test/java/com/ioc/TestIoC.java ================================================ package com.ioc; import com.pinecone.Pinecone; import com.pinecone.framework.system.construction.*; import com.pinecone.framework.system.prototype.ObjectiveMap; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.DirectJSONInjector; import com.pinecone.framework.util.json.homotype.JSONGet; import com.pinecone.framework.util.json.homotype.MapStructure; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import java.util.List; import java.util.Map; enum Species { Dragon, Lizard, Bear, Wolf, Devil, Human, Druid, Elf, Dwarf, } class Weapon { @JSONGet protected String name; @MapStructure protected int damage; @JSONGet( "attachedMagic" ) protected String magic; public String toJSONString() { return DirectJSONInjector.instance().inject( this ).toString(); } public String toString(){ return DirectJSONInjector.instance().inject( this ).toString(); } } class Actor { @JSONGet protected String name; @JSONGet protected int hp; @JSONGet protected Species species = Species.Dragon; @Structure( cycle = ReuseCycle.Disposable ) protected Weapon weapon; protected boolean live = true; public String toJSONString() { return DirectJSONInjector.instance().inject( this ).toString(); } public String toString(){ return DirectJSONInjector.instance().inject( this ).toString(); } } class Team { @Structure( type = Actor.class, cycle = ReuseCycle.Disposable ) protected List craws; public String toJSONString() { return DirectJSONInjector.instance().inject( this ).toString(); } public String toString(){ return DirectJSONInjector.instance().inject( this ).toString(); } } class Tale { @Structure protected String type; @Structure( type = Team.class, cycle = ReuseCycle.Recyclable ) protected Map worlds; public String toJSONString() { return ( (JSONObject)DirectJSONInjector.instance().inject( this )).toJSONStringI(2); } public String toString(){ return DirectJSONInjector.instance().inject( this ).toString(); } } public class TestIoC { public static void testInstancePool( ) { DynamicFactory wolfNPCFactory = new GenericDynamicFactory(); GenericDynamicInstancePool npcs = new GenericDynamicInstancePool<>( wolfNPCFactory, 0, 
0, Actor.class ); for ( int i = 0; i < 1e2; ++i ) { Actor npc = npcs.allocate(); Debug.trace( npc ); npcs.free(npc); } } public static void testUnifyStructureInjector_Simple( ) throws Exception { StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser(); UnifyStructureInjector injector = new UnifyStructureInjector( Actor.class, dispenser ); Actor actor = new Actor(); JSONObject jo = new JSONMaptron( "{ name:RedPrince, hp:100, species: Lizard, weapon:{ name:TyrantSuit, damage:70, attachedMagic:fire } }" ); injector.inject( new ObjectiveMap<>(jo), actor ); Debug.trace( actor ); } public static void testUnifyStructureInjector_List( ) throws Exception { StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser(); UnifyStructureInjector injector = new UnifyStructureInjector( Team.class, dispenser ); Team team = new Team(); JSONObject jo = new JSONMaptron( "{ craws: [" + "{ name:RedPrince, hp:100, species: Lizard, weapon:{ name:TyrantSuit, damage:70, attachedMagic:fire } }, " + "{ name:Ifan, hp:90, species: Human, weapon:{ name:Ranger, damage:50, attachedMagic:lightning } }, " + "] }" ); injector.inject( new ObjectiveMap<>(jo), team ); Debug.trace( team ); } public static void testUnifyStructureInjector_Sophisticate( ) throws Exception { StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser(); UnifyStructureInjector injector = new UnifyStructureInjector( Tale.class, dispenser ); Tale tale = new Tale(); JSONObject jo = new JSONMaptron( "{ type:fantasy, worlds :{ d2: { craws: [" + "{ name:RedPrince, hp:100, species: Lizard, weapon:{ name:TyrantSuit, damage:70, attachedMagic:fire } }, " + "{ name:Ifan, hp:90, species: Human, weapon:{ name:Ranger, damage:50, attachedMagic:lightning } }, " + "] }," + "b3: { craws: [" + "{ name:DarkUrge, hp:120, species: Dragon, weapon:{ name:DragonSoul, damage:9999, attachedMagic:fire } }, " + "{ name:Karlack, hp:100, species: Devil, weapon:{ name:Everburn Blade, damage:120, attachedMagic:fire } }, " + "] }," + "no: { craws: [" + "{ name:Hydra, hp:9999, species: Hydra, weapon:{ name:SuperDragon, damage:9999, attachedMagic:fire } }, " + "] }" + " }}" ); injector.inject( new ObjectiveMap<>(jo), tale ); Debug.trace( tale ); } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ //TestIoC.testInstancePool(); //TestIoC.testUnifyStructureInjector_Simple(); //TestIoC.testUnifyStructureInjector_List(); TestIoC.testUnifyStructureInjector_Sophisticate(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/test/java/com/orchestration/SimpleExertium.java ================================================ package com.orchestration; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.orchestration.Exertium; public class SimpleExertium extends Exertium { String mszToken; public SimpleExertium( String szWho ) { this.mszToken = szWho; } @Override protected void doStart() { Debug.trace( "Hello hi, I am " + this.mszToken ); Debug.sleep(50); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/test/java/com/orchestration/SimpleParallelium.java ================================================ package com.orchestration; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.orchestration.parallel.ArchMasterParallelium; public class SimpleParallelium extends ArchMasterParallelium { String mszToken; public SimpleParallelium( String szWho ) { this.mszToken = 
szWho; } @Override protected void doStart() { Debug.trace( "Hello hi, I am " + this.mszToken ); Debug.trace( Thread.currentThread().getName() ); Debug.sleep(100); } @Override protected void doTerminate() { } @Override protected void doRollback() { } } ================================================ FILE: Hydra/hydra-framework-runtime/src/test/java/com/orchestration/TestBasicTransaction.java ================================================ package com.orchestration; import com.pinecone.hydra.orchestration.*; import com.pinecone.Pinecone; import com.pinecone.framework.system.NotImplementedException; public class TestBasicTransaction { public static void testSequential() { SequentialAction action = new SequentialAction(); //LoopAction action = new LoopAction(); //ParallelAction action = new ParallelAction(); action.getSeqExceptionNeglector().add( NotImplementedException.class ); action.add( new SimpleExertium( "Gay0" ) ); action.add( new SimpleExertium( "Gay1" ) ); Exertium e = new Exertium(); //action.add( e ); SequentialAction sa = new SequentialAction(); sa.add( new SimpleExertium( "A0" ) ); sa.add( new SimpleExertium( "A1" ) ); sa.add( new SimpleExertium( "A2" ) ); sa.add( new JumpPoint( 0 ) ); action.add( sa ); action.add( new SimpleExertium( "Gay2" ) ); action.add( ProcessController.BREAK ); action.add( new SimpleExertium( "Gay3" ) ); action.add( new SimpleExertium( "Gay4" ) ); //action.add( new JumpPoint(1)); action.start(); } public static void testParallel() { ParallelAction action = new ParallelAction(); action.add( ( new SimpleParallelium( "P0" ) ) ); action.add( new SimpleParallelium( "P1" ) ); action.add( new SimpleParallelium( "P2" ) ); action.add( new SimpleExertium( "E0" ) ); action.add( new SimpleExertium( "E1" ) ); action.add( new SimpleExertium( "E2" ) ); action.add( new SimpleExertium( "E3" ) ); action.add( new SimpleParallelium( "P3" ) ); action.add( new SimpleParallelium( "P4" ) ); action.add( new SimpleParallelium( "P5" ) ); action.add( new SimpleParallelium( "P6" ) ); action.start(); } public static void testGraph() { Transaction a0 = new SequentialAction(); a0.add( ( new SimpleExertium( "E0_0" ) ) ); a0.add( ( new SimpleExertium( "E0_1" ) ) ); ParallelAction a1 = new ParallelAction(); a1.add( ( new SimpleParallelium( "P1_0" ) ) ); a1.add( new SimpleParallelium( "P1_1" ) ); a1.add( new SimpleParallelium( "P1_2" ) ); a1.add( new SimpleExertium( "E1_0" ) ); a1.add( new SimpleExertium( "E1_1" ) ); a1.add( new SimpleParallelium( "P1_3" ) ); a1.add( new SimpleParallelium( "P1_4" ) ); SequentialAction aGroup = new SequentialAction(); aGroup.add( ( new SimpleExertium( "EG_0" ) ) ); aGroup.add( ( new SimpleExertium( "EG_1" ) ) ); a1.add( aGroup ); a1.add( ParallelAction.wrap( aGroup ) ); a0.add( a1 ); a0.add( ( new SimpleExertium( "E0_END" ) ) ); a0.start(); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestBasicTransaction.testSequential(); TestBasicTransaction.testParallel(); //TestBasicTransaction.testGraph(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/test/java/com/orchestration/TestInstructation.java ================================================ package com.orchestration; import com.pinecone.Pinecone; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.Debug; import 
com.pinecone.hydra.auto.*; class SimpleInstruct extends ArchInstructation { String ss; public SimpleInstruct( String s ) { super(); this.ss = s; } @Override public void execute() throws Exception { Debug.trace( "Hello hi, fuck "+ this.ss +" !" ); // if( this.ss.equals( "1" ) ) { // throw new Exception(); // } } } class SimpleS extends ArchSuggestation { String ss; public SimpleS( String s ) { super(); this.ss = s; } @Override public void execute() { Debug.trace( "Hello hi, fuck "+ this.ss +" !" ); if( this.ss.equals( "1" ) ) { throw new RuntimeException(); } } } class SimplePI extends ArchParallelInstructation { String ss; public SimplePI( Processum parent, String s ) { super( parent ); this.ss = s; } @Override public void doExecute() throws Exception { for ( int i = 0; i < 1; i++ ) { Debug.trace( "Hello hi, fuck "+ this.ss +" !" ); } // if( this.ss.equals( "1" ) ) { // throw new Exception(); // } } } public class TestInstructation { public static void testMarshalling() throws Exception { Processum p = new ArchProcessum( "", null ) {}; GenericMarshalling marshalling = new GenericMarshalling(); marshalling.add( new SimpleInstruct( "0" ) ); //marshalling.add( new SimplePI( p,"1" ) ); marshalling.add( new SimpleS( "1" ) ); GenericMarshalling am = new GenericMarshalling(); am.add( new SimpleInstruct( "a0" ) ); am.add( new SimpleInstruct( "a1" ) ); marshalling.add( am ); marshalling.execute(); } public static void testPeriodicAutomaton() throws Exception { PeriodicAutomatron automatron = new PeriodicAutomaton( null, 500 ); automatron.command( new SimpleInstruct( "0" ) ); automatron.command( new SimpleInstruct( "1" ) ); automatron.command( new SimpleInstruct( "2" ) ); GenericMarshalling am = new GenericMarshalling( automatron ); am.add( new SimpleInstruct( "a0" ) ); am.add( new SimpleInstruct( "a1" ) ); //am.add( new SimplePI( automatron,"p1" ) ); automatron.command( am ); automatron.start(); //automatron.join(); Debug.sleep( 10 ); //automatron.command( new SimpleInstruct( "3" ) ); Thread push = new Thread( ()->{ for ( int i = 0; i < 100; i++ ) { Debug.sleep( 50 ); automatron.command( new SimpleInstruct( "push" + i ) ); } } ); push.start(); Thread push2 = new Thread( ()->{ for ( int i = 0; i < 100; i++ ) { Debug.sleep( 50 ); automatron.command( new SimpleInstruct( "2push" + i ) ); } } ); push2.start(); // Debug.sleep( 1500 ); // automatron.command( KernelInstructation.CONTINUE ); // Debug.sleep( 2000 ); // automatron.withdraw( KernelInstructation.CONTINUE ); // automatron.terminate(); push2.join(); push.join(); automatron.join(); } public static void testAutomaton() throws Exception { LifecycleAutomaton automatron = new Automaton( null ); automatron.setHeartbeatTimeoutMillis( 1000 ); automatron.start(); Thread push = new Thread( ()->{ int i = 0; for ( ; i < 10; i++ ) { Debug.sleep( 100 ); automatron.command( new SimpleInstruct( "push" + i ) ); } //automatron.command( KernelInstructation.DIE ); for ( ; i < 20; i++ ) { Debug.sleep( 100 ); automatron.command( new SimpleInstruct( "push" + i ) ); } } ); push.start(); Thread push2 = new Thread( ()->{ int i = 0; for ( ; i < 10; i++ ) { Debug.sleep( 100 ); automatron.command( new SimpleInstruct( "2push" + i ) ); } automatron.command( new SimplePI( automatron,"p1" ) ); } ); push2.start(); // Thread push3 = new Thread( ()->{ // while ( true ) { // Debug.sleep( 1500 ); // automatron.sendHeartbeat(); // } // //Debug.sleep( 1000 ); // //automatron.command( KernelInstructation.DIE ); // } ); // push3.start(); automatron.join(); } public static void main( 
String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ //TestInstructation.testMarshalling(); TestInstructation.testPeriodicAutomaton(); // TestInstructation.testAutomaton(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-framework-runtime/src/test/java/com/servgram/TestServgram.java ================================================ package com.servgram; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.lang.*; import com.pinecone.framework.util.lang.iterator.JarFileIterator; import com.pinecone.framework.util.lang.iterator.NamespaceIterator; import com.pinecone.ulf.util.lang.PooledClassCandidateScanner; import java.util.ArrayList; import java.util.List; public class TestServgram { public static void testPackageCollector() throws Exception { Debug.trace( Thread.currentThread().getContextClassLoader().getResource("com/mysql/jdbc") ); //Debug.trace( Thread.currentThread().getContextClassLoader().getResource("com/pinecone/hydra") ); NamespaceCollector collector = new ClassNameFetcher(); //NamespaceCollector collector = new PackageNameFetcher(); //Debug.echo( JSON.stringify( collector.fetch( "com.mysql", true ), 2 ) ); Debug.echo( JSON.stringify( collector.fetch( "com.pinecone.hydra", true ), 2 ) ); Debug.echo( collector.fetchFirst( "com.pinecone.hydra" ) ); //TestServgram.class.getClassLoader().loadClass() //Debug.trace( Package.getPackage( "com.pinecone.hydra.servgram" ) ); } public static void testPackageScope() throws Exception { ScopedPackage scopedPackage = new LazyScopedPackage( "com.pinecone.hydra" ); Debug.trace( scopedPackage.children().get( 3 ).fetchChildrenClassNames() ); Debug.trace( scopedPackage.getPackage() ); Debug.trace( scopedPackage.tryLoad() ); Debug.trace( scopedPackage.getPackage(), scopedPackage.hasLoaded() ); Debug.trace( scopedPackage.fetchFirstClassName() ); //ClassPathScanningCandidateComponentProvider } public static void testIterator() throws Exception { //NamespaceIterator iterator = new DirectoryClassIterator( "/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecones/Hydra/target/classes/com/pinecone/hydra/umc/wolf", "com.pinecone.hydra.umc.wolf" ); //NamespaceIterator iterator = new DirectoryPackageIterator( "/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecones/Hydra/target/classes/com/pinecone/hydra/umc", "com.pinecone.hydra.umc" ); NamespaceIterator iterator = new JarFileIterator( "jar:file:/C:/Users/undefined/.m2/repository/mysql/mysql-connector-java/8.0.23/mysql-connector-java-8.0.23.jar!/com/mysql/jdbc", ".class" ); //NamespaceIterator iterator = new JarClassIterator( "jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/lib/radium-2.1.0.jar!/com/sauron/radium/heistron" ); //NamespaceIterator iterator = new JarClassIterator( "jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-c-1.2.7.jar!/BOOT-INF/lib/shadow-c-1.2.7.jar!/BOOT-INF/lib/radium-2.1.0.jar!/com/sauron/radium/heistron" ); //NamespaceIterator iterator = new JarClassIterator( "jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow/chronicle" ); //NamespaceIterator iterator = new JarPackageIterator( 
"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow/chronicle" ); //NamespaceIterator iterator = new JarPackageIterator( "jar:file:/C:/Users/undefined/.m2/repository/mysql/mysql-connector-java/8.0.23/mysql-connector-java-8.0.23.jar!/com/mysql" ); while ( iterator.hasNext() ) { Debug.trace( iterator.next() ); } // JarEntryIterator iterator2 = new JarEntryIterator( "jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow/chronicle" ); // while ( iterator2.hasNext() ) { // Debug.trace( iterator2.next().getRealName() ); // } // PathNamespaceCollectum collectum = new JarClassCollectorAdapter(); PathNamespaceCollectum collectum = new JarPackageCollectorAdapter(); List classNames = new ArrayList<>(); collectum.collect( //"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-c-1.2.7.jar!/BOOT-INF/lib/shadow-c-1.2.7.jar!/BOOT-INF/lib/radium-2.1.0.jar!/com/sauron/radium/heistron", "jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow", "", classNames, true ); Debug.fmt( 2, classNames ); } public static void testScanner() throws Exception { ClassScanner scanner = new PooledClassCandidateScanner( null, Thread.currentThread().getContextClassLoader() ); PooledClassCandidateScanner scanner1 = (PooledClassCandidateScanner) scanner; List list = new ArrayList<>(); scanner1.scan( "com.pinecone.hydra.umc", true, list ); //scanner1.scan( "com.mysql.jdbc", true, list ); //scanner1.scan( "com.mysql.jdbc", false, list ); Debug.echo( JSON.stringify( list, 2 ) ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestServgram.testPackageCollector(); //TestServgram.testPackageScope(); TestServgram.testIterator(); //TestServgram.testScanner(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-framework-service/pom.xml ================================================ hydra com.pinecone.hydra 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.pinecone.hydra.kernel hydra-framework-service 2.1.0 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-architecture 2.1.0 compile com.pinecone.hydra.kernel hydra-message-control 2.1.0 compile com.pinecone.ulf ulfhedinn 1.2.1 compile ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/GenericNamespaceNode.java ================================================ package com.pinecone.hydra.scenario.entity; import com.pinecone.framework.util.id.GUID; public class GenericNamespaceNode implements NamespaceNode{ private long enumId; private GUID guid; private String name; private GenericNamespaceNodeMeta namespaceNodeMeta; private GenericScenarioCommonData scenarioCommonData; public GenericNamespaceNode() { } public GenericNamespaceNode(long enumId, GUID guid, String name, GenericNamespaceNodeMeta namespaceNodeMeta, GenericScenarioCommonData scenarioCommonData) { this.enumId = enumId; this.guid = guid; this.name = name; this.namespaceNodeMeta = namespaceNodeMeta; this.scenarioCommonData = scenarioCommonData; } public long getEnumId() { return enumId; } public void setEnumId(long enumId) { this.enumId 
    public GUID getGuid() { return guid; }

    public void setGuid( GUID guid ) { this.guid = guid; }

    public String getName() { return name; }

    public void setName( String name ) { this.name = name; }

    public GenericNamespaceNodeMeta getNamespaceNodeMeta() { return namespaceNodeMeta; }

    public void setNamespaceNodeMeta( GenericNamespaceNodeMeta namespaceNodeMeta ) { this.namespaceNodeMeta = namespaceNodeMeta; }

    /**
     * Gets the scenario common data.
     * @return scenarioCommonData
     */
    public GenericScenarioCommonData getScenarioCommonData() { return scenarioCommonData; }

    /**
     * Sets the scenario common data.
     * @param scenarioCommonData
     */
    public void setScenarioCommonData( GenericScenarioCommonData scenarioCommonData ) { this.scenarioCommonData = scenarioCommonData; }

    public String toString() {
        return "GenericNamespaceNode{enumId = " + enumId + ", guid = " + guid + ", name = " + name + ", namespaceNodeMeta = " + namespaceNodeMeta + ", scenarioCommonData = " + scenarioCommonData + "}";
    }
}

================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/GenericNamespaceNodeMeta.java
================================================
package com.pinecone.hydra.scenario.entity;

import com.pinecone.framework.util.id.GUID;

public class GenericNamespaceNodeMeta implements NamespaceNodeMeta {
    private long enumId;
    private GUID guid;

    public GenericNamespaceNodeMeta() {
    }

    public GenericNamespaceNodeMeta( long enumId, GUID guid ) {
        this.enumId = enumId;
        this.guid = guid;
    }

    public long getEnumId() { return enumId; }

    public void setEnumId( long enumId ) { this.enumId = enumId; }

    public GUID getGuid() { return guid; }

    public void setGuid( GUID guid ) { this.guid = guid; }

    public String toString() {
        return "GenericNamespaceNodeMeta{enumId = " + enumId + ", guid = " + guid + "}";
    }
}

================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/GenericScenarioCommonData.java
================================================
package com.pinecone.hydra.scenario.entity;

import com.pinecone.framework.util.id.GUID;

import java.time.LocalDateTime;

public class GenericScenarioCommonData implements ScenarioCommonData {
    private long enumId;
    private GUID guid;
    private LocalDateTime createTime;
    private LocalDateTime updateTime;

    public GenericScenarioCommonData() {
    }

    public GenericScenarioCommonData( long enumId, GUID guid, LocalDateTime createTime, LocalDateTime updateTime ) {
        this.enumId = enumId;
        this.guid = guid;
        this.createTime = createTime;
        this.updateTime = updateTime;
    }

    public long getEnumId() { return enumId; }

    public void setEnumId( long enumId ) { this.enumId = enumId; }

    public GUID getGuid() { return guid; }

    public void setGuid( GUID guid ) { this.guid = guid; }

    public LocalDateTime getCreateTime() { return createTime; }

    public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; }

    public LocalDateTime getUpdateTime() { return updateTime; }

    public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; }

    public String toString() {
        return "GenericScenarioCommonData{enumId = " + enumId + ", guid = " + guid + ", createTime = " + createTime + ", updateTime = " + updateTime + "}";
    }
}

================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/NamespaceNode.java
================================================
package com.pinecone.hydra.scenario.entity;

import com.pinecone.framework.util.id.GUID;
import
com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface NamespaceNode extends TreeNode { long getEnumId(); void setEnumId(long id); GUID getGuid(); void setGuid(GUID guid); @Override default NamespaceNode evinceTreeNode() { return this; } String getName(); void setName(String name); GenericNamespaceNodeMeta getNamespaceNodeMeta(); void setNamespaceNodeMeta(GenericNamespaceNodeMeta namespaceNodeMeta); GenericScenarioCommonData getScenarioCommonData(); void setScenarioCommonData(GenericScenarioCommonData scenarioCommonData); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/NamespaceNodeMeta.java ================================================ package com.pinecone.hydra.scenario.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface NamespaceNodeMeta extends Pinenut { long getEnumId(); void setEnumId(long enumId); GUID getGuid(); void setGuid(GUID guid); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/ScenarioCommonData.java ================================================ package com.pinecone.hydra.scenario.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface ScenarioCommonData extends Pinenut { long getEnumId(); void setEnumId(long id); GUID getGuid(); void setGuid(GUID guid); LocalDateTime getCreateTime(); void setCreateTime(LocalDateTime localDateTime); LocalDateTime getUpdateTime(); void setUpdateTime(LocalDateTime updateTime); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/NamespaceNodeManipulator.java ================================================ package com.pinecone.hydra.scenario.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.scenario.entity.NamespaceNode; import java.util.List; public interface NamespaceNodeManipulator extends Pinenut { void insert(NamespaceNode namespaceNode); void remove(GUID guid); NamespaceNode getNamespaceNode(GUID guid); void update(NamespaceNode namespaceNode); List getGuidsByName(String name); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/NamespaceNodeMetaManipulator.java ================================================ package com.pinecone.hydra.scenario.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.scenario.entity.NamespaceNodeMeta; public interface NamespaceNodeMetaManipulator extends Pinenut { void insert(NamespaceNodeMeta namespaceNodeMeta); void remove(GUID guid); NamespaceNodeMeta getNamespaceNodeMeta(GUID guid); void update(NamespaceNodeMeta namespaceNodeMeta); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/ScenarioCommonDataManipulator.java ================================================ package com.pinecone.hydra.scenario.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.scenario.entity.ScenarioCommonData; public interface ScenarioCommonDataManipulator extends Pinenut { 
void insert(ScenarioCommonData scenarioCommonData); void remove(GUID guid); ScenarioCommonData getScenarioCommonData(GUID guid); void update(ScenarioCommonData scenarioCommonData); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/ScenarioMasterManipulator.java ================================================ package com.pinecone.hydra.scenario.source; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; public interface ScenarioMasterManipulator extends KOIMasterManipulator { NamespaceNodeManipulator getNamespaceNodeManipulator(); NamespaceNodeMetaManipulator getNSNodeMetaManipulator(); ScenarioCommonDataManipulator getScenarioCommonDataManipulator(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/tree/DistributedScenarioMetaTree.java ================================================ package com.pinecone.hydra.scenario.tree; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface DistributedScenarioMetaTree extends Pinenut { String getPath(GUID guid); GUID insert(TreeNode treeNode); TreeNode get(GUID guid); TreeNode parsePath(String path); void remove(GUID guid); TreeNode getAsRootDepth(GUID guid); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/tree/GenericDistributedScenarioMetaTree.java ================================================ package com.pinecone.hydra.scenario.tree; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.scenario.entity.GenericNamespaceNode; import com.pinecone.hydra.scenario.entity.GenericNamespaceNodeMeta; import com.pinecone.hydra.scenario.entity.GenericScenarioCommonData; import com.pinecone.hydra.scenario.entity.NamespaceNode; import com.pinecone.hydra.scenario.entity.NamespaceNodeMeta; import com.pinecone.hydra.scenario.entity.ScenarioCommonData; import com.pinecone.hydra.scenario.source.ScenarioMasterManipulator; import com.pinecone.hydra.scenario.source.NamespaceNodeManipulator; import com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator; import com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.RegimentedImperialTree; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.ulf.util.guid.GUIDs; import com.pinecone.framework.util.id.GuidAllocator; import java.time.LocalDateTime; import java.util.List; public class GenericDistributedScenarioMetaTree implements DistributedScenarioMetaTree{ protected Hydrogen hydrogen; private ImperialTree distributedScenarioTree; private ScenarioMasterManipulator scenarioMasterManipulator; private NamespaceNodeMetaManipulator namespaceNodeMetaManipulator; private NamespaceNodeManipulator 
namespaceNodeManipulator; private ScenarioCommonDataManipulator scenarioCommonDataManipulator; public GenericDistributedScenarioMetaTree(Hydrogen hydrogen, KOIMasterManipulator masterManipulator){ this.hydrogen = hydrogen; this.scenarioMasterManipulator = (ScenarioMasterManipulator) masterManipulator; KOISkeletonMasterManipulator skeletonMasterManipulator = this.scenarioMasterManipulator.getSkeletonMasterManipulator(); TreeMasterManipulator treeMasterManipulator = (TreeMasterManipulator) skeletonMasterManipulator; this.distributedScenarioTree = new RegimentedImperialTree(treeMasterManipulator); this.namespaceNodeManipulator = this.scenarioMasterManipulator.getNamespaceNodeManipulator(); this.namespaceNodeMetaManipulator = this.scenarioMasterManipulator.getNSNodeMetaManipulator(); this.scenarioCommonDataManipulator = this.scenarioMasterManipulator.getScenarioCommonDataManipulator(); } public GenericDistributedScenarioMetaTree( Hydrogen hydrogen) { this.hydrogen = hydrogen; } public GenericDistributedScenarioMetaTree( KOIMappingDriver driver ) { this( driver.getSystem(), driver.getMasterManipulator() ); } @Override public String getPath(GUID guid) { String path = this.distributedScenarioTree.getCachePath(guid); if (path!=null) return path; ImperialTreeNode node = this.distributedScenarioTree.getNode(guid); Debug.trace(node.toString()); String assemblePath = this.getNodeName(node); while (!node.getParentGUIDs().isEmpty()){ List parentGuids = node.getParentGUIDs(); node = this.distributedScenarioTree.getNode(parentGuids.get(0)); String nodeName = this.getNodeName(node); assemblePath = nodeName + "." + assemblePath; } this.distributedScenarioTree.insertCachePath(guid,assemblePath); return assemblePath; } @Override public GUID insert( TreeNode treeNode ) { GenericNamespaceNode namespaceNode = (GenericNamespaceNode) treeNode; GuidAllocator guidAllocator = GUIDs.newGuidAllocator(); NamespaceNodeMeta namespaceNodeMeta = namespaceNode.getNamespaceNodeMeta(); GUID namespaceNodeMetaGuid = guidAllocator.nextGUID(); namespaceNodeMeta.setGuid(namespaceNodeMetaGuid); ScenarioCommonData scenarioCommonData = namespaceNode.getScenarioCommonData(); GUID scenarioCommonDataGuid = guidAllocator.nextGUID(); scenarioCommonData.setGuid(scenarioCommonDataGuid); scenarioCommonData.setCreateTime(LocalDateTime.now()); scenarioCommonData.setUpdateTime(LocalDateTime.now()); GUIDImperialTrieNode guidDistributedTrieNode = new GUIDImperialTrieNode(); GUID nodeGuid = guidAllocator.nextGUID(); namespaceNode.setGuid(nodeGuid); guidDistributedTrieNode.setGuid(nodeGuid); guidDistributedTrieNode.setNodeMetadataGUID(namespaceNodeMetaGuid); guidDistributedTrieNode.setBaseDataGUID(scenarioCommonDataGuid); guidDistributedTrieNode.setType(UOIUtils.createLocalJavaClass(namespaceNode.getClass().getName())); this.distributedScenarioTree.insert(guidDistributedTrieNode); this.namespaceNodeMetaManipulator.insert(namespaceNodeMeta); this.scenarioCommonDataManipulator.insert(scenarioCommonData); this.namespaceNodeManipulator.insert(namespaceNode); return null; } @Override public TreeNode get(GUID guid) { GUIDImperialTrieNode node = this.distributedScenarioTree.getNode(guid); NamespaceNode namespaceNode = this.namespaceNodeManipulator.getNamespaceNode(guid); GenericScenarioCommonData scenarioCommonData = (GenericScenarioCommonData) this.scenarioCommonDataManipulator.getScenarioCommonData(node.getAttributesGUID()); GenericNamespaceNodeMeta namespaceNodeMeta = (GenericNamespaceNodeMeta) 
this.namespaceNodeMetaManipulator.getNamespaceNodeMeta(node.getNodeMetadataGUID()); namespaceNode.setNamespaceNodeMeta(namespaceNodeMeta); namespaceNode.setScenarioCommonData(scenarioCommonData); return namespaceNode; } @Override public TreeNode parsePath(String path) { GUID guid = this.distributedScenarioTree.queryGUIDByPath( path ); if (guid != null){ return this.get(guid); } else{ String[] parts = this.processPath(path).split("\\."); List nodeByName = this.namespaceNodeManipulator.getGuidsByName(parts[parts.length - 1]); for(GUID nodeGuid :nodeByName){ if (this.getPath(nodeGuid).equals(path)){ return this.get(nodeGuid); } } } return null; } @Override public void remove(GUID guid) { List childNodes = this.distributedScenarioTree.getChildren(guid); if (childNodes == null || childNodes.isEmpty()){ this.removeNode(guid); } else { for(GUIDImperialTrieNode childNode : childNodes){ List parentNodes = this.distributedScenarioTree.fetchParentGuids(childNode.getGuid()); if (parentNodes.size() > 1){ this.distributedScenarioTree.removeInheritance(childNode.getGuid(),guid); }else { this.remove(childNode.getGuid()); } } this.removeNode(guid); } } @Override public TreeNode getAsRootDepth(GUID guid) { return null; } private String getNodeName(ImperialTreeNode node){ return this.namespaceNodeManipulator.getNamespaceNode(node.getGuid()).getName(); } private String processPath(String path) { // Use a regular expression to strip all parenthesized segments and their contents. return path.replaceAll("\\(.*?\\)", ""); } private void removeNode(GUID guid){ GUIDImperialTrieNode node = this.distributedScenarioTree.getNode(guid); this.distributedScenarioTree.purge( guid ); this.namespaceNodeManipulator.remove(guid); this.namespaceNodeMetaManipulator.remove(node.getNodeMetadataGUID()); this.scenarioCommonDataManipulator.remove(node.getAttributesGUID()); this.distributedScenarioTree.removeCachePath(guid); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Application.java ================================================ package com.pinecone.hydra.service; import com.pinecone.framework.util.id.GUID; public interface Application extends ServiceFamilyMeta { long getEnumId(); GUID getGuid(); String getName(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ArchService.java ================================================ package com.pinecone.hydra.service; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.service.kom.entity.ServiceElement; public abstract class ArchService implements Service { protected Identification mServiceId; protected ServiceElement mServiceMetaData; protected Map mMetaDataScope; public ArchService( Identification serviceId, ServiceElement serviceElement, Map metaDataScope ){ this.mServiceId = serviceId; this.mServiceMetaData = serviceElement; this.mMetaDataScope = metaDataScope; } public ArchService( Identification serviceId, ServiceElement serviceElement ){ this( serviceId, serviceElement, null ); } @Override public String getName() { return this.mServiceMetaData.getName(); } @Override public String getType() { return this.mServiceMetaData.getType(); } @Override public String getDisplayName() { return this.mServiceMetaData.getName(); } @Override public String getFullName() { return this.mServiceMetaData.getPath(); } public GUID getGuid() {
return this.mServiceMetaData.getGuid(); } @Override public Identification getId() { return this.getGuid(); } @Override public String getScenario() { return this.mServiceMetaData.getScenario(); } @Override public String getPrimaryImplLang() { return this.mServiceMetaData.getPrimaryImplLang(); } @Override public String getExtraInformation() { return this.mServiceMetaData.getExtraInformation(); } @Override public String getLevel() { return this.mServiceMetaData.getLevel(); } @Override public String getDescription() { return this.mServiceMetaData.getDescription(); } @Override public Map getMetaDataScope() { return this.mMetaDataScope; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ArchServiceFamilyMeta.java ================================================ package com.pinecone.hydra.service; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.ulf.util.guid.GUIDs; public abstract class ArchServiceFamilyMeta implements ServiceFamilyMeta { protected GUID guid; protected String name; protected String scenario; protected String primaryImplLang; protected String extraInformation; protected String szElementaryConfig; protected Map elementaryConfig; protected String level; protected String description; public ArchServiceFamilyMeta() { } public ArchServiceFamilyMeta( Map joEntity ) { this.apply( joEntity ); } protected ArchServiceFamilyMeta apply( Map joEntity ) { String szGuid = (String) joEntity.get( "guid" ); if( szGuid != null ) { this.guid = GUIDs.GUID128( (String) joEntity.get( "guid" ) ); } BeanMapDecoder.BasicDecoder.decode( this, joEntity ); return this; } public GUID getGuid() { return this.guid; } @Override public Identification getId() { return this.getGuid(); } @Override public String getName() { return this.name; } @Override public String getScenario() { return this.scenario; } @Override public String getPrimaryImplLang() { return this.primaryImplLang; } @Override public String getExtraInformation() { return this.extraInformation; } @Override public String getLevel() { return this.level; } @Override public String getDescription() { return this.description; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ProcApplication.java ================================================ package com.pinecone.hydra.service; public interface ProcApplication extends Application { } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ScheduleType.java ================================================ package com.pinecone.hydra.service; public enum ScheduleType { Automatic ( "Automatic" ), Manual ( "Manual" ), //DryRun ( "DryRun" ), Disable ( "Disable" ); private final String value; ScheduleType( String value ){ this.value = value; } public String getName(){ return this.value; } } ================================================ FILE: 
Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Service.java ================================================ package com.pinecone.hydra.service; import com.pinecone.framework.util.name.Namespace; import java.util.Map; public interface Service extends ServiceFamilyMeta { String getName(); // Service Name, e.g. WpnService String getType(); String getDisplayName(); // Service Long Name, e.g. Windows Push Notification System Service String getDescription(); String getFullName(); Namespace getGroupNamespace(); String getGroupName(); String getScenario() ; String getPrimaryImplLang() ; String getExtraInformation() ; String getLevel() ; Object getProcessImageObject(); Map getMetaDataScope(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceApplication.java ================================================ package com.pinecone.hydra.service; public interface ServiceApplication extends Application { } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceFamilyMeta.java ================================================ package com.pinecone.hydra.service; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; public interface ServiceFamilyMeta extends Pinenut { //long getEnumId(); //GUID getGuid(); Identification getId() ; String getName(); String getScenario(); String getPrimaryImplLang(); String getExtraInformation(); String getLevel(); String getDescription(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceInstance.java ================================================ package com.pinecone.hydra.service; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.service.entity.USII; import java.time.LocalDateTime; public interface ServiceInstance extends Pinenut { Identification getId(); Identification getServiceId(); USII getUSII(); Object getProcessObject(); Service getService(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceMeta.java ================================================ package com.pinecone.hydra.service; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.name.Namespace; public interface ServiceMeta extends Pinenut { GUID getGuid() ; String getName(); // Service Name, e.g. WpnService String getDisplayName(); // Service Long Name, e.g. 
Windows Push Notification System Service String getDescription(); String getFullName(); Namespace getGroupNamespace(); String getGroupName(); String getScenario() ; String getPrimaryImplLang() ; String getExtraInformation() ; String getLevel() ; } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Serviciom.java ================================================ package com.pinecone.hydra.service; public interface Serviciom extends Service { } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Servicium.java ================================================ package com.pinecone.hydra.service; import com.pinecone.framework.system.executum.Processum; public interface Servicium extends ServiceInstance { @Override Processum getProcessObject(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Status.java ================================================ package com.pinecone.hydra.service; public enum Status { New ( "New" ), WaitingDependency ( "WaitingDependency" ), Running ( "Running" ), Paused ( "Paused" ), Stopped ( "Stopped" ); private final String value; Status( String value ){ this.value = value; } public String getName(){ return this.value; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/entity/BindUSII.java ================================================ package com.pinecone.hydra.service.entity; import com.pinecone.framework.util.id.Identification; public class BindUSII implements USII { protected Long clientId; protected Identification serviceId; protected Identification instanceId; public BindUSII( Long clientId, Identification serviceId, Identification instanceId ) { this.clientId = clientId; this.serviceId = serviceId; this.instanceId = instanceId; } public BindUSII(){} @Override public Long getClientId() { return this.clientId; } @Override public Identification getServiceId() { return this.serviceId; } @Override public Identification getInstanceId() { return this.instanceId; } @Override public String getFullKey() { return this.serviceId + ":" + this.instanceId + ":" + this.clientId; } @Override public boolean equals( Object obj ) { if ( this == obj ) { return true; } if ( obj instanceof USII) { USII USII = (USII) obj; return this.clientId.equals( USII.getClientId() ) && this.serviceId.equals( USII.getServiceId() ); } else if ( obj instanceof Number ) { return this.clientId.equals( obj ); } else if ( obj instanceof Identification ) { return this.serviceId.equals( obj ); } return false; } @Override public int hashCode() { return this.clientId.hashCode() ^ this.serviceId.hashCode(); } public static USII wrap(Long clientId, Identification serviceId, Identification instanceId ) { return new BindUSII( clientId, serviceId, instanceId ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/entity/USII.java ================================================ package com.pinecone.hydra.service.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; /** * Uniform Service Instance Identifier */ public interface USII extends Pinenut { Long getClientId(); Identification getServiceId(); Identification getInstanceId(); String getFullKey(); } 
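A brief, hypothetical usage sketch (not a file in this repository; the class name UsiiUsageSketch is invented for illustration). It assumes a GUID may be passed where Identification is expected, as ArchServiceFamilyMeta.getId() already does, and demonstrates that BindUSII equality matches on (clientId, serviceId) while ignoring instanceId, whereas getFullKey() still distinguishes instances:

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.service.entity.BindUSII;
import com.pinecone.hydra.service.entity.USII;
import com.pinecone.ulf.util.guid.GUIDs;

public class UsiiUsageSketch {
    public static void main( String[] args ) {
        // Allocate distinct GUIDs for one service and two of its instances.
        GUID serviceId = GUIDs.newGuidAllocator().nextGUID();
        GUID instanceA = GUIDs.newGuidAllocator().nextGUID();
        GUID instanceB = GUIDs.newGuidAllocator().nextGUID();

        USII a = BindUSII.wrap( 42L, serviceId, instanceA );
        USII b = BindUSII.wrap( 42L, serviceId, instanceB );

        // Prints true: equals() compares clientId and serviceId only.
        System.out.println( a.equals( b ) );

        // Full keys remain distinct per instance: "<serviceId>:<instanceId>:<clientId>".
        System.out.println( a.getFullKey() );
        System.out.println( b.getFullKey() );
    }
}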
================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/GenericNamespaceRules.java ================================================ package com.pinecone.hydra.service.kom; import com.pinecone.framework.util.id.GUID; public class GenericNamespaceRules implements NamespaceRules { // Rule id private long enumId; // Rule uuid private GUID guid; // Scope private String scope; // Name private String name; // Rule description private String description; public GenericNamespaceRules() { } public GenericNamespaceRules(long enumId, GUID guid, String scope, String name, String description) { this.enumId = enumId; this.guid = guid; this.scope = scope; this.name = name; this.description = description; } /** * Getter. * @return enumId */ public long getEnumId() { return enumId; } /** * Setter. * @param enumId */ public void setEnumId(long enumId) { this.enumId = enumId; } /** * Getter. * @return guid */ public GUID getGuid() { return guid; } /** * Setter. * @param guid */ public void setGuid(GUID guid) { this.guid = guid; } /** * Getter. * @return scope */ public String getScope() { return scope; } /** * Setter. * @param scope */ public void setScope(String scope) { this.scope = scope; } /** * Getter. * @return name */ public String getName() { return name; } /** * Setter. * @param name */ public void setName(String name) { this.name = name; } /** * Getter. * @return description */ public String getDescription() { return description; } /** * Setter. * @param description */ public void setDescription(String description) { this.description = description; } public String toString() { return "GenericNamespaceRules{enumId = " + enumId + ", guid = " + guid + ", scope = " + scope + ", name = " + name + ", description = " + description + "}"; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/KernelServiceConfig.java ================================================ package com.pinecone.hydra.service.kom; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; public class KernelServiceConfig extends ArchKernelObjectConfig implements ServiceConfig { } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/NamespaceRules.java ================================================ package com.pinecone.hydra.service.kom; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface NamespaceRules extends Pinenut { long getEnumId(); void setEnumId(long id); GUID getGuid(); void setGuid(GUID guid); String getScope(); void setScope(String scope); String getName(); void setName(String name); String getDescription(); void setDescription(String description); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServiceConfig.java ================================================ package com.pinecone.hydra.service.kom; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface ServiceConfig extends KernelObjectConfig { } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServiceFamilyNode.java ================================================ package com.pinecone.hydra.service.kom; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import
com.pinecone.hydra.service.ServiceFamilyMeta; public interface ServiceFamilyNode extends ServiceFamilyMeta { long getEnumId(); void setEnumId( long id ); void setName( String name ); GUID getGuid(); void setGuid( GUID guid ); @Override default Identification getId() { return this.getGuid(); } String getScenario(); void setScenario( String scenario ); String getPrimaryImplLang(); void setPrimaryImplLang( String primaryImplLang ); String getExtraInformation(); void setExtraInformation( String extraInformation ); String getLevel(); void setLevel( String level ); String getDescription(); void setDescription( String description ); ServiceFamilyNode apply( Map joEntity ) ; } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServiceInstrument.java ================================================ package com.pinecone.hydra.service.kom; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.ElementNode; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry; import com.pinecone.hydra.system.ko.kom.ReparseKOMTree; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.util.List; public interface ServiceInstrument extends ReparseKOMTree { ServiceConfig KernelServiceConfig = new KernelServiceConfig(); ApplicationElement affirmApplication ( String path ); Namespace affirmNamespace ( String path ); ServiceElement affirmService ( String path ); ElementNode queryElement ( String path ); boolean containsChild ( GUID parentGuid, String childName ); void update ( TreeNode treeNode ); List fetchAllService(); void createServiceInstance( ServiceInstanceEntry serviceInstanceEntry); ServiceInstanceEntry queryServiceInstance(GUID serviceId ); void updateServiceInstance( ServiceInstanceEntry element ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServicePathSelector.java ================================================ package com.pinecone.hydra.service.kom; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.kom.StandardPathSelector; import com.pinecone.hydra.unit.imperium.ImperialTree; public class ServicePathSelector extends StandardPathSelector { public ServicePathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) { super( pathResolver, trieTree, dirMan, fileMans ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/UniformServiceInstrument.java ================================================ package com.pinecone.hydra.service.kom; import java.util.ArrayList; import java.util.List; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.ElementNode; import com.pinecone.hydra.service.kom.entity.GenericApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericNamespace; import 
com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry; import com.pinecone.hydra.service.kom.entity.ServiceTreeNode; import com.pinecone.hydra.service.kom.entity.ServoElement; import com.pinecone.hydra.service.kom.operator.GenericElementOperatorFactory; import com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator; import com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator; import com.pinecone.hydra.service.kom.source.ServiceNodeManipulator; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree; import com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition; import com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.RegimentedImperialTree; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.ulf.util.guid.GUIDs; import com.pinecone.ulf.util.guid.i128.GuidAllocator128V7; public class UniformServiceInstrument extends ArchReparseKOMTree implements ServiceInstrument { //GenericDistributedScopeTree protected ImperialTree imperialTree; protected ServiceMasterManipulator serviceMasterManipulator; protected ServiceNamespaceManipulator serviceNamespaceManipulator; protected ApplicationNodeManipulator applicationNodeManipulator; protected ServiceNodeManipulator serviceNodeManipulator; protected ServiceInstanceManipulator serviceInstanceManipulator; protected List folderManipulators; protected List fileManipulators; public UniformServiceInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, ServiceInstrument parent, String name, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, masterManipulator, ServiceInstrument.KernelServiceConfig, parent, name, guidAllocator ); this.serviceMasterManipulator = (ServiceMasterManipulator) masterManipulator; this.serviceNamespaceManipulator = this.serviceMasterManipulator.getNamespaceManipulator(); this.applicationNodeManipulator = this.serviceMasterManipulator.getApplicationNodeManipulator(); this.serviceNodeManipulator = this.serviceMasterManipulator.getServiceNodeManipulator(); this.serviceInstanceManipulator = this.serviceMasterManipulator.getServiceInstanceManipulator(); KOISkeletonMasterManipulator skeletonMasterManipulator = this.serviceMasterManipulator.getSkeletonMasterManipulator(); TreeMasterManipulator treeMasterManipulator = (TreeMasterManipulator) skeletonMasterManipulator; this.imperialTree = new RegimentedImperialTree(treeMasterManipulator); this.operatorFactory = new GenericElementOperatorFactory(this,(ServiceMasterManipulator) masterManipulator); this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); // TODO for customize service tree architecture. 
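// A reading of the wiring below (hedged, not authoritative): "folder"
// manipulators resolve container names (namespaces, application nodes), while
// "file" manipulators resolve leaf names (application nodes, service nodes);
// ApplicationNodeManipulator is registered in both roles because an application
// is addressable either way. Both arrays are handed to MultiFolderPathSelector
// together with the KOPathResolver and the imperial tree for path lookup.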
this.folderManipulators = new ArrayList<>( List.of( this.serviceNamespaceManipulator, this.applicationNodeManipulator ) ); this.fileManipulators = new ArrayList<>( List.of( this.applicationNodeManipulator, this.serviceNodeManipulator ) ); this.pathSelector = new MultiFolderPathSelector( this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} ) ); this.mReparseKOM = new GenericReparseKOMTreeAddition( this ); } public UniformServiceInstrument(Processum superiorProcess, KOIMasterManipulator masterManipulator ){ this( superiorProcess, masterManipulator, null, ServiceInstrument.class.getSimpleName(), new GuidAllocator128V7()); } // public UniformServiceInstrument( Hydrogen hydrogen ) { // this.hydrogen = hydrogen; // } public UniformServiceInstrument(KOIMappingDriver driver ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator() ); } public UniformServiceInstrument(KOIMappingDriver driver, ServiceInstrument parent, String name ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, null ); } protected ServiceTreeNode affirmTreeNodeByPath( String path, Class cnSup, Class nsSup ) { String[] parts = this.pathResolver.segmentPathParts( path ); String currentPath = ""; GUID parentGuid = GUIDs.Dummy128(); ServiceTreeNode node = this.queryElement(path); if ( node != null ){ return node; } ServiceTreeNode ret = null; for( int i = 0; i < parts.length; ++i ){ currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : "" ) + parts[ i ]; node = this.queryElement( currentPath ); if ( node == null){ if ( i == parts.length - 1 && cnSup != null ){ ServoElement servoElement = (ServoElement) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } ); servoElement.setName( parts[i] ); GUID guid = this.put( servoElement ); this.affirmOwnedNode( parentGuid, guid ); return servoElement; } else { Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } ); namespace.setName( parts[i] ); GUID guid = this.put( namespace ); if ( i != 0 ){ this.affirmOwnedNode( parentGuid, guid ); parentGuid = guid; } else { parentGuid = guid; } ret = namespace; } } else { parentGuid = node.getGuid(); } } return ret; } @Override public ApplicationElement affirmApplication( String path ) { return (ApplicationElement) this.affirmTreeNodeByPath( path, GenericApplicationElement.class, GenericNamespace.class ); } @Override public ServiceElement affirmService( String path ) { return (ServiceElement) this.affirmTreeNodeByPath( path, GenericServiceElement.class, GenericNamespace.class ); } @Override public ElementNode queryElement( String path ) { GUID guid = this.queryGUIDByPath( path ); if( guid != null ) { return this.get( guid ).evinceElementNode(); } return null; } @Override public Namespace affirmNamespace( String path ) { return ( Namespace ) this.affirmTreeNodeByPath( path, null, GenericNamespace.class ); } protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) { List guids = manipulator.getGuidsByName( childName ); for( GUID guid : guids ) { List ps = this.imperialTree.fetchParentGuids( guid ); if( ps.contains( parentGuid ) ){ return true; } } return false; } @Override public boolean containsChild( GUID parentGuid, String childName ) { for( GUIDNameManipulator manipulator : this.fileManipulators ) { if( this.containsChild( manipulator, parentGuid, childName ) ) { return true; } } 
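// Leaf names were checked first above; now fall back to the folder-name
// manipulators, since a same-named child may be a namespace or application
// container rather than a leaf element.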
for( GUIDNameManipulator manipulator : this.folderManipulators ) { if( this.containsChild( manipulator, parentGuid, childName ) ) { return true; } } return false; } /** * Affirm the path exists in cache, if required. * Ensures the path is present in the cache, where a concrete implementation makes this necessary. * For GenericDistributedScopeTree::getPath, the cache is written automatically by default, so invoking getPath here guarantees that the cached path exists. * @param guid target guid. */ protected void affirmPathExist( GUID guid ) { this.imperialTree.getCachePath( guid ); } @Override public ServiceTreeNode get( GUID guid ){ return (ServiceTreeNode) super.get( guid ); } @Override public void update( TreeNode treeNode ) { TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() ); operator.update( treeNode ); } @Override public void remove( GUID guid ) { super.remove( guid ); } @Override public Object queryEntityHandleByNS(String path, String szBadSep, String szTargetSep) { return null; } @Override public List fetchAllService() { return this.serviceNodeManipulator.fetchAllService(); } @Override public void createServiceInstance(ServiceInstanceEntry serviceInstanceEntry) { this.serviceInstanceManipulator.initServiceInstance(serviceInstanceEntry); } @Override public ServiceInstanceEntry queryServiceInstance(GUID serviceId) { return this.serviceInstanceManipulator.queryServiceInstance( serviceId ); } @Override public void updateServiceInstance(ServiceInstanceEntry element) { this.serviceInstanceManipulator.updateServiceInstance( element ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ApplicationElement.java ================================================ package com.pinecone.hydra.service.kom.entity; import com.pinecone.hydra.service.kom.ServiceFamilyNode; public interface ApplicationElement extends FolderElement, ServiceFamilyNode { String getDeploymentMethod(); void setDeploymentMethod( String deploymentMethod ); @Override default ApplicationElement evinceApplicationElement() { return this; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ArchElementNode.java ================================================ package com.pinecone.hydra.service.kom.entity; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.service.ArchServiceFamilyMeta; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.framework.util.id.GuidAllocator; public abstract class ArchElementNode extends ArchServiceFamilyMeta implements ElementNode { protected long enumId; protected GUIDImperialTrieNode distributedTreeNode; protected ServiceInstrument serviceInstrument; public ArchElementNode() { super(); } public ArchElementNode( Map joEntity ) { super( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public ArchElementNode( Map joEntity, ServiceInstrument serviceInstrument) { super( joEntity ); this.apply(serviceInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public ArchElementNode( ServiceInstrument serviceInstrument) { this.apply(serviceInstrument); } public
void apply( ServiceInstrument serviceInstrument) { this.serviceInstrument = serviceInstrument; GuidAllocator guidAllocator = this.serviceInstrument.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); } @Override public ArchElementNode apply( Map joEntity ) { super.apply( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); return this; } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId( long enumId ) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid( GUID guid ) { this.guid = guid; } @Override public void setName( String name ) { this.name = name; } @Override public void setScenario( String scenario ) { this.scenario = scenario; } @Override public void setPrimaryImplLang( String primaryImplLang ) { this.primaryImplLang = primaryImplLang; } @Override public void setExtraInformation( String extraInformation ) { this.extraInformation = extraInformation; } @Override public void setLevel( String level ) { this.level = level; } @Override public void setDescription( String description ) { this.description = description; } @Override public GUIDImperialTrieNode getDistributedTreeNode() { return this.distributedTreeNode; } @Override public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) { this.distributedTreeNode = distributedTreeNode; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } protected Collection fetchChildren() { Collection guids = this.fetchChildrenGuids(); List elementNodes = new ArrayList<>(); for( GUID guid : guids ){ ElementNode elementNode = (ElementNode) this.serviceInstrument.get( guid ); elementNodes.add( elementNode ); } return elementNodes; } protected Collection fetchChildrenGuids() { return this.serviceInstrument.fetchChildrenGuids( this.getGuid() ); } protected void addChild( ElementNode child ) { GUID childId; boolean bContainsChild = this.containsChild( child.getName() ); if( bContainsChild ) { return; } else { childId = this.serviceInstrument.put( child ); } this.serviceInstrument.affirmOwnedNode( this.guid, childId ); } protected boolean containsChild( String childName ) { return this.serviceInstrument.containsChild( this.guid, childName ); } @Override public JSONObject toJSONObject() { return BeanColonist.DirectColonist.populate( this, ServoElement.UnbeanifiedKeys ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ArchServoElement.java ================================================ package com.pinecone.hydra.service.kom.entity; import java.time.LocalDateTime; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.service.kom.ServiceInstrument; public abstract class ArchServoElement extends ArchElementNode implements ServoElement { protected GUID metaGuid; protected String path; protected String type; protected String alias; protected String resourceType; protected LocalDateTime createTime; protected LocalDateTime updateTime; public ArchServoElement() { super(); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public ArchServoElement( Map joEntity ) { super( joEntity ); this.createTime = LocalDateTime.now(); this.updateTime = 
LocalDateTime.now(); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public ArchServoElement( Map joEntity, ServiceInstrument serviceInstrument) { this( joEntity ); this.apply(serviceInstrument); } public ArchServoElement( ServiceInstrument serviceInstrument) { super(serviceInstrument); } @Override public ArchElementNode apply( Map joEntity ) { super.apply( joEntity ); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); return this; } /** * Overridden to keep keys in prior json-decode. */ @Override public GUID getGuid() { return super.getGuid(); } @Override public String getName() { return super.getName(); } @Override public GUID getMetaGuid() { return this.metaGuid; } @Override public void setMetaGuid( GUID metaGuid ) { this.metaGuid = metaGuid; } @Override public String getPath() { return this.path; } @Override public void setPath( String path ) { this.path = path; } @Override public String getType() { return this.type; } @Override public void setType( String type ) { this.type = type; } @Override public String getAlias() { return this.alias; } @Override public void setAlias( String alias ) { this.alias = alias; } @Override public String getResourceType() { return this.resourceType; } @Override public void setResourceType( String resourceType ) { this.resourceType = resourceType; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime( LocalDateTime createTime ) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } @Override public void setUpdateTime( LocalDateTime updateTime ) { this.updateTime = updateTime; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this, UnbeanifiedKeys); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/CommonMeta.java ================================================ package com.pinecone.hydra.service.kom.entity; public interface CommonMeta extends ElementNode { } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ElementNode.java ================================================ package com.pinecone.hydra.service.kom.entity; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.service.kom.ServiceFamilyNode; import com.pinecone.hydra.system.ko.meta.ElementObject; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public interface ElementNode extends ServiceTreeNode, ServiceFamilyNode, ElementObject { default Namespace evinceNamespace() { return null; } default ApplicationElement evinceApplicationElement() { return null; } default ServiceElement evinceServiceElement() { return null; } GUIDImperialTrieNode getDistributedTreeNode(); void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ); JSONObject toJSONObject(); @Override default ElementNode evinceElementNode(){ return this; } @Override default String objectCategoryName() { return "Service"; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/FolderElement.java ================================================ package com.pinecone.hydra.service.kom.entity; import java.util.Collection; import 
com.pinecone.framework.util.id.GUID; public interface FolderElement extends ElementNode { Collection fetchChildren(); Collection fetchChildrenGuids(); void addChild( ElementNode child ); boolean containsChild( String childName ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericApplicationElement.java ================================================ package com.pinecone.hydra.service.kom.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.service.kom.ServiceInstrument; import java.util.Collection; import java.util.List; import java.util.Map; public class GenericApplicationElement extends ArchServoElement implements ApplicationElement { protected String deploymentMethod; public GenericApplicationElement() { super(); } public GenericApplicationElement( Map joEntity ) { super( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericApplicationElement( Map joEntity, ServiceInstrument serviceInstrument) { super( joEntity, serviceInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericApplicationElement( ServiceInstrument serviceInstrument) { super(serviceInstrument); } @Override public String getDeploymentMethod() { return this.deploymentMethod; } @Override public void setDeploymentMethod( String deploymentMethod ) { this.deploymentMethod = deploymentMethod; } @Override public Collection fetchChildren() { return super.fetchChildren(); } @Override public Collection fetchChildrenGuids() { return super.fetchChildrenGuids(); } @Override public void addChild( ElementNode child ) { if( child instanceof FolderElement ) { throw new IllegalArgumentException( "Foisting `FolderElement` into application node is not accepted." 
); } super.addChild( child ); } @Override public boolean containsChild( String childName ) { return super.containsChild( childName ); } @Override public JSONObject toJSONObject() { Collection children = this.fetchChildren(); JSONObject jo = BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys); JSONObject joChildren = new JSONMaptron(); for( ElementNode node : children ) { joChildren.put( node.getName(), node.toJSONObject() ); } jo.put( "services", joChildren ); return jo; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericCommonMeta.java ================================================ package com.pinecone.hydra.service.kom.entity; public class GenericCommonMeta extends ArchElementNode implements CommonMeta { public GenericCommonMeta() { super(); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericNamespace.java ================================================ package com.pinecone.hydra.service.kom.entity; import java.util.Collection; import java.util.Map; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanColonist; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.service.kom.GenericNamespaceRules; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public class GenericNamespace extends ArchElementNode implements Namespace { protected GUID rulesGUID; protected GUID metaGuid; protected GUIDImperialTrieNode distributedTreeNode; protected GenericNamespaceRules classificationRules; protected ServiceNamespaceManipulator namespaceManipulator; public GenericNamespace() { super(); } public GenericNamespace( Map joEntity ) { super( joEntity ); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericNamespace( Map joEntity, ServiceInstrument serviceInstrument) { super( joEntity, serviceInstrument); BeanMapDecoder.BasicDecoder.decode( this, joEntity ); } public GenericNamespace( ServiceInstrument serviceInstrument) { super(serviceInstrument); } public GenericNamespace(ServiceInstrument serviceInstrument, ServiceNamespaceManipulator namespaceManipulator ) { this(serviceInstrument); this.namespaceManipulator = namespaceManipulator; } @Override public GUIDImperialTrieNode getDistributedTreeNode() { return this.distributedTreeNode; } @Override public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) { this.distributedTreeNode = distributedTreeNode; } @Override public GenericNamespaceRules getClassificationRules() { return this.classificationRules; } @Override public void setClassificationRules( GenericNamespaceRules classificationRules ) { this.classificationRules = classificationRules; } @Override public GUID getMetaGuid() { return this.metaGuid; } @Override public void setMetaGuid( GUID metaGuid ) { this.metaGuid = metaGuid; } @Override public GUID getRulesGUID() { return this.rulesGUID; } @Override public void setRulesGUID( GUID rulesGUID ) { this.rulesGUID = rulesGUID; } @Override public JSONObject toJSONObject() { Collection children = 
this.fetchChildren(); JSONObject jo = new JSONMaptron(); for( ElementNode node : children ) { jo.put( node.getName(), node.toJSONObject() ); } return jo; } @Override public JSONObject toJSONDetails() { return BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys); } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "guid" , this.getGuid() ), new KeyValue<>( "name" , this.getName() ) } ); } @Override public String toString() { return this.name; } @Override public Collection fetchChildren() { return super.fetchChildren(); } @Override public Collection fetchChildrenGuids() { return super.fetchChildrenGuids(); } @Override public void addChild( ElementNode child ) { super.addChild( child ); } @Override public boolean containsChild( String childName ) { return super.containsChild( childName ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericServiceElement.java ================================================ package com.pinecone.hydra.service.kom.entity; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import com.pinecone.hydra.service.kom.ServiceInstrument; import java.util.Map; public class GenericServiceElement extends ArchServoElement implements ServiceElement { protected String serviceType; private void initSelf( Map joEntity ) { BeanMapDecoder.BasicDecoder.decode( this, joEntity ); if ( this.szElementaryConfig != null ) { this.elementaryConfig = (JSONObject)JSON.parse( this.szElementaryConfig ); } } public GenericServiceElement() { super(); } public GenericServiceElement( Map joEntity ) { super( joEntity ); this.initSelf( joEntity ); } public GenericServiceElement( Map joEntity, ServiceInstrument serviceInstrument) { super( joEntity, serviceInstrument); this.initSelf( joEntity ); } public GenericServiceElement( ServiceInstrument serviceInstrument) { super(serviceInstrument); } @Override public String getServiceType() { return this.serviceType; } @Override public void setServiceType( String serviceType ) { this.serviceType = serviceType; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericServiceInstanceEntity.java ================================================ package com.pinecone.hydra.service.kom.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry; import java.time.LocalDateTime; public class GenericServiceInstanceEntity implements ServiceInstanceEntry { protected GUID mGuid; protected GUID mServiceGuid; protected String mszStatus; protected LocalDateTime mLatestStartTime; protected LocalDateTime mLatestEndTime; protected String mErrorCause; protected int mnRunCount; protected GUID mDeployGuid; protected String mIp; @Override public void setGuid( GUID guid ) { this.mGuid = guid; } @Override public GUID getGuid() { return this.mGuid; } @Override public void setServiceGuid( GUID guid ) { this.mServiceGuid = guid; } @Override public GUID getServiceGuid() { return this.mServiceGuid; } @Override public String getStatus(){ return this.mszStatus; } @Override public void setStatus( String status ){ this.mszStatus = status; } @Override public LocalDateTime getLatestStartTime(){ return this.mLatestStartTime; } @Override public void setLatestStartTime( 
LocalDateTime latestStartTime ){ this.mLatestStartTime = latestStartTime; } @Override public LocalDateTime getLatestEndTime(){ return this.mLatestEndTime; } @Override public void setLatestEndTime( LocalDateTime latestEndTime ){ this.mLatestEndTime = latestEndTime; } @Override public String getErrorCause(){ return this.mErrorCause; } @Override public void setErrorCause( String errorCause ){ this.mErrorCause = errorCause; } @Override public int getRunCount(){ return this.mnRunCount; } @Override public void setRunCount( int runCount ){ this.mnRunCount = runCount; } @Override public GUID getDeployGuid() { return this.mDeployGuid; } @Override public void setDeployGuid( GUID deployGuid ) { this.mDeployGuid = deployGuid; } @Override public String getIp() { return this.mIp; } @Override public void setIp( String ip ) { this.mIp = ip; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/Namespace.java ================================================ package com.pinecone.hydra.service.kom.entity; import java.util.Set; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.service.kom.GenericNamespaceRules; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; public interface Namespace extends FolderElement { Set UnbeanifiedKeys = Set.of( "distributedTreeNode", "classificationRules" ); long getEnumId(); void setEnumId( long id ); GUID getGuid(); void setGuid( GUID guid ); GUID getMetaGuid(); void setMetaGuid( GUID metaGuid ); String getName(); void setName( String name ); GUID getRulesGUID(); void setRulesGUID( GUID rulesGUID ); GenericNamespaceRules getClassificationRules(); void setClassificationRules( GenericNamespaceRules classificationRules ); GUIDImperialTrieNode getDistributedTreeNode(); void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ); @Override default Namespace evinceNamespace() { return this; } JSONObject toJSONDetails(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServiceElement.java ================================================ package com.pinecone.hydra.service.kom.entity; public interface ServiceElement extends ServoElement { @Override default ServiceElement evinceServiceElement() { return this; } String getServiceType(); void setServiceType( String serviceType ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServiceInstanceEntry.java ================================================ package com.pinecone.hydra.service.kom.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface ServiceInstanceEntry extends Pinenut { void setGuid( GUID guid ); GUID getGuid(); void setServiceGuid( GUID guid ); GUID getServiceGuid(); String getStatus(); void setStatus( String status ); LocalDateTime getLatestStartTime(); void setLatestStartTime( LocalDateTime latestStartTime ); LocalDateTime getLatestEndTime(); void setLatestEndTime( LocalDateTime latestEndTime ); String getErrorCause(); void setErrorCause( String errorCause ); int getRunCount(); void setRunCount( int runCount ); GUID getDeployGuid(); void setDeployGuid( GUID deployGuid ); String getIp(); void setIp( String ip ); } ================================================ FILE: 
Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServiceTreeNode.java ================================================ package com.pinecone.hydra.service.kom.entity; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface ServiceTreeNode extends TreeNode { String getName(); default String getMetaType() { return this.className().replace("Generic",""); } default ServiceTreeNode evinceTreeNode(){ return this; } default ElementNode evinceElementNode(){ return null; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServoElement.java ================================================ package com.pinecone.hydra.service.kom.entity; import java.time.LocalDateTime; import java.util.Set; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceFamilyNode; public interface ServoElement extends ElementNode, ServiceFamilyNode { Set UnbeanifiedKeys = Set.of( "distributedTreeNode" ); long getEnumId(); void setEnumId( long id ); GUID getGuid(); void setGuid( GUID guid ); GUID getMetaGuid(); void setMetaGuid( GUID metaGuid ); String getName(); void setName( String name ); String getPath(); void setPath( String path ); String getType(); void setType( String type ); String getAlias(); void setAlias( String alias ); String getResourceType(); void setResourceType( String resourceType ); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceInstrumentDecoder.java ================================================ package com.pinecone.hydra.service.kom.marshaling; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ElementNode; public interface ServiceInstrumentDecoder extends Pinenut { default ElementNode decode( Object val, GUID parentGUID ) { if ( val instanceof Map ) { Map map = (Map) val; if( map.isEmpty() ) { return null; } else if( map.size() > 1 ) { throw new IllegalArgumentException( "Root element should have exactly 1 entry."
); } Map.Entry kv = (Map.Entry) map.entrySet().iterator().next(); return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID ); } return null; } ElementNode decode( String key, Object val, GUID parentGUID ); default ElementNode decode( Map.Entry kv, GUID parentGUID ) { return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID ); } default ElementNode decode( Object val ) { return this.decode( val, null ); } default ElementNode decode( String key, Object val ) { return this.decode( key, val, null ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceInstrumentEncoder.java ================================================ package com.pinecone.hydra.service.kom.marshaling; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.service.kom.entity.ElementNode; public interface ServiceInstrumentEncoder extends Pinenut { Object encode( ElementNode node ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceJSONDecoder.java ================================================ package com.pinecone.hydra.service.kom.marshaling; import java.util.Collection; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.ElementNode; import com.pinecone.hydra.service.kom.entity.FolderElement; import com.pinecone.hydra.service.kom.entity.GenericApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericNamespace; import com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.entity.ServiceElement; public class ServiceJSONDecoder implements ServiceInstrumentDecoder { protected ServiceInstrument instrument; public ServiceJSONDecoder(ServiceInstrument instrument ) { this.instrument = instrument; } @Override @SuppressWarnings( "unchecked" ) public ElementNode decode( String szName, Object o, GUID parentGuid ) { if ( o instanceof Map ) { return (ElementNode) this.instrument.get( this.decodeJSONObject( szName, (Map) o, parentGuid ).getGuid() ); } throw new IllegalArgumentException( "Elements of `ServiceInstrument` should all be objects."
); } protected Namespace newNamespace( String szName, Map jo ) { Namespace ns = new GenericNamespace( jo, this.instrument ); ns.setName( szName ); return ns; } protected Object[] affirmNSExisted( String szName, GUID parentGuid, Map jo ) { Namespace ns = null; if( parentGuid == null ) { ElementNode rootE = this.instrument.queryElement( szName ); if( rootE != null ) { if( rootE.evinceNamespace() == null ) { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be namespace.", szName ) ); } ns = rootE.evinceNamespace(); } } else { ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid ); if( parentNode instanceof Namespace ) { Collection destChildren = parentNode.evinceNamespace().fetchChildren(); for( ElementNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof Namespace ) { ns = (Namespace) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be namespace.", szName ) ); } } } } } GUID currentGuid; if( ns == null ) { ns = this.newNamespace( szName, jo ); currentGuid = this.instrument.put( ns ); this.instrument.affirmOwnedNode( parentGuid, currentGuid ); } else { currentGuid = ns.getGuid(); } return new Object[] { ns, currentGuid }; } protected Object[] affirmAppExisted( String szName, GUID parentGuid, Map jo ) { ApplicationElement app = null; if( parentGuid == null ) { ElementNode rootE = this.instrument.queryElement( szName ); if( rootE != null ) { if( rootE.evinceApplicationElement() == null ) { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ApplicationElement`.", szName ) ); } app = rootE.evinceApplicationElement(); } } else { ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid ); if( parentNode instanceof Namespace ) { Collection destChildren = parentNode.evinceNamespace().fetchChildren(); for( ElementNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof ApplicationElement ) { app = (ApplicationElement) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ApplicationElement`.", szName ) ); } } } } } ApplicationElement neo ; if( app == null ) { neo = new GenericApplicationElement( jo, this.instrument ); neo.setName( szName ); } else { neo = app; } return new Object[] { app, neo }; } protected Object[] affirmSerExisted( String szName, GUID parentGuid, Map jo ) { ServiceElement ser = null; if( parentGuid == null ) { ElementNode rootE = this.instrument.queryElement( szName ); if( rootE != null ) { if( rootE.evinceServiceElement() == null ) { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ServiceElement`.", szName ) ); } ser = rootE.evinceServiceElement(); } } else { ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid ); Collection destChildren; if( parentNode instanceof FolderElement ) { destChildren = ( (FolderElement) parentNode ).fetchChildren(); for( ElementNode node : destChildren ) { if( szName.equals( node.getName() ) ) { if( node instanceof ServiceElement ) { ser = (ServiceElement) node; break; } else { throw new IllegalArgumentException( String.format( "Existing child-destination [%s] should be `ServiceElement`.", szName ) ); } } } } else { throw new IllegalStateException( String.format( "Parent of `ServiceElement` [%s] should be `FolderElement`.", szName ) ); } } ServiceElement neo ; if( ser == null ) { neo = new
GenericServiceElement( jo, this.instrument ); neo.setName( szName ); } else { neo = ser; } return new Object[] { ser, neo }; } protected Object[] decodeExternalElements( String szMetaType, String szName, GUID parentGuid, Map jo ) throws IllegalArgumentException { throw new IllegalArgumentException( "Unknown metaType '" + szMetaType + "'." ); } protected void decodeChildren ( Map jo, GUID currentGuid ) { for ( Object o : jo.entrySet() ) { Map.Entry kv = (Map.Entry) o; Object val = kv.getValue(); if( val instanceof Map ) { this.decode( kv.getKey().toString(), val, currentGuid ); } } } protected ElementNode decodeJSONObject( String szName, Map jo, GUID parentGuid ) { String szMetaType = (String) jo.get( "metaType" ); boolean isNamespace = szMetaType == null || szMetaType.equals( Namespace.class.getSimpleName() ); ElementNode elementNode; GUID currentGuid; if ( isNamespace ) { Object[] pair = this.affirmNSExisted( szName, parentGuid, jo ); Namespace ns = (Namespace) pair[ 0 ]; currentGuid = (GUID) pair[ 1 ]; this.decodeChildren( jo, currentGuid ); elementNode = ns; } else { Object[] pair; boolean bIsFolderElement = false; if( szMetaType.equals( ApplicationElement.class.getSimpleName() ) ) { pair = this.affirmAppExisted( szName, parentGuid, jo ); bIsFolderElement = true; } else if( szMetaType.equals( ServiceElement.class.getSimpleName() ) ) { pair = this.affirmSerExisted( szName, parentGuid, jo ); } else { try{ pair = this.decodeExternalElements( szMetaType, szName, parentGuid, jo ); } catch ( RuntimeException e ) { throw new IllegalArgumentException( e ); } } ElementNode arc = (ElementNode) pair[ 0 ]; ElementNode neo = (ElementNode) pair[ 1 ]; if( arc == null ) { currentGuid = this.instrument.put( neo ); this.instrument.affirmOwnedNode( parentGuid, currentGuid ); } else { currentGuid = arc.getGuid(); this.instrument.update( neo ); } if( bIsFolderElement ) { Object services = jo.get( "services" ); if( services instanceof Map ) { Map joSer = (Map) services; this.decodeChildren( joSer, currentGuid ); } } elementNode = neo; } return elementNode; } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceJSONEncoder.java ================================================ package com.pinecone.hydra.service.kom.marshaling; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ElementNode; public class ServiceJSONEncoder implements ServiceInstrumentEncoder { protected ServiceInstrument instrument; public ServiceJSONEncoder(ServiceInstrument instrument ) { this.instrument = instrument; } @Override public Object encode( ElementNode node ) { return node.toJSONObject(); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ApplicationElementOperator.java ================================================ package com.pinecone.hydra.service.kom.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericNamespace; import com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator; import com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator; import 
com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; import java.util.List; public class ApplicationElementOperator extends ArchElementOperator implements ElementOperator { protected ApplicationNodeManipulator applicationNodeManipulator; protected ApplicationMetaManipulator applicationMetaManipulator; public ApplicationElementOperator( ElementOperatorFactory factory ) { this( factory.getServiceMasterManipulator(), factory.getServicesTree() ); this.factory = factory; } public ApplicationElementOperator( ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument ){ super( masterManipulator, serviceInstrument ); this.applicationNodeManipulator = masterManipulator.getApplicationNodeManipulator(); this.applicationMetaManipulator = masterManipulator.getApplicationElementManipulator(); } @Override public GUID insert( TreeNode treeNode ) { GenericApplicationElement applicationElement = (GenericApplicationElement) treeNode; GuidAllocator guidAllocator = this.serviceInstrument.getGuidAllocator(); GUID applicationNodeGUID = guidAllocator.nextGUID(); applicationElement.setGuid( applicationNodeGUID ); this.applicationNodeManipulator.insert( applicationElement ); GUID descriptionGUID = guidAllocator.nextGUID(); if( applicationElement.getMetaGuid() == null ){ applicationElement.setMetaGuid( descriptionGUID ); } this.applicationMetaManipulator.insert( applicationElement ); // store the application meta information in the meta table this.nodeMetaManipulator.insert( applicationElement ); // store the node record in the master table GUIDImperialTrieNode node = new GUIDImperialTrieNode(); node.setNodeMetadataGUID( descriptionGUID ); node.setGuid( applicationNodeGUID ); node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) ); this.imperialTree.insert( node ); return applicationNodeGUID; } @Override public void purge( GUID guid ) { // A namespace node is purged recursively: owned and referenced child nodes are deleted, and a shared child is only cleaned up once no other reference to it remains. List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren( guid ); GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); if ( !childNodes.isEmpty() ){ List<GUID> subordinates = this.imperialTree.getSubordinates( guid ); if ( !subordinates.isEmpty() ){ for ( GUID subordinateGuid : subordinates ){ this.purge( subordinateGuid ); } } childNodes = this.imperialTree.getChildren( guid ); for( GUIDImperialTrieNode childNode : childNodes ){ List<GUID> parentNodes = this.imperialTree.fetchParentGuids( childNode.getGuid() ); if ( parentNodes.size() > 1 ){ this.imperialTree.removeInheritance( childNode.getGuid(), guid ); } else { this.purge( childNode.getGuid() ); } } } if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() ) ){ this.removeNode( guid ); } else { UOI uoi = node.getType(); String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() ); if( metaType == null ) { TreeNode newInstance = (TreeNode) uoi.newInstance( new Class[]{ ServiceInstrument.class }, this.serviceInstrument ); metaType = newInstance.getMetaType(); } ElementOperator operator = this.getOperatorFactory().getOperator( metaType ); operator.purge( guid ); } } @Override public ApplicationElement get( GUID guid ) { GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); ApplicationElement applicationElement; if( node.getNodeMetadataGUID() != null ){ applicationElement = this.applicationMetaManipulator.getApplicationElement( node.getNodeMetadataGUID(), this.serviceInstrument ); } else { applicationElement = new GenericApplicationElement(); } this.applyCommonMeta( applicationElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) ); applicationElement.setName( this.applicationNodeManipulator.getApplicationNode( guid ).getName() ); applicationElement.setGuid( guid ); return applicationElement; } @Override public ApplicationElement get( GUID guid, int depth ) { return this.get( guid ); } @Override public ApplicationElement getAsRootDepth( GUID guid ) { return this.get( guid ); } @Override public void update( TreeNode treeNode ) { GenericApplicationElement applicationElement = (GenericApplicationElement) treeNode; this.applicationNodeManipulator.update( applicationElement ); this.applicationMetaManipulator.update( applicationElement ); this.nodeMetaManipulator.update( applicationElement ); } @Override public void updateName( GUID guid, String name ) { } protected void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath( guid ); this.applicationMetaManipulator.remove( node.getAttributesGUID() ); this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() ); this.applicationNodeManipulator.remove( node.getGuid() ); } }
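ApplicationElementOperator.purge resolves the operator for foreign node types through the factory's meta-type registry before delegating. A small dispatch sketch follows, assuming only the factory wiring shown in this package; the `examples` package and the `purgeApplication` helper are hypothetical.

package examples; // hypothetical example, not part of the repository

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.service.kom.operator.ElementOperator;
import com.pinecone.hydra.service.kom.operator.ElementOperatorFactory;

public class OperatorDispatchSketch {
    // Mirrors the dispatch inside purge: look up the operator registered for a
    // meta type, then delegate the destructive operation to it.
    public static void purgeApplication( ElementOperatorFactory factory, GUID guid ) {
        ElementOperator operator = factory.getOperator( ElementOperatorFactory.DefaultApplicationNode );
        operator.purge( guid );
    }
}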
================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ArchElementOperator.java
================================================
package com.pinecone.hydra.service.kom.operator; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.CommonMeta; import com.pinecone.hydra.service.kom.entity.ElementNode; import com.pinecone.hydra.service.kom.source.NodeMetaManipulator; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.unit.imperium.ImperialTree; public abstract class ArchElementOperator implements ElementOperator { protected ServiceInstrument serviceInstrument; protected ImperialTree imperialTree; protected NodeMetaManipulator nodeMetaManipulator; protected ServiceMasterManipulator serviceMasterManipulator; protected ElementOperatorFactory factory; public ArchElementOperator( ElementOperatorFactory factory ){ this( factory.getServiceMasterManipulator(), factory.getServicesTree() ); this.factory = factory; } public ArchElementOperator( ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument ){ this.imperialTree = serviceInstrument.getMasterTrieTree(); this.serviceInstrument = serviceInstrument; this.nodeMetaManipulator = masterManipulator.getNodeMetaManipulator(); this.serviceMasterManipulator = masterManipulator; //this.factory = new GenericServiceOperatorFactory(servicesTree,masterManipulator); } public ElementOperatorFactory getOperatorFactory() { return this.factory; } protected void applyCommonMeta( ElementNode ele, CommonMeta commonMeta ){ if( commonMeta != null ) { ele.setGuid ( commonMeta.getGuid() ); ele.setScenario ( commonMeta.getScenario() ); ele.setPrimaryImplLang ( commonMeta.getPrimaryImplLang() ); ele.setExtraInformation ( commonMeta.getExtraInformation() ); ele.setLevel ( commonMeta.getLevel() ); ele.setDescription ( commonMeta.getDescription() ); } } }
================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ElementOperator.java
================================================
package com.pinecone.hydra.service.kom.operator; import
com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ElementNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface ElementOperator extends TreeNodeOperator { @Override ElementNode get( GUID guid ); @Override ElementNode get( GUID guid, int depth ); @Override ElementNode getAsRootDepth( GUID guid ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ElementOperatorFactory.java ================================================ package com.pinecone.hydra.service.kom.operator; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.unit.imperium.operator.OperatorFactory; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface ElementOperatorFactory extends OperatorFactory { String DefaultServiceNode = ServiceElement.class.getSimpleName(); String DefaultNamespace = Namespace.class.getSimpleName(); String DefaultApplicationNode = ApplicationElement.class.getSimpleName(); void register( String typeName, TreeNodeOperator functionalNodeOperation ); void registerMetaType( Class clazz, String metaType ); void registerMetaType( String classFullName, String metaType ); String getMetaType( String classFullName ); ElementOperator getOperator(String typeName ); ServiceInstrument getServicesTree(); ServiceMasterManipulator getServiceMasterManipulator(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/GenericElementOperatorFactory.java ================================================ package com.pinecone.hydra.service.kom.operator; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.service.kom.entity.GenericNamespace; import com.pinecone.hydra.service.kom.entity.GenericApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; public class GenericElementOperatorFactory implements ElementOperatorFactory { protected ServiceMasterManipulator serviceMasterManipulator; protected ServiceInstrument serviceInstrument; protected Map registerer = new HashMap<>(); protected Map metaTypeMap = new TreeMap<>(); protected void registerDefaultMetaType( Class genericType ) { this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace("Generic","") ); } protected void registerDefaultMetaTypes() { this.registerDefaultMetaType( GenericNamespace.class ); this.registerDefaultMetaType( GenericServiceElement.class ); this.registerDefaultMetaType( GenericApplicationElement.class ); } public GenericElementOperatorFactory(ServiceInstrument serviceInstrument, ServiceMasterManipulator serviceMasterManipulator ){ this.serviceInstrument = serviceInstrument; this.serviceMasterManipulator = serviceMasterManipulator; this.registerer.put( ElementOperatorFactory.DefaultServiceNode, new ServiceElementOperator( this ) ); this.registerer.put( ElementOperatorFactory.DefaultApplicationNode, new 
ApplicationElementOperator( this ) ); this.registerer.put( ElementOperatorFactory.DefaultNamespace, new NamespaceOperator( this ) ); this.registerDefaultMetaTypes(); } @Override public void register( String typeName, TreeNodeOperator functionalNodeOperation ) { this.registerer.put( typeName, functionalNodeOperation ); } @Override public void registerMetaType( Class clazz, String metaType ){ this.registerMetaType( clazz.getName(), metaType ); } @Override public void registerMetaType( String classFullName, String metaType ){ this.metaTypeMap.put( classFullName, metaType ); } @Override public ServiceInstrument getServicesTree() { return this.serviceInstrument; } @Override public ServiceMasterManipulator getServiceMasterManipulator() { return this.serviceMasterManipulator; } @Override public String getMetaType( String classFullName ) { return this.metaTypeMap.get( classFullName ); } @Override public ElementOperator getOperator( String typeName ) { //Debug.trace( this.registerer.toString() ); return (ElementOperator) this.registerer.get( typeName ); } }
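GenericElementOperatorFactory pre-registers the three default operators and their meta types; custom element kinds would hook in through register and registerMetaType. A wiring sketch, with the custom class names left hypothetical and therefore commented out:

package examples; // hypothetical example, not part of the repository

import com.pinecone.hydra.service.kom.ServiceInstrument;
import com.pinecone.hydra.service.kom.operator.GenericElementOperatorFactory;
import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;

public class FactoryRegistrationSketch {
    public static GenericElementOperatorFactory build( ServiceInstrument instrument, ServiceMasterManipulator manipulator ) {
        GenericElementOperatorFactory factory = new GenericElementOperatorFactory( instrument, manipulator );
        // A custom element type would be wired like this; `CustomElementOperator`
        // and `GenericCustomElement` are hypothetical classes:
        // factory.register( "CustomElement", new CustomElementOperator( factory ) );
        // factory.registerMetaType( GenericCustomElement.class, "CustomElement" );
        return factory;
    }
}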
================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/NamespaceOperator.java
================================================
package com.pinecone.hydra.service.kom.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.service.kom.GenericNamespaceRules; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.GenericApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericNamespace; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; import java.util.List; public class NamespaceOperator extends ArchElementOperator implements ElementOperator { protected ServiceNamespaceManipulator namespaceManipulator; protected NamespaceRulesManipulator namespaceRulesManipulator; public NamespaceOperator( ElementOperatorFactory factory ) { this( factory.getServiceMasterManipulator(), factory.getServicesTree() ); this.factory = factory; } public NamespaceOperator( ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument ){ super( masterManipulator, serviceInstrument ); this.namespaceManipulator = masterManipulator.getNamespaceManipulator(); this.namespaceRulesManipulator = masterManipulator.getNamespaceRulesManipulator(); } @Override public GUID insert( TreeNode treeNode ) { GenericNamespace ns = (GenericNamespace) treeNode; // persist the node's basic information GuidAllocator guidAllocator = this.serviceInstrument.getGuidAllocator(); GUID namespaceRulesGuid = ns.getGuid(); GenericNamespaceRules namespaceRules = ns.getClassificationRules(); if ( namespaceRules != null ){ namespaceRules.setGuid( namespaceRulesGuid ); } else { namespaceRulesGuid = null; } GUID namespaceGuid = guidAllocator.nextGUID(); ns.setGuid( namespaceGuid ); ns.setRulesGUID( namespaceRulesGuid ); this.namespaceManipulator.insert( ns ); // persist the meta information GUID metadataGUID = guidAllocator.nextGUID(); ns.setMetaGuid( metadataGUID ); this.nodeMetaManipulator.insertNS( ns ); GUIDImperialTrieNode node = new GUIDImperialTrieNode(); node.setBaseDataGUID( namespaceRulesGuid ); node.setGuid( namespaceGuid ); node.setNodeMetadataGUID( metadataGUID ); node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) ); this.imperialTree.insert( node ); return namespaceGuid; } @Override public void purge( GUID guid ) { // A namespace node is purged recursively: owned and referenced child nodes are deleted, and a shared child is only cleaned up once no other reference to it remains. List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren( guid ); GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); if ( !childNodes.isEmpty() ){ List<GUID> subordinates = this.imperialTree.getSubordinates( guid ); if ( !subordinates.isEmpty() ){ for ( GUID subordinateGuid : subordinates ){ this.purge( subordinateGuid ); } } childNodes = this.imperialTree.getChildren( guid ); for( GUIDImperialTrieNode childNode : childNodes ){ List<GUID> parentNodes = this.imperialTree.fetchParentGuids( childNode.getGuid() ); if ( parentNodes.size() > 1 ){ this.imperialTree.removeInheritance( childNode.getGuid(), guid ); } else { this.purge( childNode.getGuid() ); } } } if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() ) || node.getType().getObjectName().equals( GenericApplicationElement.class.getName() ) ){ this.removeNode( guid ); } else { UOI uoi = node.getType(); String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() ); if( metaType == null ) { TreeNode newInstance = (TreeNode) uoi.newInstance( new Class[]{ ServiceInstrument.class }, this.serviceInstrument ); metaType = newInstance.getMetaType(); } ElementOperator operator = this.getOperatorFactory().getOperator( metaType ); operator.purge( guid ); } } @Override public Namespace get( GUID guid ) { GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); GenericNamespace namespace = new GenericNamespace( this.serviceInstrument ); GenericNamespaceRules namespaceRules = this.namespaceRulesManipulator.getNamespaceRules( node.getAttributesGUID() ); GUIDImperialTrieNode guidDistributedTrieNode = this.imperialTree.getNode( node.getGuid() ); if ( namespaceRules != null ){ namespace.setRulesGUID( namespaceRules.getGuid() ); namespace.setClassificationRules( namespaceRules ); } GUID metaGuid = guidDistributedTrieNode.getNodeMetadataGUID(); namespace.setDistributedTreeNode( guidDistributedTrieNode ); namespace.setName( this.namespaceManipulator.getNamespace( guid ).getName() ); this.applyCommonMeta( namespace, this.nodeMetaManipulator.getNodeCommonMeta( metaGuid ) ); // GUID / MetaGUID difference. namespace.setGuid( guid ); namespace.setMetaGuid( metaGuid ); return namespace; } @Override public Namespace get( GUID guid, int depth ) { return this.get( guid ); } @Override public Namespace getAsRootDepth( GUID guid ) { return this.get( guid ); } @Override public void update( TreeNode nodeWideData ) { GenericNamespace ns = (GenericNamespace) nodeWideData; this.namespaceManipulator.update( ns ); GenericNamespaceRules classificationRules = ns.getClassificationRules(); this.namespaceRulesManipulator.update( classificationRules ); this.nodeMetaManipulator.update( ns ); } @Override public void updateName( GUID guid, String name ) { } protected void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath( guid ); this.namespaceManipulator.remove( node.getGuid() ); this.namespaceRulesManipulator.remove( node.getNodeMetadataGUID() ); this.nodeMetaManipulator.remove( node.getAttributesGUID() ); } }
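NamespaceOperator.get flags the GUID / MetaGUID difference: the node GUID addresses the trie node itself, while the meta GUID addresses the common-meta record that applyCommonMeta reads. A short sketch of that distinction, assuming Namespace exposes a getMetaGuid accessor matching the setMetaGuid used above:

package examples; // hypothetical example, not part of the repository

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.service.kom.entity.Namespace;
import com.pinecone.hydra.service.kom.operator.NamespaceOperator;

public class NamespaceGuidSketch {
    public static void show( NamespaceOperator operator, GUID nodeGuid ) {
        Namespace ns = operator.get( nodeGuid );
        GUID guid = ns.getGuid();         // identity of the trie node
        GUID metaGuid = ns.getMetaGuid(); // identity of the common-meta record (assumed accessor)
        System.out.println( guid + " / " + metaGuid );
    }
}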
================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ServiceElementOperator.java
================================================
package com.pinecone.hydra.service.kom.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.service.kom.source.ServiceMetaManipulator; import com.pinecone.hydra.service.kom.source.ServiceNodeManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; public class ServiceElementOperator extends ArchElementOperator implements ElementOperator { protected ServiceNodeManipulator serviceNodeManipulator; protected ServiceMetaManipulator serviceMetaManipulator; public ServiceElementOperator( ElementOperatorFactory factory ) { this( factory.getServiceMasterManipulator(), factory.getServicesTree() ); this.factory = factory; } public ServiceElementOperator( ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument ){ super( masterManipulator, serviceInstrument ); this.serviceNodeManipulator = masterManipulator.getServiceNodeManipulator(); this.serviceMetaManipulator = masterManipulator.getServiceMetaManipulator(); } @Override public GUID insert( TreeNode treeNode ) { GenericServiceElement serviceElement = (GenericServiceElement) treeNode; // write the element to the database: store the node record in the service node table GuidAllocator guidAllocator = this.serviceInstrument.getGuidAllocator(); GUID serviceNodeGUID = guidAllocator.nextGUID(); serviceElement.setGuid( serviceNodeGUID ); this.serviceNodeManipulator.insert( serviceElement ); // store the node's basic information in the info table GUID metaGUID = guidAllocator.nextGUID(); if ( serviceElement.getMetaGuid() == null ){ serviceElement.setMetaGuid( metaGUID ); } this.serviceMetaManipulator.insert( serviceElement ); // store the service meta information in the meta table this.nodeMetaManipulator.insert( serviceElement ); // store the node record in the master table GUIDImperialTrieNode node = new GUIDImperialTrieNode(); node.setNodeMetadataGUID( metaGUID ); node.setGuid( serviceNodeGUID ); node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) ); this.imperialTree.insert( node ); return serviceNodeGUID; } @Override public void
purge( GUID guid ) { this.removeNode( guid ); } @Override public ServiceElement get( GUID guid ) { GUIDImperialTrieNode node = this.imperialTree.getNode(guid); ServiceElement serviceElement = new GenericServiceElement(); if( node.getNodeMetadataGUID() != null ){ serviceElement = this.serviceMetaManipulator.getServiceMeta( node.getNodeMetadataGUID() ); } this.applyCommonMeta( serviceElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) ); serviceElement.setDistributedTreeNode(node); serviceElement.setGuid( guid ); serviceElement.setName( this.serviceNodeManipulator.getServiceNode(guid).getName() ); return serviceElement; } @Override public ServiceElement get( GUID guid, int depth ) { return this.get( guid ); } @Override public ServiceElement getAsRootDepth( GUID guid ) { return this.get( guid ); } @Override public void update( TreeNode nodeWideData ) { GenericServiceElement serviceElement = (GenericServiceElement) nodeWideData; this.serviceNodeManipulator.update( serviceElement ); this.serviceMetaManipulator.update( serviceElement ); this.nodeMetaManipulator.update( serviceElement ); } @Override public void updateName(GUID guid, String name) { } private void removeNode( GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.imperialTree.removeCachePath( guid ); this.serviceNodeManipulator.remove( node.getGuid() ); this.serviceMetaManipulator.remove( node.getAttributesGUID() ); this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() ); } } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ApplicationMetaManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ApplicationElement; public interface ApplicationMetaManipulator extends Pinenut { void insert( ApplicationElement applicationElement ); void remove( GUID guid ); ApplicationElement getApplicationElement( GUID guid, ServiceInstrument serviceInstrument); void update( ApplicationElement applicationElement ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ApplicationNodeManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface ApplicationNodeManipulator extends GUIDNameManipulator { void insert( ApplicationElement applicationElement ); void remove( GUID guid); ApplicationElement getApplicationNode(GUID guid); void update( ApplicationElement applicationElement ); List fetchApplicationNodeByName( String name ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/NamespaceRulesManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.GenericNamespaceRules; public interface NamespaceRulesManipulator { void insert(GenericNamespaceRules classificationRules); void 
remove(GUID guid); GenericNamespaceRules getNamespaceRules(GUID guid); void update(GenericNamespaceRules classificationRules); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/NodeMetaManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceFamilyNode; import com.pinecone.hydra.service.kom.entity.CommonMeta; import com.pinecone.hydra.service.kom.entity.Namespace; public interface NodeMetaManipulator extends Pinenut { void insert( ServiceFamilyNode node ); void insertNS( Namespace node ); void remove( GUID guid ); CommonMeta getNodeCommonMeta( GUID guid ); void update( ServiceFamilyNode node ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceInstanceManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry; public interface ServiceInstanceManipulator extends Pinenut { void initServiceInstance( ServiceInstanceEntry element ); ServiceInstanceEntry queryServiceInstance( GUID instanceId ); void updateServiceInstance( ServiceInstanceEntry element ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceMasterManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; public interface ServiceMasterManipulator extends KOIMasterManipulator { TrieTreeManipulator getTrieTreeManipulator() ; NodeMetaManipulator getNodeMetaManipulator(); ApplicationNodeManipulator getApplicationNodeManipulator(); ApplicationMetaManipulator getApplicationElementManipulator(); ServiceNodeManipulator getServiceNodeManipulator(); ServiceMetaManipulator getServiceMetaManipulator(); ServiceNamespaceManipulator getNamespaceManipulator(); ServiceInstanceManipulator getServiceInstanceManipulator(); NamespaceRulesManipulator getNamespaceRulesManipulator(); TireOwnerManipulator getTireOwnerManipulator(); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceMetaManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ServiceElement; public interface ServiceMetaManipulator { void insert( ServiceElement serviceElement ); void remove( GUID guid ); void update( ServiceElement serviceElement ); ServiceElement getServiceMeta( GUID guid ); } ================================================ FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceNamespaceManipulator.java ================================================ package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.util.id.GUID; import 
com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface ServiceNamespaceManipulator extends GUIDNameManipulator { void insert( Namespace ns ); void remove( GUID guid ); Namespace getNamespace( GUID guid ); void update( Namespace ns ); List fetchNamespaceNodeByName( String name ); }
================================================
FILE: Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceNodeManipulator.java
================================================
package com.pinecone.hydra.service.kom.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface ServiceNodeManipulator extends GUIDNameManipulator { // CRUD operations for ServiceNode records void insert( ServiceElement serviceNode ); void remove( GUID guid ); ServiceElement getServiceNode( GUID guid ); void update( ServiceElement serviceNode ); List fetchServiceNodeByName( String name ); @Override List getGuidsByName( String name ); @Override List getGuidsByNameID( String name, GUID guid ); List fetchAllService(); }
================================================
FILE: Hydra/hydra-framework-storage/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId>
    <artifactId>hydra-framework-storage</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-architecture</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulf-lib-oltp-rdb</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.slime</groupId><artifactId>slime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-architecture-storage</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/ArchFileObjectMeta.java
================================================
package com.pinecone.hydra.storage; public abstract class ArchFileObjectMeta implements CheckedFile { }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/ArchStorageConfig.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; import com.pinecone.ulf.util.guid.GUIDs; import java.util.Map; public abstract class ArchStorageConfig extends ArchKernelObjectConfig implements StorageConfig { protected String mszLocalHostGuid = StorageConstants.LocalhostGUID.toString(); protected String mszDefaultVolumeGuid; protected String mszDefaultTempFilePath; protected ArchStorageConfig(){ super(); } public ArchStorageConfig( Map config ){ super( config ); this.mszLocalHostGuid = (String) config.getOrDefault( "LocalHostGuid", StorageConstants.LocalhostGUID.toString() ); this.mszDefaultVolumeGuid = (String) config.get( "DefaultVolumeGuid" ); this.mszDefaultTempFilePath = (String) config.get( "DefaultTempFilePath" ); } @Override public GUID getLocalHostGuid() { return GUIDs.GUID128( this.mszLocalHostGuid ); } @Override public String getDefaultVolumeGuid() { return this.mszDefaultVolumeGuid; } @Override public String getDefaultTempFilePath() { return this.mszDefaultTempFilePath; } }
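ArchStorageConfig reads its three settings from a plain map, falling back to StorageConstants.LocalhostGUID when no LocalHostGuid entry is present. A construction sketch follows, assuming ArchKernelObjectConfig adds no further abstract members; the `examples` package and all values are placeholders.

package examples; // hypothetical example, not part of the repository

import java.util.HashMap;
import java.util.Map;

import com.pinecone.hydra.storage.ArchStorageConfig;
import com.pinecone.hydra.storage.StorageConfig;

public class StorageConfigSketch {
    // Minimal concrete subclass; ArchStorageConfig itself is abstract.
    static class SimpleStorageConfig extends ArchStorageConfig {
        SimpleStorageConfig( Map config ) { super( config ); }
    }

    public static StorageConfig build() {
        Map<String, Object> config = new HashMap<>();
        config.put( "DefaultVolumeGuid", "11111111-2222-3333-4444-555555555555" ); // placeholder
        config.put( "DefaultTempFilePath", "/tmp/hydra" );                          // placeholder
        // "LocalHostGuid" is omitted, so the constructor falls back to StorageConstants.LocalhostGUID.
        return new SimpleStorageConfig( config );
    }
}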
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/CheckedFile.java
================================================
package com.pinecone.hydra.storage; public interface CheckedFile extends UFile { long getChecksum(); void setChecksum( long checksum ); int getParityCheck(); void setParityCheck( int parityCheck ); }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/RandomAccessChanface.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.hydra.storage.io.Chanface; import java.io.IOException; public interface RandomAccessChanface extends Chanface { void mark( int readlimit ); void reset() throws IOException; }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/ReadChannelRecalled.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.system.prototype.Pinenut; public class ReadChannelRecalled implements Pinenut { }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageConfig.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface StorageConfig extends KernelObjectConfig { GUID getLocalHostGuid(); String getDefaultVolumeGuid(); String getDefaultTempFilePath(); }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageConstants.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.GUIDs; public final class StorageConstants { public static final String PathSeparator = "/"; public static final String period = "."; public static final String StorageVersionSignature = "Titan"; public static final GUID LocalhostGUID = GUIDs.GUID128( "00000000-0000-0000-0000-000000000000" ); }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageExportIORequest.java
================================================
package com.pinecone.hydra.storage; public interface StorageExportIORequest extends StorageInstructRequest { String getSourceName(); // the concrete on-disk name used for I/O addressing: Source => Address void setSourceName( String sourceName ); long getCrc32(); void setCrc32( long crc32 ); Number getSize(); // the declared size of the payload to be stored void setSize( Number size ); }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageIOResponse.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.file.entity.FileNode; import java.util.zip.CRC32; public interface StorageIOResponse extends StorageInstructResponse { GUID getObjectGuid(); void setObjectGuid( GUID objectGuid ); GUID getBottomGuid(); void setBottomGuid( GUID bottomGuid ); long getChecksum(); void setChecksum( long checksum ); long getParityCheck(); void setParityCheck( long parityCheck ); CRC32 getCrc32(); void setCrc32( CRC32 crc32 ); String getSourceName(); void setSourceName( String name ); Cluster toCluster(); FileNode toFileNode(); }
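The request/response pair above carries the I/O addressing data for an export: the storage object GUID as the pointer, the source name as the on-disk address, and the declared size and CRC for verification. A request-building sketch with placeholder values (the `examples` package and `newRequest` helper are hypothetical):

package examples; // hypothetical example, not part of the repository

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.StorageExportIORequest;
import com.pinecone.hydra.storage.TitanStorageExportIORequest;

public class ExportRequestSketch {
    public static StorageExportIORequest newRequest( GUID storageObjectGuid ) {
        StorageExportIORequest request = new TitanStorageExportIORequest();
        request.setStorageObjectGuid( storageObjectGuid ); // pointer to the storage unit
        request.setSourceName( "report.bin" );             // placeholder on-disk source name
        request.setSize( 4096L );                          // placeholder declared size in bytes
        request.setCrc32( 0L );                            // placeholder checksum
        return request;
    }
}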
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageInstructRequest.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface StorageInstructRequest extends Pinenut { GUID getStorageObjectGuid(); // identifier (pointer) of the storage unit void setStorageObjectGuid( GUID storageObjectGuid ); }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageInstructResponse.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.system.prototype.Pinenut; public interface StorageInstructResponse extends Pinenut { }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageNaming.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.system.prototype.Pinenut; public interface StorageNaming extends Pinenut { String naming( String objectName, String identity ); }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageReceiveIORequest.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.util.zip.CRC32; public interface StorageReceiveIORequest extends Pinenut { String getName(); void setName( String name ); Number getSize(); void setSize( Number size ); GUID getStorageObjectGuid(); void setStorageObjectGuid( GUID storageObjectGuid ); }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageExportIORequest.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.util.id.GUID; public class TitanStorageExportIORequest implements StorageExportIORequest { private String sourceName; private long crc32; private Number size; private GUID storageGuid; @Override public String getSourceName() { return this.sourceName; } @Override public void setSourceName( String sourceName ) { this.sourceName = sourceName; } @Override public long getCrc32() { return this.crc32; } @Override public void setCrc32( long crc32 ) { this.crc32 = crc32; } @Override public Number getSize() { return this.size; } @Override public void setSize( Number size ) { this.size = size; } @Override public GUID getStorageObjectGuid() { return this.storageGuid; } @Override public void setStorageObjectGuid( GUID storageObjectGuid ) { this.storageGuid = storageObjectGuid; } }
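TitanStorageNaming (below) derives the physical object name from the logical name and an identity via the "%s_%s.storage" pattern. A one-line usage sketch, with hypothetical arguments:

package examples; // hypothetical example, not part of the repository

import com.pinecone.hydra.storage.StorageNaming;
import com.pinecone.hydra.storage.TitanStorageNaming;

public class NamingSketch {
    public static void main( String[] args ) {
        StorageNaming naming = new TitanStorageNaming();
        System.out.println( naming.naming( "photo", "42" ) ); // prints "photo_42.storage"
    }
}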
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageIOResponse.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.file.entity.FileNode; import java.util.zip.CRC32; public class TitanStorageIOResponse implements StorageIOResponse { private GUID objectGuid; private long checksum; private long parityCheck; private CRC32 crc32; private String sourceName; private GUID bottomGuid; @Override public GUID getObjectGuid() { return this.objectGuid; } @Override public void setObjectGuid( GUID objectGuid ) { this.objectGuid = objectGuid; } @Override public long getChecksum() { return this.checksum; } @Override public void setChecksum( long checksum ) { this.checksum = checksum; } @Override public long getParityCheck() { return this.parityCheck; } @Override public void setParityCheck( long parityCheck ) { this.parityCheck = parityCheck; } @Override public CRC32 getCrc32() { return this.crc32; } @Override public void setCrc32( CRC32 crc32 ) { this.crc32 = crc32; } @Override public String getSourceName() { return this.sourceName; } @Override public void setSourceName( String sourceName ) { this.sourceName = sourceName; } @Override public GUID getBottomGuid() { return this.bottomGuid; } @Override public void setBottomGuid( GUID bottomGuid ) { this.bottomGuid = bottomGuid; } @Override public Cluster toCluster() { return null; } @Override public FileNode toFileNode() { return null; } }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageNaming.java
================================================
package com.pinecone.hydra.storage; public class TitanStorageNaming implements StorageNaming { @Override public String naming( String objectName, String identity ) { return String.format( "%s_%s.storage", objectName, identity ); // TODO! CONST } }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageReceiveIORequest.java
================================================
package com.pinecone.hydra.storage; import com.pinecone.framework.util.id.GUID; public class TitanStorageReceiveIORequest implements StorageReceiveIORequest { private String name; private Number size; private GUID storageObjectGuid; @Override public String getName() { return this.name; } @Override public void setName( String name ) { this.name = name; } @Override public Number getSize() { return this.size; } @Override public void setSize( Number size ) { this.size = size; } @Override public GUID getStorageObjectGuid() { return this.storageObjectGuid; } @Override public void setStorageObjectGuid( GUID storageObjectGuid ) { this.storageObjectGuid = storageObjectGuid; } }
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/BucketInstrument.java
================================================
package com.pinecone.hydra.storage.bucket; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.entity.Bucket; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.bucket.entity.SiteNode; import com.pinecone.hydra.storage.bucket.source.SiteManipulator; import java.util.List; public interface BucketInstrument extends Pinenut { GUID createBucket( Bucket bucket ); void removeBucket( GUID bucketGuid ); void removeBucketByAccountAndBucketName( GUID accountGuid, String bucketName ); Bucket queryBucketByBucketGuid( GUID bucketGuid ); List queryBucketsByUserGuid( GUID userGuid ); SiteManipulator getSiteManipulator(); GUID createSite( Site site ); void removeSite( GUID siteGuid ); void removeSite( String siteName ); Site querySite( GUID siteGuid ); List listSite(); GUID createSiteNode( SiteNode siteNode ); void removeSiteNode( GUID siteNodeGuid ); SiteNode querySiteNode( GUID
siteNodeGuid ); List querySiteNodeBySiteGuid( GUID siteGuid ); void updateSiteNode( SiteNode siteNode ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/TitanBucketInstrument.java ================================================ package com.pinecone.hydra.storage.bucket; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.entity.Bucket; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.bucket.entity.SiteNode; import com.pinecone.hydra.storage.bucket.source.BucketManipulator; import com.pinecone.hydra.storage.bucket.source.BucketMasterManipulator; import com.pinecone.hydra.storage.bucket.source.SiteManipulator; import com.pinecone.hydra.storage.bucket.source.SiteNodeManipulator; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.ulf.util.guid.GUIDs; import java.time.LocalDateTime; import java.util.List; public class TitanBucketInstrument implements BucketInstrument { protected Hydrogen hydrogen; protected BucketMasterManipulator masterManipulator; protected BucketManipulator bucketManipulator; protected SiteManipulator siteManipulator; protected SiteNodeManipulator siteNodeManipulator; protected GuidAllocator guidAllocator; public TitanBucketInstrument(Hydrogen hydrogen, KOIMasterManipulator masterManipulator, String name ){ this.hydrogen = hydrogen; this.masterManipulator = (BucketMasterManipulator) masterManipulator; this.guidAllocator = GUIDs.newGuidAllocator(); this.bucketManipulator = this.masterManipulator.getBucketManipulator(); this.siteManipulator = this.masterManipulator.getSiteManipulator(); this.siteNodeManipulator = this.masterManipulator.getSiteNodeManipulator(); } public TitanBucketInstrument( Hydrogen hydrogen, KOIMasterManipulator masterManipulator ){ this( hydrogen, masterManipulator, KOMFileSystem.class.getSimpleName() ); } public TitanBucketInstrument(KOIMappingDriver driver ) { this( driver.getSystem(), driver.getMasterManipulator() ); } @Override public GUID createBucket(Bucket bucket) { GUID guid = this.guidAllocator.nextGUID(); bucket.setBucketGuid( guid ); this.bucketManipulator.insert( bucket ); return guid; } @Override public void removeBucket(GUID bucketGuid) { this.bucketManipulator.remove( bucketGuid ); } @Override public void removeBucketByAccountAndBucketName(GUID accountGuid, String bucketName) { } @Override public Bucket queryBucketByBucketGuid(GUID bucketGuid) { return this.bucketManipulator.queryBucketByBucketGuid( bucketGuid ); } @Override public List queryBucketsByUserGuid(GUID userGuid) { return this.bucketManipulator.queryBucketsByUserGuid( userGuid ); } @Override public SiteManipulator getSiteManipulator() { return this.siteManipulator; } @Override public GUID createSite(Site site) { GUID guid = this.guidAllocator.nextGUID(); site.setSiteGuid(guid); site.setCreateTime(LocalDateTime.now()); this.siteManipulator.insert(site); return guid; } @Override public void removeSite(GUID siteGuid) { this.siteManipulator.remove(siteGuid); } @Override public void removeSite(String siteName) { this.siteManipulator.removeByName( siteName ); } @Override public Site querySite(GUID siteGuid) { return this.siteManipulator.querySite(siteGuid); } @Override public List 
listSite() { return this.siteManipulator.listSite(); } @Override public GUID createSiteNode(SiteNode siteNode) { if ( siteNode.getNodeGuid() == null ){ siteNode.setNodeGuid( this.guidAllocator.nextGUID() ); } this.siteNodeManipulator.insert( siteNode ); return siteNode.getNodeGuid(); } @Override public void removeSiteNode(GUID siteNodeGuid) { this.siteNodeManipulator.remove( siteNodeGuid ); } @Override public SiteNode querySiteNode(GUID siteNodeGuid) { return this.siteNodeManipulator.querySiteNode( siteNodeGuid ); } @Override public List querySiteNodeBySiteGuid( GUID siteGuid ) { return this.siteNodeManipulator.querySiteNodeBySiteGuid( siteGuid ); } @Override public void updateSiteNode(SiteNode siteNode) { this.siteNodeManipulator.update( siteNode ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/Bucket.java ================================================ package com.pinecone.hydra.storage.bucket.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface Bucket extends Pinenut { int getEnumId(); String getBucketName(); void setBucketName( String bucketName ); GUID getBucketGuid(); void setBucketGuid( GUID bucketGuid ); GUID getUserGuid(); void setUserGuid( GUID userGuid ); GUID getMountPoint(); void setMountPoint( GUID mountPoint ); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/GenericBucket.java ================================================ package com.pinecone.hydra.storage.bucket.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.ulf.util.guid.GUIDs; import java.time.LocalDateTime; public class GenericBucket implements Bucket{ protected int enumId; protected String bucketName; protected LocalDateTime createTime; protected GUID bucketGuid; protected GUID userGuid; protected GUID mountPoint; protected GuidAllocator guidAllocator; public GenericBucket(){ this.guidAllocator = GUIDs.newGuidAllocator(); this.bucketGuid = this.guidAllocator.nextGUID(); } @Override public int getEnumId() { return this.enumId; } @Override public String getBucketName() { return this.bucketName; } @Override public void setBucketName(String bucketName) { this.bucketName = bucketName; } @Override public GUID getBucketGuid() { return this.bucketGuid; } @Override public void setBucketGuid(GUID bucketGuid) { this.bucketGuid = bucketGuid; } @Override public GUID getUserGuid() { return this.userGuid; } @Override public void setUserGuid(GUID userGuid) { this.userGuid = userGuid; } @Override public GUID getMountPoint() { return this.mountPoint; } @Override public void setMountPoint(GUID mountPoint) { this.mountPoint = mountPoint; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/GenericSite.java ================================================ package com.pinecone.hydra.storage.bucket.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import 
java.time.LocalDateTime; public class GenericSite implements Site{ private long enumId; private String siteName; private LocalDateTime createTime; private GUID siteGuid; private GUID mountPointGuid; public GenericSite(){} public GenericSite(long enumId, String siteName, LocalDateTime createTime, GUID siteGuid, GUID mountPointGuid) { this.enumId = enumId; this.siteName = siteName; this.createTime = createTime; this.siteGuid = siteGuid; this.mountPointGuid = mountPointGuid; } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public String getSiteName() { return this.siteName; } @Override public void setSiteName(String siteName) { this.siteName = siteName; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Override public GUID getSiteGuid() { return this.siteGuid; } @Override public void setSiteGuid(GUID siteGuid) { this.siteGuid = siteGuid; } @Override public GUID getMountPointGuid() { return this.mountPointGuid; } @Override public void setMountPointGuid(GUID mountPointGuid) { this.mountPointGuid = mountPointGuid; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/GenericSiteNode.java ================================================ package com.pinecone.hydra.storage.bucket.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; public class GenericSiteNode implements SiteNode{ protected long enumId; protected String nodeName; protected GUID nodeGuid; protected GUID siteGuid; protected int state; protected int isEnabled; protected GUID relatedService; @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public String getNodeName() { return this.nodeName; } @Override public void setNodeName(String nodeName) { this.nodeName = nodeName; } @Override public GUID getNodeGuid() { return this.nodeGuid; } @Override public void setNodeGuid(GUID nodeGuid) { this.nodeGuid = nodeGuid; } @Override public GUID getSiteGuid() { return this.siteGuid; } @Override public void setSiteGuid(GUID siteGuid) { this.siteGuid = siteGuid; } @Override public int getState() { return this.state; } @Override public void setState(int state) { this.state = state; } @Override public int getIsEnabled() { return this.isEnabled; } @Override public void setIsEnabled(int isEnabled) { this.isEnabled = isEnabled; } @Override public GUID getRelatedService() { return this.relatedService; } @Override public void setRelatedService(GUID relatedService) { this.relatedService = relatedService; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/Site.java ================================================ package com.pinecone.hydra.storage.bucket.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public 
interface Site extends Pinenut { long getEnumId(); void setEnumId( long enumId); String getSiteName(); void setSiteName( String siteName ); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); GUID getSiteGuid(); void setSiteGuid( GUID siteGuid ); GUID getMountPointGuid(); void setMountPointGuid( GUID mountPointGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/SiteNode.java ================================================ package com.pinecone.hydra.storage.bucket.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface SiteNode extends Pinenut { long getEnumId(); void setEnumId( long enumId ); String getNodeName(); void setNodeName( String nodeName ); GUID getNodeGuid(); void setNodeGuid( GUID nodeGuid ); GUID getSiteGuid(); void setSiteGuid( GUID siteGuid ); int getState(); void setState( int state ); int getIsEnabled(); void setIsEnabled( int isEnabled ); GUID getRelatedService(); void setRelatedService( GUID relatedService ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/BucketManipulator.java ================================================ package com.pinecone.hydra.storage.bucket.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.entity.Bucket; import java.util.List; public interface BucketManipulator extends Pinenut { void insert( Bucket bucket ); void remove( GUID bucketGuid ); void removeByAccountAndBucketName( GUID accountGuid, String bucketName ); Bucket queryBucketByBucketGuid( GUID bucketGuid ); List queryBucketsByUserGuid( GUID userGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/BucketMasterManipulator.java ================================================ package com.pinecone.hydra.storage.bucket.source; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; public interface BucketMasterManipulator extends KOIMasterManipulator { BucketManipulator getBucketManipulator(); SiteManipulator getSiteManipulator(); SiteNodeManipulator getSiteNodeManipulator(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/SiteManipulator.java ================================================ package com.pinecone.hydra.storage.bucket.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.entity.Site; import java.util.List; public interface SiteManipulator extends Pinenut { void insert(Site site ); void remove( GUID siteGuid ); void removeByName( String siteName ); Site querySite( GUID siteGuid ); Site querySiteByName( String siteName ); List listSite(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/SiteNodeManipulator.java ================================================ package com.pinecone.hydra.storage.bucket.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.bucket.entity.SiteNode; 
import java.util.List; public interface SiteNodeManipulator extends Pinenut { void insert( SiteNode siteNode ); void remove( GUID siteNodeGuid ); SiteNode querySiteNode( GUID siteNodeGuid ); List querySiteNodeBySiteGuid( GUID siteGuid ); void update( SiteNode siteNode ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/ClusterSegmentNaming.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface ClusterSegmentNaming extends Pinenut { String naming( String fileName, long segId, String crc32 ); String naming( String fileName, GUID frameGuid, int threadId ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/FileConstants.java ================================================ package com.pinecone.hydra.storage.file; public final class FileConstants { public static final Number DefaultClusterSize = 10 * 1024 * 1024L; // 10 MB public static final String StorageVersionSignature = "Generic"; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/FileSystemConfig.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.StorageConfig; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface FileSystemConfig extends StorageConfig { String getVersionSignature(); Number getClusterSize(); GUID getLocalhostGUID(); Number getmTinyFileStripSizing(); long getPathQueryExpiryTimeHotMil(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/KOFSClusterSegmentNaming.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.framework.util.id.GUID; public class KOFSClusterSegmentNaming implements ClusterSegmentNaming { @Override public String naming( String fileName, long segId, String crc32 ){ return String.format( "%s_seg%d_%s.frame", fileName, segId, crc32 ); } @Override public String naming(String fileName, GUID frameGuid, int threadId) { return String.format( "%s_%s-%d.strip", fileName, frameGuid.toString(), threadId ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/KOMFileSystem.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.StorageConfig; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.file.entity.ClusterPage; import com.pinecone.hydra.storage.file.entity.FSNodeAllotment; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.RemoteCluster; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.storage.file.transmit.exporter.FileExportEntity; import com.pinecone.hydra.storage.file.transmit.receiver.FileReceiveEntity; import 
com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.system.ko.kom.ReparseKOMTree; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.io.IOException; import java.util.List; import java.util.TreeMap; public interface KOMFileSystem extends ReparseKOMTree { FileSystemConfig KernelFileSystemConfig = new KernelFileSystemConfig(); @Override String getPath( GUID guid ); @Override String getFullName( GUID guid ); @Override GUID put( TreeNode treeNode ); @Override FileTreeNode get(GUID guid ); @Override FileTreeNode get( GUID guid, int depth ); void update( FileTreeNode node); @Override FileTreeNode getAsRootDepth( GUID guid ); FileNode getFileNode(GUID guid ); Folder getFolder(GUID guid ); @Override GUID queryGUIDByPath( String path ); @Override GUID queryGUIDByFN ( String fullName ); @Override FileSystemConfig getConfig(); //todo update method @Override void remove( GUID guid ); @Override void removeReparseLink( GUID guid ); @Override List getChildren( GUID guid ); @Override void rename( GUID guid, String name ); default void rename( String path, String name ) { this.rename( this.assertPath( path ), name ); } @Override default GUID assertPath( String path, String pathType ) throws IllegalArgumentException { GUID guid = this.queryGUIDByPath( path ); if( guid == null ) { throw new IllegalArgumentException( "Undefined " + pathType + " '" + path + "'" ); } return guid; } @Override default GUID assertPath( String path ) throws IllegalArgumentException { return this.assertPath( path, "path" ); } List getAllTreeNode(); /** Assertion: ensures a unique ownership relation between parent and child node. */ @Override void affirmOwnedNode( GUID parentGuid, GUID childGuid ); FileNode affirmFileNode( String path ); Folder affirmFolder( String path); @Override void newHardLink ( GUID sourceGuid, GUID targetGuid ); /** set affinityParentGuid for child.*/ void setDataAffinityGuid ( GUID childGuid, GUID affinityParentGuid ); default void setDataAffinity ( String childPath, String parentPath ) { GUID childGuid = this.assertPath( childPath ); GUID parentGuid = this.assertPath( parentPath ); if( childGuid.equals( parentGuid ) ) { throw new IllegalArgumentException( "Cyclic path detected '" + childPath + "'" ); } this.setDataAffinityGuid( childGuid, parentGuid ); } Object querySelector ( String szSelector ); ElementNode queryElement(String path); @Override void remove(String path); @Override EntityNode queryNode(String path); @Override ReparseLinkNode queryReparseLink(String path); List selectByName(String name); void moveTo(String sourcePath, String destinationPath); void move(String sourcePath, String destinationPath); void copy(String sourcePath, String destinationPath, VolumeManager volumeManager) throws IOException; void directCopy( String sourcePath, String destinationPath ) throws IOException; @Override List fetchRoot(); Object querySelectorJ(String szSelector); List querySelectorAll(String szSelector); FSNodeAllotment getFSNodeAllotment(); TreeMap getClustersByFileGuid( GUID guid ); List fetchClustersPageByFileGuid( GUID fileGuid, long offset, int pageSize ); ClusterPage fetchClustersByFileGuid( GUID fileGuid, int pageSize ); ClusterPage fetchClustersByFileGuid( GUID fileGuid ); Cluster getLastCluster(GUID guid ); void setFolderVolumeMapping(GUID folderGuid, GUID volumeGuid ); GUID getMappingVolume(GUID folderGuid ); GUID getMappingVolume(String path ); Cluster getClusterByFileWithId(GUID fileGuid, long segId ); void receive( 
FileReceiveEntity entity ) throws IOException; void receive( FileReceiveEntity entity, Number offset, Number endSize )throws IOException; void randomReceive( FileReceiveEntity entity, Number offset, Number endSize ) throws IOException; void export( FileExportEntity entity ) throws IOException; void export( FileExportEntity entity, Number offset, Number endSize ); FileMasterManipulator getFileMasterManipulator(); void updateCluster( FileNode fileNode, long segId ); void deleteCluster( FileNode fileNode, long segId ); long countFileCluster( GUID fileGuid ); void renameFile( String filePath, String newFileName ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/KernelFileSystemConfig.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.ArchStorageConfig; import com.pinecone.hydra.storage.StorageConstants; import com.pinecone.hydra.storage.file.cache.DefaultCacheConstants; import com.pinecone.hydra.storage.volume.VolumeConstants; import com.pinecone.ulf.util.guid.GUIDs; import java.util.Map; public class KernelFileSystemConfig extends ArchStorageConfig implements FileSystemConfig { protected String mszVersionSignature = FileConstants.StorageVersionSignature; protected Number mnClusterSize = FileConstants.DefaultClusterSize; protected GUID mLocalhostGUID = StorageConstants.LocalhostGUID; protected Number mTinyFileStripSizing = VolumeConstants.TinyFileStripSizing; protected long mPathQueryExpiryTimeHotMil = DefaultCacheConstants.PathQueryExpiryTimeHotMil; public KernelFileSystemConfig() { super(); } public KernelFileSystemConfig( Map config ) { super( config ); this.mszVersionSignature = (String) config.getOrDefault("VersionSignature", FileConstants.StorageVersionSignature); this.mnClusterSize = (Number) config.getOrDefault("ClusterSize", FileConstants.DefaultClusterSize); this.mLocalhostGUID = GUIDs.GUID128( String.valueOf(config.getOrDefault("LocalhostGUID", StorageConstants.LocalhostGUID)) ); this.mTinyFileStripSizing = (Number) config.getOrDefault("TinyFileStripSizing", VolumeConstants.TinyFileStripSizing); this.mPathQueryExpiryTimeHotMil = ((Number) config.getOrDefault("PathQueryExpiryTimeHotMil", DefaultCacheConstants.PathQueryExpiryTimeHotMil)).longValue(); } @Override public String getVersionSignature() { return this.mszVersionSignature; } public Number getClusterSize() { return this.mnClusterSize; } public GUID getLocalhostGUID() { return this.mLocalhostGUID; } @Override public Number getmTinyFileStripSizing() { return this.mTinyFileStripSizing; } @Override public long getPathQueryExpiryTimeHotMil() { return this.mPathQueryExpiryTimeHotMil ; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/UniformObjectFileSystem.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.storage.StorageConstants; import com.pinecone.hydra.storage.file.cache.DefaultCacheConstants; import com.pinecone.hydra.storage.file.external.ExternalFileSystemInstrument; import 
com.pinecone.hydra.storage.file.external.KenExternalFileSystemInstrument; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.file.entity.ClusterPage; import com.pinecone.hydra.storage.file.entity.ClusterPage64; import com.pinecone.hydra.storage.file.entity.FSNodeAllotment; import com.pinecone.hydra.storage.file.entity.GenericFSNodeAllotment; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.entity.GenericFileNode; import com.pinecone.hydra.storage.file.entity.GenericFolder; import com.pinecone.hydra.storage.file.entity.LocalCluster; import com.pinecone.hydra.storage.file.entity.RemoteCluster; import com.pinecone.hydra.storage.file.operator.FileSystemOperator; import com.pinecone.hydra.storage.file.operator.FileSystemOperatorFactory; import com.pinecone.hydra.storage.file.operator.GenericFileSystemOperatorFactory; import com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator; import com.pinecone.hydra.storage.file.source.FileManipulator; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.storage.file.source.FileMetaManipulator; import com.pinecone.hydra.storage.file.source.FolderManipulator; import com.pinecone.hydra.storage.file.source.FolderMetaManipulator; import com.pinecone.hydra.storage.file.source.FolderVolumeMappingManipulator; import com.pinecone.hydra.storage.file.source.LocalClusterManipulator; import com.pinecone.hydra.storage.file.source.RemoteClusterManipulator; import com.pinecone.hydra.storage.file.source.SymbolicManipulator; import com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.transmit.exporter.FileExportEntity; import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64; import com.pinecone.hydra.storage.file.transmit.receiver.FileReceiveEntity; import com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64; import com.pinecone.hydra.storage.io.TitanFileChannelChanface; import com.pinecone.hydra.storage.io.TitanOutputStreamChanface; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree; import com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition; import com.pinecone.hydra.system.ko.kom.StandardPathSelector; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.slime.map.indexable.IndexableMapQuerier; import com.pinecone.ulf.util.guid.GUIDs; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.nio.channels.FileChannel; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.TreeMap; /** * Pinecone Ursus For Java UniformObjectFileSystem * Author: Ken, Harald.E (Dragon King) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
* ***************************************************************************************** * Uniform Object File System (Ken's OFS / KOFS) * Uniform Distributed Object Storage File System * Supports TB-PB-ZB level big data storage * * ***************************************************************************************** */ public class UniformObjectFileSystem extends ArchReparseKOMTree implements KOMFileSystem { protected FSNodeAllotment fsNodeAllotment; protected FileSystemAttributeManipulator fileSystemAttributeManipulator; protected FileManipulator fileManipulator; protected FileMasterManipulator fileMasterManipulator; protected FileMetaManipulator fileMetaManipulator; protected FolderManipulator folderManipulator; protected FolderMetaManipulator folderMetaManipulator; protected LocalClusterManipulator localClusterManipulator; protected RemoteClusterManipulator remoteClusterManipulator; protected SymbolicManipulator symbolicManipulator; protected SymbolicMetaManipulator symbolicMetaManipulator; protected FolderVolumeMappingManipulator folderVolumeMappingManipulator; protected IndexableMapQuerier globalPathGuidCacheQuerier; protected ExternalFileSystemInstrument directFileSystemAccessor; public UniformObjectFileSystem( Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMFileSystem parent, String name, IndexableMapQuerier globalPathGuidCacheQuerier, FileSystemConfig fileSystemConfig, @Nullable GuidAllocator guidAllocator ){ // Phase [1] Construct system. super( superiorProcess, masterManipulator, fileSystemConfig, parent, name, guidAllocator ); // Phase [2] Construct fundamentals. this.fileMasterManipulator = (FileMasterManipulator) masterManipulator; this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); // Phase [3] Construct manipulators. this.operatorFactory = new GenericFileSystemOperatorFactory( this, (FileMasterManipulator) masterManipulator ); this.fileSystemAttributeManipulator = this.fileMasterManipulator.getAttributeManipulator(); this.fileManipulator = this.fileMasterManipulator.getFileManipulator(); this.fileMetaManipulator = this.fileMasterManipulator.getFileMetaManipulator(); this.folderManipulator = this.fileMasterManipulator.getFolderManipulator(); this.folderMetaManipulator = this.fileMasterManipulator.getFolderMetaManipulator(); this.localClusterManipulator = this.fileMasterManipulator.getLocalClusterManipulator(); this.remoteClusterManipulator = this.fileMasterManipulator.getRemoteClusterManipulator(); this.symbolicManipulator = this.fileMasterManipulator.getSymbolicManipulator(); this.symbolicMetaManipulator = this.fileMasterManipulator.getSymbolicMetaManipulator(); this.folderVolumeMappingManipulator = this.fileMasterManipulator.getFolderVolumeRelationManipulator(); // Phase [4] Construct selectors. this.pathSelector = new StandardPathSelector( this.pathResolver, this.imperialTree, this.folderManipulator, new GUIDNameManipulator[] { this.fileManipulator } ); // Warning: ReparseKOMTreeAddition must be constructed only after `pathSelector` has been constructed. this.mReparseKOM = new GenericReparseKOMTreeAddition( this ); // Phase [5] Construct misc. 
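/* Construction-order sketch (illustrative, not part of the original source): the phases above are order-sensitive; in particular, phase [4] requires `pathSelector` to exist before GenericReparseKOMTreeAddition is built. Assuming a ready KOIMappingDriver `driver` (wiring beyond this file is assumed), the convenience constructors further below reduce bootstrap to:
 *
 *   FileSystemConfig config = new KernelFileSystemConfig();
 *   KOMFileSystem fs = new UniformObjectFileSystem( driver, config );
 *   Folder folder = fs.affirmFolder( "game/minecraft" );   // creates any missing folders along the path
 */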
// this.propertyTypeConverter = new DefaultPropertyConverter(); // this.textValueTypeConverter = new DefaultTextValueConverter(); this.fsNodeAllotment = new GenericFSNodeAllotment(this.fileMasterManipulator,this); this.globalPathGuidCacheQuerier = globalPathGuidCacheQuerier; this.directFileSystemAccessor = new KenExternalFileSystemInstrument(this); } // public GenericKOMFileSystem( Hydrogen hydrogen ) { // this.hydrogen = hydrogen; // } public UniformObjectFileSystem( Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMFileSystem parent, String name,FileSystemConfig fileSystemConfig ) { this( superiorProcess, masterManipulator, parent, name, null,fileSystemConfig, null ); } public UniformObjectFileSystem( Processum superiorProcess, KOIMasterManipulator masterManipulator, FileSystemConfig fileSystemConfig ){ this( superiorProcess, masterManipulator, null, KOMFileSystem.class.getSimpleName(),fileSystemConfig ); } public UniformObjectFileSystem( Processum superiorProcess, KOIMasterManipulator masterManipulator, IndexableMapQuerier globalPathGuidCacheQuerier, FileSystemConfig fileSystemConfig ){ this( superiorProcess, masterManipulator, null, KOMFileSystem.class.getSimpleName(), globalPathGuidCacheQuerier,fileSystemConfig, null ); } public UniformObjectFileSystem( KOIMappingDriver driver, KOMFileSystem parent, String name, FileSystemConfig fileSystemConfig ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, fileSystemConfig ); } public UniformObjectFileSystem( KOIMappingDriver driver,FileSystemConfig fileSystemConfig ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), fileSystemConfig ); } public UniformObjectFileSystem( KOIMappingDriver driver, IndexableMapQuerier globalPathGuidCacheQuerier, FileSystemConfig fileSystemConfig ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), globalPathGuidCacheQuerier, fileSystemConfig ); } protected void apply( IndexableMapQuerier globalPathGuidCacheQuerier ) { this.globalPathGuidCacheQuerier = globalPathGuidCacheQuerier; } @Override public FileTreeNode get(GUID guid, int depth ) { return (FileTreeNode) super.get( guid, depth ); } @Override public FileMasterManipulator getFileMasterManipulator() { return this.fileMasterManipulator; } @Override public FileTreeNode get( GUID guid ) { return (FileTreeNode) super.get( guid ); } @Override public void update(FileTreeNode node) { TreeNodeOperator operator = this.operatorFactory.getOperator(node.getMetaType()); operator.update( node ); } @Override public FileTreeNode getAsRootDepth( GUID guid ) { return (FileTreeNode) super.getAsRootDepth( guid ); } @Override @SuppressWarnings( "unchecked" ) public List fetchRoot() { return (List) super.fetchRoot(); } @Override public FileSystemConfig getConfig() { return (FileSystemConfig) super.getConfig(); } public FileSystemOperatorFactory getOperatorFactory() { return (FileSystemOperatorFactory) this.operatorFactory; } @Override public FileNode getFileNode(GUID guid) { return ( FileNode ) this.get( guid ); } @Override public Folder getFolder(GUID guid) { return ( Folder ) this.get( guid ); } @Override public void remove(String path) { String key = DefaultCacheConstants.FilePathCacheNS + path; this.globalPathGuidCacheQuerier.erase(key); super.remove(path); } @Override public void remove( GUID guid ){ super.remove( guid ); this.remoteClusterManipulator.remove( guid ); this.localClusterManipulator.remove( guid ); } @Override public List getAllTreeNode() { List nameSpaceNodes = 
this.fileManipulator.dumpGuid(); List confNodes = this.folderManipulator.dumpGuid(); ArrayList treeNodes = new ArrayList<>(); for (GUID guid : nameSpaceNodes){ TreeNode treeNode = this.get(guid); treeNodes.add(treeNode); } for ( GUID guid : confNodes ){ TreeNode treeNode = this.get(guid); treeNodes.add(treeNode); } return treeNodes; } protected FileTreeNode affirmTreeNodeByPath( String path, Class cnSup, Class nsSup ) { String[] parts = this.pathResolver.segmentPathParts( path ); String currentPath = ""; GUID parentGuid = GUIDs.Dummy128(); FileTreeNode node = this.queryElement( path ); if( node != null ) { return node; } FileTreeNode ret = null; for( int i = 0; i < parts.length; ++i ){ currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : "" ) + parts[ i ]; node = this.queryElement( currentPath ); if ( node == null){ if ( i == parts.length - 1 && cnSup != null ){ FileNode fileNode = (FileNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } ); fileNode.setName( parts[i] ); GUID guid = this.put( fileNode ); this.affirmOwnedNode( parentGuid, guid ); return fileNode; } else { Folder folder = (Folder) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } ); folder.setName(parts[i]); GUID guid = this.put(folder); if ( i != 0 ){ this.affirmOwnedNode( parentGuid, guid ); parentGuid = guid; } else { parentGuid = guid; } ret = folder; } } else { parentGuid = node.getGuid(); } } return ret; } @Override public FileNode affirmFileNode(String path) { FileNode fileNode = (FileNode) this.affirmTreeNodeByPath(path, GenericFileNode.class, GenericFolder.class); this.initVolume( path ); return fileNode; } @Override public Folder affirmFolder(String path) { Folder folder = (Folder) this.affirmTreeNodeByPath(path, null, GenericFolder.class); this.initVolume( path ); return folder; } @Override public void setDataAffinityGuid( GUID childGuid, GUID affinityParentGuid ) { } @Override public GUID queryGUIDByPath( String path ) { FileSystemConfig config = this.getConfig(); if ( this.globalPathGuidCacheQuerier != null ) { String key = DefaultCacheConstants.FilePathCacheNS + path; String szGUID = this.globalPathGuidCacheQuerier.get( key ); if ( StringUtils.isNoneEmpty( szGUID ) ) { return GUIDs.GUID128( szGUID ); } } GUID guid = super.queryGUIDByPath( path ); // Into OLTP-RDB if ( this.globalPathGuidCacheQuerier != null ) { String key = DefaultCacheConstants.FilePathCacheNS + path; this.globalPathGuidCacheQuerier.insert( key, guid.toString(), config.getPathQueryExpiryTimeHotMil() ); } return guid; } @Override public ElementNode queryElement(String path) { GUID guid = this.queryGUIDByPath( path ); if( guid != null ) { return (ElementNode) this.get( guid ); } return this.directFileSystemAccessor.queryElement(path); } @Override public List selectByName(String name) { return null; } @Override public void moveTo(String sourcePath, String destinationPath) { GUID[] pair = this.assertCopyMove( sourcePath, destinationPath ); GUID sourceGuid = pair[ 0 ]; GUID destinationGuid = pair[ 1 ]; this.imperialTree.moveTo( sourceGuid, destinationGuid ); this.imperialTree.removeCachePath( sourceGuid ); } @Override public void move(String sourcePath, String destinationPath) { GUID sourceGuid = this.assertPath( sourcePath, "source" ); List sourParts = this.pathResolver.resolvePathParts( sourcePath ); List destParts = this.pathResolver.resolvePathParts( destinationPath ); String szLastDestTarget = destParts.get( destParts.size() - 1 ); sourcePath = sourcePath.trim(); destinationPath = 
destinationPath.trim(); // Case1: Move "game/terraria/npc" => "game/minecraft/npc", which has the same dest name. // Case1-1: Move "game/terraria/npc/" => "game/minecraft/npc/" // Case1-2: Move "game/terraria/npc/." => "game/minecraft/npc/." if( sourParts.get( sourParts.size() - 1 ).equals( szLastDestTarget ) || szLastDestTarget.equals( "." ) || ( sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) ) ) { destParts.remove( destParts.size() - 1 ); String szParentPath = this.pathResolver.assemblePath( destParts ); destParts.add( szLastDestTarget ); // Move to, which has the same name or explicit current dir `.`. this.moveTo( sourcePath, szParentPath ); } // Case 2: "game/terraria/npc" => "game/minecraft/character/" || "game/minecraft/character/." // game/terraria/npc => game/minecraft/character/npc else if ( !sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && ( destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) || destinationPath.endsWith( "." ) ) ) { Folder target = this.affirmFolder( destinationPath ); this.imperialTree.moveTo( sourceGuid, target.getGuid() ); } // Case3: Move "game/terraria/npc" => "game/minecraft/character", move all children therein. // game/terraria/npc/f1 => game/minecraft/character/f1 // game/terraria/npc/f2 => game/minecraft/character/f2 // etc. else { // Case3-1: Is config or other none namespace node. // Move "game/terraria/file" => "game/minecraft/dir". // Case3-2: "game/terraria/npc/" => "game/minecraft/character" // Eq.Case2: Move "game/terraria/npc" => "game/minecraft/character", if( !this.folderManipulator.isFolder( sourceGuid ) ) { Folder target = this.affirmFolder( destinationPath ); this.imperialTree.moveTo( sourceGuid, target.getGuid() ); } else { List children = this.getChildren( sourceGuid ); if( !children.isEmpty() ) { Folder target = this.affirmFolder( destinationPath ); for( TreeNode node : children ) { this.imperialTree.moveTo( node.getGuid(), target.getGuid() ); } } } this.imperialTree.removeTreeNodeOnly( sourceGuid ); } this.imperialTree.removeCachePath( sourceGuid ); } @Override public void copy(String sourcePath, String destinationPath, VolumeManager volumeManager) throws IOException { ElementNode elementNode = this.queryElement(destinationPath); this.copy(sourcePath,elementNode,volumeManager); } @Override public void directCopy(String sourcePath, String destinationPath) throws IOException { this.directFileSystemAccessor.copy( sourcePath,destinationPath ); } private void copy(String sourcePath, FileTreeNode fileTreeNode, VolumeManager volumeManager ) throws IOException { if( fileTreeNode instanceof Folder ){ List children = this.getChildren(fileTreeNode.getGuid()); for(TreeNode child : children){ FileTreeNode childFileTreeNode = this.get(child.getGuid()); this.copy(sourcePath + StorageConstants.PathSeparator + fileTreeNode.getName(), childFileTreeNode,volumeManager); } } else { String name = fileTreeNode.getName(); String[] split = name.split(StorageConstants.period); // File tempFile = File.createTempFile(split[0], StorageConstants.PathSeparator + split[1]); File tempFile = new File(this.getConfig().getDefaultTempFilePath()+name); if(!tempFile.createNewFile()){ throw new IOException( "Creating file compromised, what :" + tempFile.toPath() ); } FileOutputStream fileOutputStream = new FileOutputStream(tempFile); TitanOutputStreamChanface outputStreamChanface = new TitanOutputStreamChanface(fileOutputStream); TitanFileExportEntity64 
exportEntity64 = new TitanFileExportEntity64(this, volumeManager, (FileNode) fileTreeNode, outputStreamChanface); exportEntity64.export(); FileNode fileNode = this.fsNodeAllotment.newFileNode(); FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ); TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel ); fileNode.setDefinitionSize( tempFile.length() ); fileNode.setName( tempFile.getName() ); String destDirPath = sourcePath + StorageConstants.PathSeparator + name; TitanFileReceiveEntity64 receiveEntity64 = new TitanFileReceiveEntity64(this, destDirPath, fileNode, titanFileChannelKChannel, volumeManager); this.receive( receiveEntity64 ); /* Close the streams before deleting the temp file: delete() can fail while the channel is still open. */ fileOutputStream.close(); channel.close(); tempFile.delete(); } } @Override public Object querySelectorJ( String szSelector ) { return null; } @Override public TreeMap getClustersByFileGuid( GUID guid ) { TreeMap< Long, Cluster> frameMap = new TreeMap<>(); List remoteClusters = this.remoteClusterManipulator.fetchRemoteClusterByFileGuid( guid ); for( RemoteCluster remoteCluster : remoteClusters ){ if( remoteCluster.getDeviceGuid().equals( this.getConfig().getLocalHostGuid() )){ LocalCluster localCluster = this.localClusterManipulator.getLocalClusterByGuid( remoteCluster.getSegGuid() ); frameMap.put( localCluster.getSegId(), localCluster ); } else { //todo remote fetch logic } } return frameMap; } @Override public List fetchClustersPageByFileGuid( GUID fileGuid, long offset, int pageSize ) { return this.remoteClusterManipulator.fetchRemoteClusterByFileGuid( fileGuid, offset, pageSize ); } @Override public ClusterPage fetchClustersByFileGuid( GUID fileGuid, int pageSize ) { return new ClusterPage64(this, this.remoteClusterManipulator, this.localClusterManipulator, fileGuid, pageSize ); } @Override public ClusterPage fetchClustersByFileGuid( GUID fileGuid ) { return new ClusterPage64( this,this.remoteClusterManipulator, this.localClusterManipulator, fileGuid ); } @Override public FSNodeAllotment getFSNodeAllotment() { return this.fsNodeAllotment; } @Override public Object querySelector(String szSelector) { return null; } @Override public List querySelectorAll(String szSelector) { return null; } @Override public Cluster getLastCluster(GUID guid) { RemoteCluster remoteCluster = this.remoteClusterManipulator.getLastCluster(guid); if ( remoteCluster.getDeviceGuid().equals( this.getConfig().getLocalHostGuid() )){ return this.localClusterManipulator.getLocalClusterByGuid(remoteCluster.getSegGuid()); } else { //todo remote fetch method } return null; } private String getNodeName(ImperialTreeNode node ){ UOI type = node.getType(); TreeNode newInstance = (TreeNode)type.newInstance(); TreeNodeOperator operator = this.getOperatorFactory().getOperator(newInstance.getMetaType()); TreeNode treeNode = operator.get(node.getGuid()); return treeNode.getName(); } private boolean allNonNull( List list ) { return list.stream().noneMatch( Objects::isNull ); } protected GUID[] assertCopyMove ( String sourcePath, String destinationPath ) throws IllegalArgumentException { GUID sourceGuid = this.queryGUIDByPath( sourcePath ); if( sourceGuid == null ) { throw new IllegalArgumentException( "Undefined source '" + sourcePath + "'" ); } GUID destinationGuid = this.queryGUIDByPath( destinationPath ); if( destinationGuid == null ) { throw new IllegalArgumentException( "Undefined destination '" + destinationPath + "'" ); } if( !this.folderManipulator.isFolder( destinationGuid ) ){ throw new IllegalArgumentException( "Illegal destination '" + destinationPath + "', should be namespace." ); } if( sourceGuid.equals( destinationGuid ) ) { throw new IllegalArgumentException( "Cyclic path detected '" + sourcePath + "'" ); } return new GUID[] { sourceGuid, destinationGuid }; } @Override public void receive( FileReceiveEntity entity) throws IOException { entity.receive(); } @Override public void receive( FileReceiveEntity entity, Number offset, Number endSize) throws IOException { entity.receive(offset, endSize ); } @Override public void randomReceive(FileReceiveEntity entity, Number offset, Number endSize) throws IOException { entity.randomReceive( offset,endSize ); } @Override public void export( FileExportEntity entity ) throws IOException { entity.export(); } @Override public void export( FileExportEntity entity, Number offset, Number endSize ) { } @Override public void setFolderVolumeMapping(GUID folderGuid, GUID volumeGuid) { this.folderVolumeMappingManipulator.insert( folderGuid, volumeGuid ); } @Override public GUID getMappingVolume(GUID folderGuid) { return this.folderVolumeMappingManipulator.getVolumeGuid( folderGuid ); } @Override public GUID getMappingVolume(String path) { String[] parts = this.pathResolver.segmentPathParts( path ); GUID currentVolumeGuid = null; String currentPath = ""; for( int i = 0; i < parts.length - 1; i++ ){ currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : "" ) + parts[ i ]; ElementNode elementNode = this.queryElement(currentPath); Folder folder = this.getFolder(elementNode.getGuid()); GUID relationVolume = folder.getRelationVolume(); if ( relationVolume != null ){ currentVolumeGuid = relationVolume; } } return currentVolumeGuid; } @Override public Cluster getClusterByFileWithId(GUID fileGuid, long segId) { return this.localClusterManipulator.getClusterByFileWithId( fileGuid,segId ); } @Override public void updateCluster(FileNode fileNode, long segId) { } @Override public void deleteCluster(FileNode fileNode, long segId) { this.remoteClusterManipulator.removeClusterByFileWithId( fileNode.getGuid(), segId ); this.localClusterManipulator.removeClusterByFileWithId( fileNode.getGuid(), segId ); } @Override public long countFileCluster(GUID fileGuid) { return this.remoteClusterManipulator.countFileClusters( fileGuid ); } @Override public void renameFile(String filePath, String newFileName) { ElementNode elementNode = this.queryElement(filePath); elementNode.setName( newFileName ); FileSystemOperator operator = (FileSystemOperator)this.operatorFactory.getOperator(elementNode.getMetaType()); operator.rename( elementNode.getGuid(), newFileName ); } private void initVolume(String path ){ String[] parts = this.pathResolver.segmentPathParts( path ); Folder root = this.getFolder(this.queryGUIDByPath(parts[0])); if( root.getRelationVolume() == null ){ root.applyVolume( GUIDs.GUID128( this.getConfig().getDefaultVolumeGuid() ) ); } } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/Verification.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.framework.system.prototype.Pinenut; import java.util.zip.CRC32; //todo refactor into an interface public class Verification implements Pinenut { private CRC32 crc32; private long checksum; private int parityCheck; public Verification() { } public Verification(CRC32 crc32, long checksum, int parityCheck) { this.crc32 = crc32; this.checksum = checksum; 
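/* A minimal sketch of filling this holder from java.util.zip.CRC32, the JDK class used above (illustrative; the `data` byte[] and the parity value 0 are assumptions, not taken from the original source):
 *
 *   CRC32 crc = new CRC32();
 *   crc.update( data );
 *   Verification v = new Verification( crc, crc.getValue(), 0 );
 */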
this.parityCheck = parityCheck; } public CRC32 getCrc32() { return crc32; } public void setCrc32(CRC32 crc32) { this.crc32 = crc32; } public long getChecksum() { return checksum; } public void setChecksum(long checksum) { this.checksum = checksum; } public int getParityCheck() { return parityCheck; } public void setParityCheck(int parityCheck) { this.parityCheck = parityCheck; } public String toString() { return "Verification{crc32 = " + crc32 + ", checksum = " + checksum + ", parityCheck = " + parityCheck + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/ComponentUOFSBuilder.java ================================================ package com.pinecone.hydra.storage.file.builder; import com.pinecone.framework.unit.BitSet64; import com.pinecone.hydra.storage.file.FileSystemConfig; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.UniformObjectFileSystem; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; public class ComponentUOFSBuilder implements UOFSBuilder { public static long DEFAULT_GENERATE_FEATURE = 0L; protected UOFSComponentor[] mComponentorIndex = new UOFSComponentor[ Feature.featuresSize() ]; protected KOIMappingDriver mKOIMappingDriver; protected long mFeatureValues = DEFAULT_GENERATE_FEATURE; protected FileSystemConfig mFileSystemConfig; public ComponentUOFSBuilder ( KOIMappingDriver driver, FileSystemConfig config ) { this.mKOIMappingDriver = driver; this.mFileSystemConfig = config; } @Override public UOFSBuilder registerComponentor( UOFSComponentor componentor ) { int i = componentor.getFeature().ordinal(); this.mComponentorIndex[ i ] = componentor; this.mFeatureValues = BitSet64.setBit( this.mFeatureValues, i ); return this; } @Override public KOMFileSystem buildByRegistered() { return this.build( this.mFeatureValues ); } @Override public KOMFileSystem build( Feature... 
features ) { long featureValues = DEFAULT_GENERATE_FEATURE; for ( int i = 0; i < features.length; ++i ) { Feature feature = features[ i ]; featureValues = Feature.config( featureValues, feature, true ); } return this.build( featureValues ); } @Override public KOMFileSystem build( long featureValues ) { KOMFileSystem fs = new UniformObjectFileSystem( this.mKOIMappingDriver, this.mFileSystemConfig ); for ( int i = 0; i < Feature.featuresSize(); ++i ) { if ( ( featureValues & (1L << i) ) != 0 ) { this.mComponentorIndex[ i ].apply( fs ); } } return fs; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/Feature.java ================================================ package com.pinecone.hydra.storage.file.builder; public enum Feature { EnableGlobalCache ; public final long mask = 1 << this.ordinal(); private Feature() { } public final long getMask() { return this.mask; } public static boolean isEnabled( long features, Feature feature ) { return (features & feature.mask) != 0; } public static long config( long features, Feature feature, boolean state ) { if ( state ) { features |= feature.mask; } else { features &= ~feature.mask; } return features; } public static long of( Feature[] features ) { if ( features == null ) { return 0L; } else { long value = 0L; for ( int i = 0; i < features.length; ++i ) { Feature feature = features[ i ]; value |= feature.mask; } return value; } } public static int featuresSize() { return Feature.values().length; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/UOFSBuilder.java ================================================ package com.pinecone.hydra.storage.file.builder; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.KOMFileSystem; public interface UOFSBuilder extends Pinenut { KOMFileSystem build( Feature ...features ); KOMFileSystem build( long featureValues ); KOMFileSystem buildByRegistered(); UOFSBuilder registerComponentor( UOFSComponentor componentor ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/UOFSComponentor.java ================================================ package com.pinecone.hydra.storage.file.builder; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.KOMFileSystem; public interface UOFSComponentor extends Pinenut { void apply( KOMFileSystem fs ); Feature getFeature(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/cache/DefaultCacheConstants.java ================================================ package com.pinecone.hydra.storage.file.cache; import java.util.concurrent.TimeUnit; public final class DefaultCacheConstants { public static final String FilePathCacheNS = "FILE_PATH_CACHE_NS_"; public static final long PathQueryExpiryTimeHotMil = TimeUnit.HOURS.toMillis( 4 ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/cache/FileSystemCacheConfig.java ================================================ package com.pinecone.hydra.storage.file.cache; import com.pinecone.framework.system.prototype.Pinenut; public interface FileSystemCacheConfig extends Pinenut { String getRedisHost(); int getRedisPort(); int 
getRedisTimeOut(); String getRedisPassword(); int getRedisDatabase(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/cache/MappedFileSystemCacheConfig.java ================================================ package com.pinecone.hydra.storage.file.cache; import com.pinecone.framework.util.json.JSONObject; public class MappedFileSystemCacheConfig implements FileSystemCacheConfig { //protected Map protoConfig; protected JSONObject protoConfig; protected String redisHost; protected int redisPort; protected int redisTimeOut; protected String redisPassword; protected int redisDatabase; public MappedFileSystemCacheConfig( JSONObject protoConfig ){ this.protoConfig = protoConfig; this.redisHost = this.protoConfig.optString("redisHost"); this.redisPort = this.protoConfig.optInt("redisPort", 6379); this.redisTimeOut = this.protoConfig.optInt("redisTimeOut",2000); this.redisPassword = this.protoConfig.optString("redisPassword"); this.redisDatabase = this.protoConfig.optInt( "redisDatabase" ); } @Override public String getRedisHost() { return this.redisHost; } @Override public int getRedisPort() { return this.redisPort; } @Override public int getRedisTimeOut() { return this.redisTimeOut; } @Override public String getRedisPassword() { return this.redisPassword; } @Override public int getRedisDatabase() { return this.redisDatabase; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ArcReparseSemanticNode.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.storage.file.KOMFileSystem; import java.time.LocalDateTime; public abstract class ArcReparseSemanticNode extends ArchElementNode implements ReparseSemanticNode { protected String reparsedPoint; protected KOMFileSystem fileSystem; public ArcReparseSemanticNode(){ this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public ArcReparseSemanticNode( KOMFileSystem fileSystem ) { this(); this.fileSystem = fileSystem; GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); } @Override public String getReparsedPoint() { return this.reparsedPoint; } @Override public void setReparsedPoint(String reparsedPoint) { this.reparsedPoint = reparsedPoint; } @Override public KOMFileSystem parentFileSystem() { return this.fileSystem; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ArchCluster.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; public class ArchCluster implements Cluster { private long enumId; private GUID fileGuid; private GUID segGuid; private long segId; private long crc32; private long size; public ArchCluster() { } public ArchCluster(long enumId, GUID fileGuid, GUID segGuid, long segId, long crc32, long size) { this.enumId = enumId; this.fileGuid = fileGuid; this.segGuid = segGuid; this.segId = segId; this.crc32 = crc32; this.size = size; } @Override public long getEnumId() { return enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public GUID getFileGuid() { return fileGuid; } @Override public void setFileGuid(GUID fileGuid) { this.fileGuid = 
fileGuid; } @Override public GUID getSegGuid() { return this.segGuid; } @Override public void setSegGuid(GUID segGuid) { this.segGuid = segGuid; } @Override public long getSegId() { return segId; } @Override public void setSegId(long segId) { this.segId = segId; } @Override public long getCrc32() { return this.crc32; } @Override public void setCrc32( long crc32 ) { this.crc32 = crc32; } @Override public long getSize() { return size; } @Override public void setSize(long size) { this.size = size; } @Override public void remove() { } @Override public void save() { } public String toString() { return "ArchCluster{enumId = " + enumId + ", fileGuid = " + fileGuid + ", segGuid = " + segGuid + ", segId = " + segId + ", crc32 = " + crc32 + ", size = " + size + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ArchElementNode.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public abstract class ArchElementNode implements ElementNode { protected long enumId; protected GUID guid; protected LocalDateTime createTime; protected LocalDateTime updateTime; protected String name; protected FileSystemAttributes fileSystemAttributes; @Override public long getEnumId() { return enumId; } public void setEnumId(long enumId) { this.enumId = enumId; } @Override public GUID getGuid() { return guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public LocalDateTime getCreateTime() { return createTime; } public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return updateTime; } public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } @Override public String getName() { return this.name; } @Override public void setName(String name) { this.name = name; } @Override public FileSystemAttributes getAttributes() { return fileSystemAttributes; } @Override public void setAttributes( FileSystemAttributes fileSystemAttributes ) { this.fileSystemAttributes = fileSystemAttributes; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/Cluster.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface Cluster extends Pinenut { long getEnumId(); void setEnumId(long enumId); GUID getFileGuid(); void setFileGuid(GUID fileGuid); GUID getSegGuid(); void setSegGuid(GUID segGuid); long getSegId(); void setSegId(long segId); long getCrc32(); void setCrc32(long crc32); long getSize(); void setSize(long size); void save(); void remove(); default LocalCluster evinceLocalCluster(){ return null; } default RemoteCluster evinceRemoteCluster(){ return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ClusterPage.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.system.prototype.Pinenut; public interface ClusterPage extends Pinenut { int getPageSize() ; int getPageSum() ; long getCurrentPage() ; long getClusters() ; Cluster getCluster( long segId ) ; LocalCluster 
getLocalCluster( long segId ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ClusterPage64.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.source.LocalClusterManipulator; import com.pinecone.hydra.storage.file.source.RemoteClusterManipulator; import java.util.List; public class ClusterPage64 implements ClusterPage { protected KOMFileSystem komFileSystem; protected RemoteClusterManipulator remoteClusterManipulator; protected LocalClusterManipulator localClusterManipulator; protected List mCurrClusterPage; protected int mnPageSize; protected int mnPageSum; protected int mnCurrPageAt; protected long mnClusters; protected GUID fileGuid; public ClusterPage64(KOMFileSystem fileSystem, RemoteClusterManipulator remoteClusterManipulator, LocalClusterManipulator localClusterManipulator, GUID fileGuid, int pageSize ) { this.komFileSystem = fileSystem; this.fileGuid = fileGuid; this.localClusterManipulator = localClusterManipulator; this.remoteClusterManipulator = remoteClusterManipulator; this.mnPageSize = pageSize; this.mnClusters = remoteClusterManipulator.countFileClusters( fileGuid ); this.mnCurrPageAt = 0; this.mnPageSum = (int) Math.ceil( (double) this.mnClusters / this.mnPageSize ); this.mCurrClusterPage = this.loadClusterPage(0); } public ClusterPage64(KOMFileSystem fileSystem, RemoteClusterManipulator remoteClusterManipulator, LocalClusterManipulator localClusterManipulator, GUID fileGuid ) { this( fileSystem,remoteClusterManipulator, localClusterManipulator, fileGuid, 10 ); } @SuppressWarnings( "unchecked" ) protected List loadClusterPage( int pageIndex ) { return (List) this.remoteClusterManipulator.fetchRemoteClusterByFileGuid( this.fileGuid, (long) pageIndex * this.mnPageSize, this.mnPageSize ); } @Override public int getPageSize() { return this.mnPageSize; } @Override public int getPageSum() { return this.mnPageSum; } @Override public long getCurrentPage() { return this.mnCurrPageAt; } @Override public long getClusters() { return this.mnClusters; } @Override public Cluster getCluster( long segId ) { if ( segId >= this.mnClusters ) { return null; } if ( !this.isInCurrentPage( segId ) ) { this.mnCurrPageAt = this.calculatePageIndex( segId ); this.mCurrClusterPage = this.loadClusterPage(this.mnCurrPageAt); } return this.findClusterInPage(segId); } @Override public LocalCluster getLocalCluster( long segId ) { Cluster cluster = this.getCluster( segId ); if ( cluster instanceof LocalCluster ) { return (LocalCluster) cluster; } else if ( cluster instanceof RemoteCluster ) { RemoteCluster remoteCluster = (RemoteCluster) cluster; if( remoteCluster.getDeviceGuid().equals( this.komFileSystem.getConfig().getLocalHostGuid() )) { return this.localClusterManipulator.getLocalClusterByGuid( remoteCluster.getSegGuid() ); } } return null; } protected boolean isInCurrentPage( long segId ) { return segId >= this.mCurrClusterPage.get(0).getSegId() && segId <= this.mCurrClusterPage.get(this.mCurrClusterPage.size() - 1).getSegId(); } protected Cluster findClusterInPage( long segId ) { int offset = (int) (segId % this.mnPageSize); if ( offset < 0 || offset >= this.mCurrClusterPage.size() ) { return null; } return this.mCurrClusterPage.get( offset ); } protected int calculatePageIndex( long segId ) { // pageIndex = segId / pageSize 
(floor division) return (int) (segId / this.mnPageSize); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/DirectlyExternalSymbolic.java ================================================ package com.pinecone.hydra.storage.file.entity; public class DirectlyExternalSymbolic { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ElementNode.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.system.ko.meta.ElementObject; import java.time.LocalDateTime; public interface ElementNode extends FileTreeNode, ElementObject { long getEnumId(); GUID getGuid(); void setGuid(GUID guid); LocalDateTime getCreateTime(); LocalDateTime getUpdateTime(); String getName(); void setName(String name); FileSystemAttributes getAttributes(); void setAttributes( FileSystemAttributes attributes ); KOMFileSystem parentFileSystem(); @Override default String objectCategoryName() { return "Storage"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ExternalSymbolic.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator; public interface ExternalSymbolic extends Symbolic { void apply( ExternalSymbolicManipulator externalSymbolicManipulator ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FSNodeAllotment.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface FSNodeAllotment extends Pinenut { Folder newFolder(); Folder newFolder( String name ); FileNode newFileNode(); FileNode newFileNode( String name, long definitionSize, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster); FileNode newFileNode( String name, long definitionSize ); FileNode newFileNode( String name, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster); LocalCluster newLocalCluster(); LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName, long crc32, long size, long fileStartOffset ); LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName ); RemoteCluster newRemoteCluster(); RemoteCluster newRemoteCluster(GUID fileGuid, int segId, long crc32, long size ); RemoteCluster newRemoteCluster(GUID fileGuid, int segId ); Symbolic newSymbolic(); SymbolicMeta newSymbolicMeta(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface FileMeta extends Pinenut { long getEnumId(); void setEnumId(long enumId); GUID getGuid(); void setGuid(GUID guid); } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileNode.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.CheckedFile; import java.time.LocalDateTime; import java.util.TreeMap; public interface FileNode extends ElementNode, CheckedFile { LocalDateTime getDeletedTime(); void setDeletedTime(LocalDateTime deletedTime); long getChecksum(); void setChecksum(long checksum); int getParityCheck(); void setParityCheck(int parityCheck); void copyValueTo(GUID destinationGuid ); void copyTo (GUID destinationGuid); FileMeta getFileMeta(); void startDistribution(FileMeta fileMeta); GUID getDataAffinityGuid(); boolean getIsUploadSuccessful(); void setIsUploadSuccessful( boolean isUploadSuccessful ); TreeMap<Long, Cluster> getClusters(); @Override default FileNode evinceFileNode() { return this; } void removeCluster(); long getPhysicalSize(); void setPhysicalSize(long physicalSize); long getLogicSize(); void setLogicSize(long logicSize); long getDefinitionSize(); void setDefinitionSize(long definitionSize); long getCrc32Xor(); void setCrc32Xor( long crc32Xor ); boolean getIntegrityCheckEnable(); void setIntegrityCheckEnable(boolean integrityCheckEnable); boolean getDisableCluster(); void setDisableCluster(boolean disableCluster); boolean isUploadSuccess(); String getPath(); void setPath( String path ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileSystemAttributes.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.util.Collection; import java.util.Map; import java.util.Set; public interface FileSystemAttributes extends Pinenut, Map<String, String> { GUID getGuid(); void setGuid( GUID guid ); String getAttribute( String key ); void setAttribute( String key, String value ); Map<String, String> getAttributes(); void setAttributes( Map<String, String> attributes ); ElementNode parentElement(); @Override default boolean isEmpty() { return this.getAttributes().isEmpty(); } @Override default int size() { return this.getAttributes().size(); } @Override default boolean containsKey( Object key ) { return this.getAttributes().containsKey( key ); } @Override default boolean containsValue( Object value ) { return this.getAttributes().containsValue(value); } @Override default String get( Object key ) { return this.getAttributes().get(key); } @Override default Set<String> keySet() { return this.getAttributes().keySet(); } @Override default Collection<String> values() { return this.getAttributes().values(); } @Override default Set<Map.Entry<String, String>> entrySet() { return this.getAttributes().entrySet(); } String insert( String key, String value ) ; String update( String key, String value ) ; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileTreeNode.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public interface FileTreeNode extends TreeNode { default FileNode evinceFileNode(){ return null; } default Folder evinceFolder(){ return null; } default Symbolic evinceSymbolic() { return null; } void setName(String s); } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/Folder.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import java.util.List; import java.util.Map; import java.util.Set; public interface Folder extends ElementNode { void setNodeAttribute(FileSystemAttributes attributes); FolderMeta getFolderMeta(); void setFolderMeta(FolderMeta folderMeta); Map<String, FileTreeNode> getChildren(); List<GUID> fetchChildrenGuids(); void setChildrenGuids( List<GUID> contentGuids, int depth ); List<FileTreeNode> listItem(); void put ( String key, FileTreeNode val ); void remove ( String key ); void put ( ElementNode child ); Folder createFolder( String name ); ExternalSymbolic createExternalSymbolic( String name, String reparsedPoint ); KOMFileSystem getFileTree(); boolean containsKey ( String key ); boolean isEmpty(); @Override default Folder evinceFolder() { return this; } Set<String> keySet(); Set<Map.Entry<String, FileTreeNode>> entrySet(); void copyTo(GUID destinationGuid); void copyNamespaceMetaTo(GUID destinationGuid); long TotalFolderSize(); void applyVolume( GUID volumeGuid ); GUID getRelationVolume(); String getPath(); void setPath( String path ); Integer getSyncState(); void setSyncState( Integer syncState ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FolderMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface FolderMeta extends Pinenut { long getEnumId(); void setEnumId(long enumId); GUID getGuid(); void setGuid(GUID guid); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericExternalSymbolic.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator; public class GenericExternalSymbolic extends ArcReparseSemanticNode implements ExternalSymbolic { private SymbolicMeta symbolicMeta; private ExternalSymbolicManipulator externalSymbolicManipulator; public GenericExternalSymbolic() { super(); } public GenericExternalSymbolic( KOMFileSystem fileSystem ) { super( fileSystem ); } @Override public SymbolicMeta getSymbolicMeta() { return this.symbolicMeta; } @Override public void setSymbolicMeta(SymbolicMeta symbolicMeta) { this.symbolicMeta = symbolicMeta; } @Override public void create() { this.externalSymbolicManipulator.insert( this ); } @Override public void remove() { this.externalSymbolicManipulator.remove( this.guid ); this.symbolicMeta.remove(); } @Override public void apply(ExternalSymbolicManipulator externalSymbolicManipulator) { this.externalSymbolicManipulator = externalSymbolicManipulator; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFSNodeAllotment.java ================================================ package 
com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.framework.util.id.GuidAllocator; public class GenericFSNodeAllotment implements FSNodeAllotment { private FileMasterManipulator fileMasterManipulator; private KOMFileSystem fileSystem; private GuidAllocator guidAllocator; public GenericFSNodeAllotment(FileMasterManipulator fileMasterManipulator, KOMFileSystem fileSystem){ this.fileMasterManipulator = fileMasterManipulator; this.fileSystem = fileSystem; this.guidAllocator = fileSystem.getGuidAllocator(); } @Override public Folder newFolder(){ GenericFolder folder = new GenericFolder(fileSystem, fileMasterManipulator.getFolderManipulator()); folder.setGuid( guidAllocator.nextGUID() ); return folder; } @Override public Folder newFolder(String name) { GenericFolder folder = new GenericFolder(fileSystem, fileMasterManipulator.getFolderManipulator()); folder.setName( name ); folder.setGuid( guidAllocator.nextGUID() ); return folder; } @Override public FileNode newFileNode(){ GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator()); fileNode.setGuid( guidAllocator.nextGUID() ); return fileNode; } @Override public FileNode newFileNode(String name, long definitionSize, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster) { GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator()); fileNode.setGuid( guidAllocator.nextGUID() ); fileNode.setName( name ); //fileNode.setCrc32Xor( crc32Xor ); fileNode.setDefinitionSize( definitionSize ); fileNode.setIntegrityCheckEnable( integrityCheckEnable ); fileNode.setDisableCluster( disableCluster ); return fileNode; } @Override public FileNode newFileNode(String name, long definitionSize) { GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator()); fileNode.setName( name ); fileNode.setDefinitionSize( definitionSize ); fileNode.setGuid( guidAllocator.nextGUID() ); return fileNode; } @Override public FileNode newFileNode(String name, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster) { GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator()); fileNode.setName( name ); //fileNode.setCrc32Xor( crc32Xor ); fileNode.setDisableCluster( disableCluster ); fileNode.setIntegrityCheckEnable( integrityCheckEnable ); return fileNode; } @Override public LocalCluster newLocalCluster(){ GenericLocalCluster frame = new GenericLocalCluster(fileMasterManipulator.getLocalClusterManipulator()); frame.setSegGuid( guidAllocator.nextGUID() ); frame.setLocalClusterManipulator( this.fileMasterManipulator.getLocalClusterManipulator() ); return frame; } @Override public LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName, long crc32, long size, long fileStartOffset) { GenericLocalCluster frame = new GenericLocalCluster(fileMasterManipulator.getLocalClusterManipulator()); frame.setSegGuid( guidAllocator.nextGUID() ); frame.setSegId( segId ); frame.setSourceName( sourceName ); frame.setCrc32( crc32 ); frame.setSize( size ); frame.setFileGuid( fileGuid ); return frame; } @Override public LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName) { GenericLocalCluster frame = new GenericLocalCluster(fileMasterManipulator.getLocalClusterManipulator()); frame.setFileGuid( 
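// Usage sketch for this allotment factory (all names below are hypothetical placeholders;
// the factory methods are the ones defined in this class):
//   FSNodeAllotment allotment = new GenericFSNodeAllotment( manipulator, fileSystem );
//   FileNode node   = allotment.newFileNode( "report.bin", 1 << 20 );           // name + definitionSize
//   LocalCluster c0 = allotment.newLocalCluster( node.getGuid(), 0, "vol-a" );  // segment 0 on source "vol-a"
//   c0.save();                                                                  // upsert via LocalClusterManipulator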
fileGuid ); frame.setSegId( segId ); frame.setSourceName( sourceName ); frame.setSegGuid( guidAllocator.nextGUID() ); return frame; } @Override public RemoteCluster newRemoteCluster(){ GenericRemoteCluster frame = new GenericRemoteCluster(fileMasterManipulator.getRemoteClusterManipulator()); frame.setSegGuid( guidAllocator.nextGUID() ); return frame; } @Override public RemoteCluster newRemoteCluster(GUID fileGuid, int segId, long crc32, long size) { GenericRemoteCluster frame = new GenericRemoteCluster(fileMasterManipulator.getRemoteClusterManipulator()); frame.setSegGuid( guidAllocator.nextGUID() ); frame.setFileGuid( fileGuid ); frame.setSegId( segId ); frame.setCrc32( crc32 ); frame.setSize( size ); return frame; } @Override public RemoteCluster newRemoteCluster(GUID fileGuid, int segId) { GenericRemoteCluster frame = new GenericRemoteCluster(fileMasterManipulator.getRemoteClusterManipulator()); frame.setFileGuid( fileGuid ); frame.setSegGuid( guidAllocator.nextGUID() ); frame.setSegId( segId ); return frame; } @Override public Symbolic newSymbolic() { return new GenericSymbolic(this.fileMasterManipulator.getSymbolicManipulator()); } @Override public SymbolicMeta newSymbolicMeta() { return new GenericSymbolicMeta(this.fileMasterManipulator.getSymbolicMetaManipulator()); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFileMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; public class GenericFileMeta implements FileMeta { private long enumId; private GUID guid; public GenericFileMeta() { } public GenericFileMeta(long enumId, GUID guid) { this.enumId = enumId; this.guid = guid; } public long getEnumId() { return enumId; } public void setEnumId(long enumId) { this.enumId = enumId; } public GUID getGuid() { return guid; } public void setGuid(GUID guid) { this.guid = guid; } public String toString() { return "GenericFileMeta{enumId = " + enumId + ", guid = " + guid + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFileNode.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.source.FileManipulator; import com.pinecone.framework.util.id.GuidAllocator; import java.time.LocalDateTime; import java.util.TreeMap; public class GenericFileNode extends ArchElementNode implements FileNode{ private LocalDateTime deletedTime; private long checksum; private int parityCheck; private FileMeta fileMeta; private KOMFileSystem fileSystem; private FileManipulator fileManipulator; private TreeMap<Integer, Cluster> clusters = new TreeMap<>(); private boolean isUploadSuccessful; private long physicalSize; private long logicSize; private long definitionSize; private long crc32Xor; private boolean integrityCheckEnable; private boolean disableCluster; private String path; @Override public boolean getIsUploadSuccessful() { return this.isUploadSuccessful; } @Override public void setIsUploadSuccessful(boolean isUploadSuccessful) { this.isUploadSuccessful = isUploadSuccessful; } @Override public TreeMap<Integer, Cluster> getClusters() { return this.fileSystem.getClustersByFileGuid( this.guid ); } public 
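// Sketch: enumerating a file's clusters keyed by segment id (assumes the
// TreeMap<Integer, Cluster> signature reconstructed above; `node` is a hypothetical FileNode):
//   for ( Map.Entry<Integer, Cluster> e : node.getClusters().entrySet() ) {
//       System.out.println( "seg " + e.getKey() + " -> " + e.getValue().getSegGuid() );
//   }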
GenericFileNode() { } public GenericFileNode( KOMFileSystem fileSystem ) { this.fileSystem = fileSystem; GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); this.setCreateTime( LocalDateTime.now() ); } public GenericFileNode( KOMFileSystem fileSystem, FileManipulator fileManipulator ) { this(fileSystem); this.fileManipulator = fileManipulator; } public void apply( KOMFileSystem fileSystem ) { this.fileSystem = fileSystem; } public long getEnumId() { return enumId; } public void setEnumId(long enumId) { this.enumId = enumId; } public GUID getGuid() { return guid; } public void setGuid(GUID guid) { this.guid = guid; } public LocalDateTime getCreateTime() { return createTime; } public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } public LocalDateTime getUpdateTime() { return updateTime; } public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } public String getName() { return name; } @Override public FileSystemAttributes getAttributes() { return this.fileSystemAttributes; } @Override public KOMFileSystem parentFileSystem() { return fileSystem; } public void setName(String name) { this.name = name; } public LocalDateTime getDeletedTime() { return deletedTime; } public void setDeletedTime(LocalDateTime deletedTime) { this.deletedTime = deletedTime; } public long getChecksum() { return checksum; } public void setChecksum(long checksum) { this.checksum = checksum; } public int getParityCheck() { return parityCheck; } public void setParityCheck(int parityCheck) { this.parityCheck = parityCheck; } @Override public void copyValueTo(GUID destinationGuid) { } @Override public void copyTo(GUID destinationGuid) { } public FileMeta getFileMeta() { return fileMeta; } public void startDistribution(FileMeta fileMeta) { this.fileMeta = fileMeta; } @Override public GUID getDataAffinityGuid() { return null; } public FileSystemAttributes getAttribute() { return fileSystemAttributes; } public void setAttribute(FileSystemAttributes fileSystemAttributes) { this.fileSystemAttributes = fileSystemAttributes; } @Override public void removeCluster() { if ( this.clusters == null || this.clusters.isEmpty() ){ this.clusters = this.fileSystem.getClustersByFileGuid( this.guid ); } for ( Cluster cluster : this.clusters.values() ){ cluster.remove(); } } @Override public long getPhysicalSize() { return this.physicalSize; } @Override public void setPhysicalSize(long physicalSize) { this.physicalSize = physicalSize; } @Override public long getLogicSize() { return this.logicSize; } @Override public void setLogicSize(long logicSize) { this.logicSize = logicSize; } @Override public long getDefinitionSize() { return this.definitionSize; } @Override public void setDefinitionSize(long definitionSize) { this.definitionSize = definitionSize; } @Override public long getCrc32Xor() { return this.crc32Xor; } @Override public void setCrc32Xor( long crc32Xor ) { this.crc32Xor = crc32Xor; } @Override public boolean getIntegrityCheckEnable() { return this.integrityCheckEnable; } @Override public void setIntegrityCheckEnable(boolean integrityCheckEnable) { this.integrityCheckEnable = integrityCheckEnable; } @Override public boolean getDisableCluster() { return this.disableCluster; } @Override public boolean isUploadSuccess() { if ( this.physicalSize == this.definitionSize ){ return true; } return false; } @Override public Number size() { return this.physicalSize; } @Override public void setDisableCluster(boolean disableCluster) 
{ this.disableCluster = disableCluster; } @Override public String getPath() { return this.path; } @Override public void setPath(String path) { this.path = path; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFolder.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.source.FolderManipulator; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; public class GenericFolder extends ArchElementNode implements Folder{ private FileSystemAttributes attributes; private FolderMeta folderMeta; private KOMFileSystem fileSystem; private FolderManipulator folderManipulator; private String path; private Integer syncState; public GenericFolder() { } public GenericFolder( KOMFileSystem fileSystem ) { this.fileSystem = fileSystem; GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator(); this.setGuid( guidAllocator.nextGUID() ); this.setCreateTime( LocalDateTime.now() ); this.folderManipulator = fileSystem.getFileMasterManipulator().getFolderManipulator(); } public GenericFolder( KOMFileSystem fileSystem, FolderManipulator folderManipulator ) { this(fileSystem); this.folderManipulator = folderManipulator; } public void apply(KOMFileSystem fileSystem ) { this.fileSystem = fileSystem; } @Override public KOMFileSystem parentFileSystem() { return this.fileSystem; } @Override public void setNodeAttribute(FileSystemAttributes attributes) { this.attributes = attributes; } @Override public FolderMeta getFolderMeta() { return this.folderMeta; } @Override public void setFolderMeta(FolderMeta folderMeta) { this.folderMeta = folderMeta; } @Override public Map<String, FileTreeNode> getChildren() { return null; } @Override public List<GUID> fetchChildrenGuids() { return null; } @Override public void setChildrenGuids( List<GUID> contentGuids, int depth ) { } @Override public List<FileTreeNode> listItem() { ArrayList<FileTreeNode> fileTreeNodes = new ArrayList<>(); List<TreeNode> children = this.fileSystem.getChildren(this.guid); for( TreeNode node : children ){ // if( node instanceof ExternalSymbolic ){ // ExternalSymbolic externalSymbolic = (ExternalSymbolic) node; // String reparsedPoint = externalSymbolic.getReparsedPoint(); // File file = new File(reparsedPoint); // if( file.isDirectory() ){ // GenericExternalFolder externalFolder = new GenericExternalFolder(file); // fileTreeNodes.add(externalFolder); // }else { // GenericExternalFile externalFile = new GenericExternalFile(file); // fileTreeNodes.add(externalFile); // } // }else { // FileTreeNode fileTreeNode = this.fileSystem.get(node.getGuid()); // fileTreeNodes.add( fileTreeNode ); // } FileTreeNode fileTreeNode = this.fileSystem.get(node.getGuid()); fileTreeNodes.add( fileTreeNode ); } return fileTreeNodes; } @Override public void put( String key, FileTreeNode val ) { } @Override public void remove( String key ) { } @Override public void put( ElementNode child ) { this.fileSystem.put( child ); this.fileSystem.affirmOwnedNode( this.guid, 
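// Usage sketch for this put( ElementNode ) overload (hypothetical `root` and `allotment`):
// the child is persisted first, then the parent/child edge is recorded in the tree:
//   Folder docs = root.createFolder( "docs" );
//   docs.put( allotment.newFileNode( "readme.md", 0L ) );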
child.getGuid() ); } @Override public Folder createFolder( String name ) { Folder neo = new GenericFolder( this.fileSystem ); neo.setName( name ); this.put( neo ); return neo; } @Override public ExternalSymbolic createExternalSymbolic( String name, String reparsedPoint ) { ExternalSymbolic neo = new GenericExternalSymbolic( this.fileSystem ); neo.setName( name ); neo.setReparsedPoint( reparsedPoint ); this.put( neo ); return neo; } @Override public KOMFileSystem getFileTree() { return this.fileSystem; } @Override public long TotalFolderSize() { long size = 0; List<TreeNode> children = this.fileSystem.getChildren(this.guid); for( TreeNode node : children ){ if ( node instanceof Folder ){ Folder folder = (Folder) node; size += folder.TotalFolderSize(); } else if( node instanceof FileNode ){ FileNode file = (FileNode) node; size += file.size().longValue(); } } return size; } @Override public boolean containsKey(String key) { return false; } // @Override // public Number size() { // long size = 0; // List children = this.fileSystem.getChildren(this.guid); // for( TreeNode node : children ){ // ElementNode elementNode = (ElementNode) node; // size += elementNode.size().longValue(); // } // return size; // } @Override public boolean isEmpty() { return false; } @Override public Set<String> keySet() { return null; } @Override public Set<Map.Entry<String, FileTreeNode>> entrySet() { return null; } @Override public void copyTo(GUID destinationGuid) { } @Override public void copyNamespaceMetaTo(GUID destinationGuid) { } @Override public String getPath() { return this.path; } @Override public void setPath(String path) { this.path = path; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } @Override public void applyVolume(GUID volumeGuid) { this.fileSystem.setFolderVolumeMapping( this.guid, volumeGuid ); } @Override public GUID getRelationVolume() { return this.fileSystem.getMappingVolume( this.guid ); } @Override public Integer getSyncState() { return this.syncState; } @Override public void setSyncState(Integer syncState) { this.syncState = syncState; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFolderMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; public class GenericFolderMeta implements FolderMeta{ private long enumId; private GUID guid; public GenericFolderMeta() { } public GenericFolderMeta(long enumId, GUID guid) { this.enumId = enumId; this.guid = guid; } public long getEnumId() { return enumId; } public void setEnumId(long enumId) { this.enumId = enumId; } public GUID getGuid() { return guid; } public void setGuid(GUID guid) { this.guid = guid; } public String toString() { return "GenericFolderMeta{enumId = " + enumId + ", guid = " + guid + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericLocalCluster.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.hydra.storage.file.source.LocalClusterManipulator; import java.time.LocalDateTime; public class GenericLocalCluster extends ArchCluster implements LocalCluster { private LocalDateTime createTime; private LocalDateTime updateTime; private String sourceName; private LocalClusterManipulator 
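// Sketch for the folder/volume mapping defined in GenericFolder above
// (`folder` and `volumeGuid` are hypothetical):
//   folder.applyVolume( volumeGuid );        // record folder -> volume in the file system
//   GUID vol = folder.getRelationVolume();   // read the mapping back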
localClusterManipulator; private long definitionSize; private long fileStartOffset; @Override public long getDefinitionSize() { return this.definitionSize; } @Override public void setDefinitionSize(long definitionSize) { this.definitionSize = definitionSize; } public GenericLocalCluster() { } public GenericLocalCluster(LocalDateTime createTime, LocalDateTime updateTime, String sourceName) { this.createTime = createTime; this.updateTime = updateTime; this.sourceName = sourceName; } public GenericLocalCluster(LocalClusterManipulator localClusterManipulator ) { this.localClusterManipulator = localClusterManipulator; this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public LocalDateTime getCreateTime() { return createTime; } public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Override public long getFileStartOffset() { return this.fileStartOffset; } @Override public void setFileStartOffset(long fileStartOffset) { this.fileStartOffset = fileStartOffset; } public LocalDateTime getUpdateTime() { return updateTime; } @Override public void setLocalClusterManipulator(LocalClusterManipulator localClusterManipulator) { this.localClusterManipulator = localClusterManipulator; } public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } public String getSourceName() { return sourceName; } public void setSourceName(String sourceName) { this.sourceName = sourceName; } @Override public void save() { LocalCluster frame = this.localClusterManipulator.getClusterByFileWithId(this.getFileGuid(), this.getSegId()); if( frame == null ){ this.localClusterManipulator.insert(this); }else { this.localClusterManipulator.update( this ); } } @Override public void remove() { this.localClusterManipulator.remove( this.getSegGuid() ); } public String toString() { return "GenericLocalCluster{createTime = " + createTime + ", updateTime = " + updateTime + ", sourceName = " + sourceName + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericLocalClusterMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.id.GUID; import java.util.Map; public class GenericLocalClusterMeta implements LocalClusterMeta{ private long enumId; private GUID guid; private String key; private String value; protected Map metas = new LinkedTreeMap<>(); @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public String getKey() { return this.key; } @Override public void setKey(String key) { this.key = key; } @Override public String getValue() { return this.value; } @Override public void setValue(String value) { this.value = value; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericRemoteCluster.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.source.RemoteClusterManipulator; public class GenericRemoteCluster extends ArchCluster implements RemoteCluster { private GUID deviceGuid; private 
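// GenericLocalCluster.save() above is an upsert keyed on (fileGuid, segId):
// fetch the existing row, insert if absent, update otherwise. Equivalent sketch
// (hypothetical `manipulator` and `self`):
//   LocalCluster cur = manipulator.getClusterByFileWithId( fileGuid, segId );
//   if ( cur == null ) { manipulator.insert( self ); } else { manipulator.update( self ); }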
RemoteClusterManipulator frameManipulator; public GenericRemoteCluster() { } public GenericRemoteCluster(GUID deviceGuid) { this.deviceGuid = deviceGuid; } public GenericRemoteCluster(RemoteClusterManipulator remoteClusterManipulator ) { this.frameManipulator = remoteClusterManipulator; } public GUID getDeviceGuid() { return deviceGuid; } public void setDeviceGuid(GUID deviceGuid) { this.deviceGuid = deviceGuid; } @Override public void setRemoteClusterManipulator(RemoteClusterManipulator remoteClusterManipulator) { this.frameManipulator = remoteClusterManipulator; } public String toString() { return "GenericRemoteCluster{deviceGuid = " + deviceGuid + "}"; } @Override public void save() { RemoteCluster cluster = this.frameManipulator.getClusterByFileWithId(this.getFileGuid(), this.getSegId()); if( cluster == null ){ this.frameManipulator.insert(this); } } @Override public void remove() { this.frameManipulator.remove( this.getSegGuid() ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericSymbolic.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.hydra.storage.file.source.SymbolicManipulator; public class GenericSymbolic extends ArcReparseSemanticNode implements Symbolic{ private SymbolicMeta symbolicMeta; private SymbolicManipulator symbolicManipulator; public GenericSymbolic( SymbolicManipulator symbolicManipulator ) { this.symbolicManipulator = symbolicManipulator; } public SymbolicMeta getSymbolicMeta() { return symbolicMeta; } public void setSymbolicMeta(SymbolicMeta symbolicMeta) { this.symbolicMeta = symbolicMeta; } @Override public void create() { this.symbolicManipulator.insert(this); } @Override public void remove() { this.symbolicManipulator.remove(this.guid); this.symbolicMeta.remove(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericSymbolicMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator; public class GenericSymbolicMeta implements SymbolicMeta{ private long enumId; private GUID guid; private SymbolicMetaManipulator symbolicMetaManipulator; public GenericSymbolicMeta() { } public GenericSymbolicMeta( SymbolicMetaManipulator symbolicMetaManipulator ) { this.symbolicMetaManipulator = symbolicMetaManipulator; } public GenericSymbolicMeta(long enumId, GUID guid) { this.enumId = enumId; this.guid = guid; } public long getEnumId() { return enumId; } public void setEnumId(long enumId) { this.enumId = enumId; } public GUID getGuid() { return guid; } public void setGuid(GUID guid) { this.guid = guid; } @Override public void save() { this.symbolicMetaManipulator.insert(this); } @Override public void remove() { this.symbolicMetaManipulator.remove(this.guid); } public String toString() { return "GenericSymbolicMeta{enumId = " + enumId + ", guid = " + guid + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/LocalCluster.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.hydra.storage.file.source.LocalClusterManipulator; import java.time.LocalDateTime; public interface LocalCluster extends 
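// Lifecycle sketch for the symbolic types above (hypothetical `allotment`):
//   Symbolic sym = allotment.newSymbolic();
//   sym.setSymbolicMeta( allotment.newSymbolicMeta() );
//   sym.create();   // inserts via SymbolicManipulator
//   sym.remove();   // removes the link row, then its SymbolicMeta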
Cluster { LocalDateTime getCreateTime(); void setCreateTime(LocalDateTime createTime); LocalDateTime getUpdateTime(); void setUpdateTime(LocalDateTime updateTime); String getSourceName(); void setSourceName(String sourceName); @Override default LocalCluster evinceLocalCluster() { return this; } void setLocalClusterManipulator(LocalClusterManipulator localClusterManipulator); long getDefinitionSize(); void setDefinitionSize( long definitionSize ); long getFileStartOffset(); void setFileStartOffset( long fileStartOffset ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/LocalClusterMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; public interface LocalClusterMeta { long getEnumId(); void setEnumId(long enumId); GUID getGuid(); void setGuid(GUID guid); String getKey(); void setKey(String key); String getValue(); void setValue( String value ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/RemoteCluster.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.source.RemoteClusterManipulator; public interface RemoteCluster extends Cluster { GUID getDeviceGuid(); void setDeviceGuid(GUID deviceGuid); void setRemoteClusterManipulator(RemoteClusterManipulator remoteClusterManipulator); @Override default RemoteCluster evinceRemoteCluster() { return this; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ReparseSemanticNode.java ================================================ package com.pinecone.hydra.storage.file.entity; import java.time.LocalDateTime; import com.pinecone.framework.util.id.GUID; public interface ReparseSemanticNode extends ElementNode { long getEnumId(); void setEnumId(long enumId); GUID getGuid(); void setGuid(GUID guid); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); String getName(); void setName( String name ); String getReparsedPoint(); void setReparsedPoint( String reparsedPoint ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/Symbolic.java ================================================ package com.pinecone.hydra.storage.file.entity; public interface Symbolic extends ReparseSemanticNode { SymbolicMeta getSymbolicMeta(); void setSymbolicMeta( SymbolicMeta symbolicMeta ); void create(); void remove(); @Override default Symbolic evinceSymbolic() { return this; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/SymbolicMeta.java ================================================ package com.pinecone.hydra.storage.file.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface SymbolicMeta extends Pinenut { long getEnumId(); void setEnumId( long enumId ); GUID getGuid(); void setGuid(GUID guid); void save(); void remove(); } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ArchNativeExternalFileObject.java ================================================ package com.pinecone.hydra.storage.file.external; import java.io.File; import java.net.URI; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ArchElementNode; public abstract class ArchNativeExternalFileObject extends ArchElementNode implements ExternalFileObject { protected File mNativeFile; public ArchNativeExternalFileObject( File file ) { this.mNativeFile = file; this.name = file.getName(); long lastModified = file.lastModified(); this.updateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(lastModified), ZoneId.systemDefault()); this.createTime = this.updateTime; } @Override public KOMFileSystem parentFileSystem() { return null; } public File getNativeFile() { return this.mNativeFile; } @Override public URI toURI() { return this.mNativeFile.toURI(); } public String getURI() { return this.toURI().toString(); } @Override public String getPath() { return this.mNativeFile.getPath(); } @Override public boolean delete() { return this.mNativeFile.delete(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFile.java ================================================ package com.pinecone.hydra.storage.file.external; import com.pinecone.hydra.storage.UFile; import java.io.File; import java.net.URI; public interface ExternalFile extends ExternalFileObject, UFile { File getNativeFile(); URI toURI(); String getName(); String getPath(); boolean delete(); default boolean exists() { return this.getNativeFile().exists(); } @Override default Object getNativeHandle() { return this.getNativeFile(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFileObject.java ================================================ package com.pinecone.hydra.storage.file.external; import java.net.URI; import com.pinecone.hydra.storage.file.entity.ElementNode; public interface ExternalFileObject extends ElementNode { URI toURI(); @Override String getName(); String getPath(); boolean delete(); Object getNativeHandle(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFileSystemInstrument.java ================================================ package com.pinecone.hydra.storage.file.external; import com.pinecone.framework.system.regime.Instrument; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.ExternalSymbolic; import java.io.IOException; public interface ExternalFileSystemInstrument extends Instrument { void insertExternalSymbolic( ExternalSymbolic externalSymbolic ); void createExternalSymbolic( String folderPath, String externalSymbolicName,String reparsedPoint ); ElementNode queryElement( String path ); void copy( String sourcePath, String destinationPath ) throws IOException; } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFolder.java ================================================ package com.pinecone.hydra.storage.file.external; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import java.io.File; import java.net.URI; import java.util.List; public interface ExternalFolder extends ExternalFileObject { File getNativeFile(); URI toURI(); String getName(); String getPath(); String[] list(); File[] listFiles(); List<FileTreeNode> listItem(); boolean delete(); @Override default Object getNativeHandle() { return this.getNativeFile(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalSymbolicSelector.java ================================================ package com.pinecone.hydra.storage.file.external; import com.pinecone.hydra.system.ko.kom.ReparsePointSelector; public interface ExternalSymbolicSelector extends ReparsePointSelector { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/GenericNativeExternalFile.java ================================================ package com.pinecone.hydra.storage.file.external; import java.io.File; public class GenericNativeExternalFile extends ArchNativeExternalFileObject implements ExternalFile { public GenericNativeExternalFile( File file ) { super( file ); } @Override public Number size() { return this.mNativeFile.length(); /* length of this file in bytes; getTotalSpace() would report the whole partition */ } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/GenericNativeExternalFolder.java ================================================ package com.pinecone.hydra.storage.file.external; import com.pinecone.framework.util.io.FileUtils; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import java.io.File; import java.io.IOException; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.List; public class GenericNativeExternalFolder extends ArchNativeExternalFileObject implements ExternalFolder { public GenericNativeExternalFolder( File file ){ super( file ); } @Override public String[] list() { return this.mNativeFile.list(); } @Override public File[] listFiles() { return this.mNativeFile.listFiles(); } @Override public List<FileTreeNode> listItem() { ArrayList<FileTreeNode> fileTreeNodes = new ArrayList<>(); File[] files = this.listFiles(); if( files != null ){ /* listFiles() returns null for non-directories and I/O errors */ for( int i = 0; i < files.length; ++i ){ File file = files[i]; if( file.isDirectory() ){ fileTreeNodes.add( new GenericNativeExternalFolder(file) ); } else { fileTreeNodes.add( new GenericNativeExternalFile( file ) ); } } } return fileTreeNodes; } @Override public boolean delete() { try { FileUtils.purgeDirectory( this.mNativeFile ); } catch ( IOException e ) { return false; } return true; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/KenExternalFileSystemInstrument.java ================================================ package com.pinecone.hydra.storage.file.external; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.storage.StorageConstants; import com.pinecone.hydra.storage.file.KOMFileSystem; 
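// Usage sketch for the instrument defined in this file (the paths are hypothetical):
//   ExternalFileSystemInstrument ins = new KenExternalFileSystemInstrument( fileSystem );
//   ins.createExternalSymbolic( "/data", "mirror", "/mnt/nas/mirror" );  // link a native directory
//   ElementNode n = ins.queryElement( "/data/mirror/2024/log.txt" );     // resolves through the symbolic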
import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.ExternalSymbolic; import com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator; import com.pinecone.hydra.storage.file.source.FileManipulator; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.storage.file.source.FolderManipulator; import com.pinecone.hydra.storage.natives.NativeExternalFileSystems; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.kom.PathSelector; import com.pinecone.hydra.unit.imperium.ImperialTree; import java.io.File; import java.io.IOException; import java.util.List; public class KenExternalFileSystemInstrument implements ExternalFileSystemInstrument { protected KOMFileSystem fileSystem; protected PathResolver pathResolver; protected PathSelector pathSelector; protected FileMasterManipulator fileMasterManipulator; protected FolderManipulator folderManipulator; protected FileManipulator fileManipulator; protected ExternalSymbolicManipulator externalSymbolicManipulator; protected ImperialTree imperialTree; public KenExternalFileSystemInstrument( KOMFileSystem fileSystem ){ this.fileSystem = fileSystem; this.pathResolver = new KOPathResolver( fileSystem.getConfig() ); this.fileMasterManipulator = this.fileSystem.getFileMasterManipulator(); this.fileManipulator = this.fileMasterManipulator.getFileManipulator(); this.folderManipulator = this.fileMasterManipulator.getFolderManipulator(); this.externalSymbolicManipulator = this.fileMasterManipulator.getExternalSymbolicManipulator(); this.imperialTree = fileSystem.getMasterTrieTree(); this.pathSelector = new KenExternalSymbolicSelector( this.pathResolver, this.fileSystem.getMasterTrieTree(),this.folderManipulator, new GUIDNameManipulator[] { this.fileManipulator }, this.externalSymbolicManipulator ); } @Override public ElementNode queryElement( String path ) { GUID guid = this.queryGUIDByPath(path); if( guid == null ) { return null; } ExternalSymbolic externalSymbolic = this.externalSymbolicManipulator.getSymbolicByGuid(guid); String externalPath = this.fileSystem.getPath(externalSymbolic.getGuid()); String remainingPath = path.substring(externalPath.length()).replaceFirst( StorageConstants.PathSeparator, "" ); String realFilePath = externalSymbolic.getReparsedPoint() + StorageConstants.PathSeparator + remainingPath; File file = new File(realFilePath); if( file.isDirectory() ){ return new GenericNativeExternalFolder( file ); } else { return new GenericNativeExternalFile( file ); } } @Override public void insertExternalSymbolic( ExternalSymbolic externalSymbolic ) { this.externalSymbolicManipulator.insert( externalSymbolic ); } @Override public void createExternalSymbolic( String folderPath, String externalSymbolicName, String reparsedPoint ) { ElementNode elementNode = this.fileSystem.queryElement(folderPath); elementNode.evinceFolder().createExternalSymbolic( externalSymbolicName,reparsedPoint ); } @Override public void copy( String sourcePath, String destinationPath ) throws IOException { NativeExternalFileSystems.copy( sourcePath, destinationPath ); } private GUID queryGUIDByPath( String path ) { return this.queryGUIDByNS( path, null, null ); } private GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) { if( szTargetSep != null ) { path = path.replace( szBadSep, szTargetSep ); } String[] parts = this.pathResolver.segmentPathParts( path 
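// Resolution order in queryGUIDByNS: normalize any foreign separators, segment
// and resolve the path, then try the imperial tree's direct path index first,
// falling back to the selector's DFS (which also matches external symbolics).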
); List resolvedParts = this.pathResolver.resolvePath( parts ); path = this.pathResolver.assemblePath( resolvedParts ); GUID guid = this.imperialTree.queryGUIDByPath( path ); if ( guid != null ){ return guid; } guid = this.pathSelector.searchGUID( resolvedParts ); return guid; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/KenExternalSymbolicSelector.java ================================================ package com.pinecone.hydra.storage.file.external; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.name.path.PathResolver; import com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector; import com.pinecone.hydra.system.ko.kom.ReparseLinkSelector; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import java.util.List; public class KenExternalSymbolicSelector extends ReparseLinkSelector implements ExternalSymbolicSelector { protected ExternalSymbolicManipulator mExternalSymbolicManipulator; public KenExternalSymbolicSelector( MultiFolderPathSelector pathSelector ) { super( pathSelector ); } public KenExternalSymbolicSelector( PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans, ExternalSymbolicManipulator externalSymbolicManipulator ) { super( pathResolver, trieTree, new GUIDNameManipulator[]{ dirMan }, fileMans ); this.mExternalSymbolicManipulator = externalSymbolicManipulator; } public KenExternalSymbolicSelector( PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans, ExternalSymbolicManipulator externalSymbolicManipulator ) { super( pathResolver, trieTree, dirMans, fileMans ); this.mExternalSymbolicManipulator = externalSymbolicManipulator; } @Override public Object search( String[] parts ) { List resolvedParts = this.pathResolver.resolvePath(parts); return this.dfsSearch( resolvedParts ); } @Override public ReparseLinkNode searchLinkNode(String[] parts ) { Object result = this.search( parts ); if( result instanceof ReparseLinkNode ) { return (ReparseLinkNode) result; } return null; } @Override protected Object beforeDFSTermination( String currentPart, GUID guid ) { Object obj = super.beforeDFSTermination( currentPart, guid ); if ( obj == null ) { boolean b = this.mExternalSymbolicManipulator.isSymbolicMatchedByNameGuid( currentPart, guid ); if ( b ) { return guid; } } return guid; } @Override protected Object tryTerminationBlock( String currentPart, GUID guid ) { Object obj = super.tryTerminationBlock( currentPart, guid ); if ( obj == null ) { boolean b = this.mExternalSymbolicManipulator.isSymbolicMatchedByNameGuid( currentPart, guid ); if ( b ) { return guid; } } return obj; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/marshaling/ClusterGroup.java ================================================ package com.pinecone.hydra.storage.file.marshaling; import com.pinecone.framework.system.prototype.Pinenut; public interface ClusterGroup extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/marshaling/StripedClusterGroup.java 
================================================ package com.pinecone.hydra.storage.file.marshaling; public interface StripedClusterGroup extends ClusterGroup { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/ArchFileSystemOperator.java ================================================ package com.pinecone.hydra.storage.file.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ArchElementNode; import com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; import java.time.LocalDateTime; public abstract class ArchFileSystemOperator implements FileSystemOperator{ protected KOMFileSystem fileSystem; protected FileSystemOperatorFactory factory; protected ImperialTree imperialTree; protected FileSystemAttributeManipulator fileSystemAttributeManipulator; protected FileMasterManipulator fileMasterManipulator; public ArchFileSystemOperator( FileSystemOperatorFactory factory ) { this( factory.getMasterManipulator(), (KOMFileSystem) factory.getFileSystem() ); this.factory = factory; } public ArchFileSystemOperator( FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) { this.imperialTree = fileSystem.getMasterTrieTree(); this.fileSystemAttributeManipulator = masterManipulator.getAttributeManipulator(); this.fileSystem = fileSystem; this.fileMasterManipulator = masterManipulator; } protected ImperialTreeNode affirmPreinsertionInitialize(TreeNode treeNode ) { ArchElementNode entityNode = (ArchElementNode) treeNode; GUID guid72 = entityNode.getGuid(); // Case 1: Dummy config node. 
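// Freshly constructed nodes arrive without a GUID: allocate one and stamp
// createTime here; updateTime is refreshed unconditionally before insertion.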
GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator(); if( guid72 == null ) { guid72 = guidAllocator.nextGUID(); entityNode.setGuid( guid72 ); entityNode.setCreateTime( LocalDateTime.now() ); } entityNode.setUpdateTime( LocalDateTime.now() ); ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode(); imperialTreeNode.setGuid( guid72 ); imperialTreeNode.setType( UOIUtils.createLocalJavaClass( entityNode.getClass().getName() ) ); return imperialTreeNode; } public FileSystemOperatorFactory getOperatorFactory() { return this.factory; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/FileSystemOperator.java ================================================ package com.pinecone.hydra.storage.file.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface FileSystemOperator extends TreeNodeOperator { @Override FileTreeNode get(GUID guid ); FileTreeNode get( GUID guid, int depth ); void rename( GUID fileGuid, String newName ); @Override FileTreeNode getAsRootDepth( GUID guid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/FileSystemOperatorFactory.java ================================================ package com.pinecone.hydra.storage.file.operator; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ExternalSymbolic; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.unit.imperium.operator.OperatorFactory; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; public interface FileSystemOperatorFactory extends OperatorFactory { String DefaultFile = FileNode.class.getSimpleName(); String DefaultFolder = Folder.class.getSimpleName(); String DefaultExternalSymbolic = ExternalSymbolic.class.getSimpleName(); void register( String typeName, TreeNodeOperator functionalNodeOperation ); void registerMetaType( Class clazz, String metaType ); void registerMetaType( String classFullName, String metaType ); String getMetaType( String classFullName ); FileSystemOperator getOperator(String typeName ); KOMFileSystem getFileSystem(); FileMasterManipulator getMasterManipulator(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericExternalSymbolicOperator.java ================================================ package com.pinecone.hydra.storage.file.operator; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ExternalSymbolic; import com.pinecone.hydra.storage.file.entity.FileSystemAttributes; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.GenericExternalSymbolic; import com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import 
com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.lang.reflect.Field; import java.util.List; import java.util.Objects; public class GenericExternalSymbolicOperator extends ArchFileSystemOperator{ private ExternalSymbolicManipulator externalSymbolicManipulator; public GenericExternalSymbolicOperator( FileSystemOperatorFactory factory ) { this( factory.getMasterManipulator(), (KOMFileSystem) factory.getFileSystem() ); this.factory = factory; } public GenericExternalSymbolicOperator(FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) { super( masterManipulator, fileSystem ); this.externalSymbolicManipulator = this.fileMasterManipulator.getExternalSymbolicManipulator(); } @Override public GUID insert(TreeNode treeNode) { ExternalSymbolic externalSymbolic = (ExternalSymbolic) treeNode; ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode ); GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator(); GUID guid = externalSymbolic.getGuid(); FileSystemAttributes attributes = externalSymbolic.getAttributes(); GUID attrbutesGuid = guidAllocator.nextGUID(); if ( attributes != null ){ attributes.setGuid(attrbutesGuid); this.fileSystemAttributeManipulator.insert(attributes); } else { attrbutesGuid = null; } GUID fileMetaGuid = guidAllocator.nextGUID(); imperialTreeNode.setBaseDataGUID(attrbutesGuid); imperialTreeNode.setNodeMetadataGUID(fileMetaGuid); this.imperialTree.insert(imperialTreeNode); this.externalSymbolicManipulator.insert( externalSymbolic ); return guid; } @Override public void purge(GUID guid) { GUIDImperialTrieNode node = this.imperialTree.getNode(guid); this.imperialTree.purge( guid ); this.externalSymbolicManipulator.remove( guid ); this.imperialTree.removeCachePath(guid); } @Override public FileTreeNode get( GUID guid ) { return (ExternalSymbolic) this.getFileTreeNodeWideData( guid ); } @Override public FileTreeNode get( GUID guid, int depth ) { return this.get( guid ); } @Override public void rename(GUID fileGuid, String newName) { } @Override public FileTreeNode getAsRootDepth(GUID guid) { return this.getFileTreeNodeWideData(guid); } @Override public void update(TreeNode treeNode) { } @Override public void updateName(GUID guid, String name) { } protected FileTreeNode getFileTreeNodeWideData(GUID guid ){ GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); ExternalSymbolic cn = this.externalSymbolicManipulator.getSymbolicByGuid( guid ); if( cn instanceof GenericExternalSymbolic) { ((GenericExternalSymbolic) cn).apply( this.externalSymbolicManipulator ); } //Notice: Registry attributes is difference from other tree, -- that is, same as DOM; // So in this case, this field is deprecated. 
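// Attributes are therefore looked up by the node's own GUID (DOM-style),
// rather than through a separate attributes GUID stored on the tree node.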
//Attributes attributes = this.attributesManipulator.getAttributes( node.getAttributesGUID(), cn ); FileSystemAttributes attributes = this.fileSystemAttributeManipulator.getAttributes( guid, cn ); cn.setAttributes ( attributes ); return cn; } protected void inherit( FileTreeNode self, FileTreeNode prototype ){ Class clazz = self.getClass(); Field[] fields = clazz.getDeclaredFields(); for ( Field field : fields ){ field.setAccessible(true); try { Object value1 = field.get( self ); Object value2 = field.get( prototype ); if ( Objects.isNull(value1) || (value1 instanceof List && ((List) value1).isEmpty()) ){ field.set(self,value2); } } catch ( IllegalAccessException e ) { throw new ProxyProvokeHandleException(e); } } } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericFileOperator.java ================================================ package com.pinecone.hydra.storage.file.operator; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileMeta; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileSystemAttributes; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.GenericFileNode; import com.pinecone.hydra.storage.file.source.FileManipulator; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.storage.file.source.FileMetaManipulator; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.framework.util.id.GuidAllocator; import java.lang.reflect.Field; import java.util.List; import java.util.Objects; public class GenericFileOperator extends ArchFileSystemOperator { protected FileManipulator fileManipulator; protected FileMetaManipulator fileMetaManipulator; public GenericFileOperator( FileSystemOperatorFactory factory ) { this( factory.getMasterManipulator(), (KOMFileSystem) factory.getFileSystem() ); this.factory = factory; } public GenericFileOperator( FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) { super( masterManipulator, fileSystem ); this.fileManipulator = masterManipulator.getFileManipulator(); this.fileMetaManipulator = masterManipulator.getFileMetaManipulator(); } @Override public GUID insert(TreeNode treeNode) { FileNode file = (FileNode) treeNode; ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode ); GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator(); GUID guid = file.getGuid(); FileSystemAttributes attributes = file.getAttributes(); GUID attrbutesGuid = guidAllocator.nextGUID(); if ( attributes != null ){ attributes.setGuid(attrbutesGuid); this.fileSystemAttributeManipulator.insert(attributes); } else { attrbutesGuid = null; } FileMeta fileMeta = file.getFileMeta(); GUID fileMetaGuid = guidAllocator.nextGUID(); if ( fileMeta != null ){ fileMeta.setGuid(fileMetaGuid); this.fileMetaManipulator.insert(fileMeta); } else { fileMetaGuid = null; } imperialTreeNode.setBaseDataGUID(attrbutesGuid); imperialTreeNode.setNodeMetadataGUID(fileMetaGuid); this.imperialTree.insert(imperialTreeNode); this.fileManipulator.insert(file); return guid; } @Override public void purge(GUID guid) { 
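// Purge order: remove the tree node, then the file row, then its metadata,
// and finally invalidate the cached path for this GUID.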
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        this.imperialTree.purge( guid );
        this.fileManipulator.remove( guid );
        this.fileMetaManipulator.remove( node.getNodeMetadataGUID() );
        //this.fileSystemAttributeManipulator.remove( node.getAttributesGUID() );
        this.imperialTree.removeCachePath( guid );
    }

    @Override
    public FileTreeNode get( GUID guid ) {
        FileNode fileTreeNode = this.getFileTreeNodeWideData( guid ).evinceFileNode();
        FileNode thisNode = fileTreeNode;
        while ( true ) {
            GUID affinityGuid = thisNode.getDataAffinityGuid();
            if ( affinityGuid != null ) {
                FileNode parent = this.getFileTreeNodeWideData( affinityGuid ).evinceFileNode();
                this.inherit( thisNode, parent );
                thisNode = parent;
            }
            else {
                break;
            }
        }
        return fileTreeNode;
    }

    @Override
    public FileTreeNode get( GUID guid, int depth ) {
        return this.get( guid );
    }

    @Override
    public FileTreeNode getAsRootDepth( GUID guid ) {
        return this.getFileTreeNodeWideData( guid );
    }

    @Override
    public void rename( GUID fileGuid, String newName ) {
        this.fileManipulator.rename( fileGuid, newName );
        this.imperialTree.removeCachePath( fileGuid );
    }

    @Override
    public void update( TreeNode treeNode ) {
        this.imperialTree.removeCachePath( treeNode.getGuid() );
        this.fileManipulator.update( (FileNode) treeNode );
    }

    @Override
    public void updateName( GUID guid, String name ) { }

    protected FileTreeNode getFileTreeNodeWideData( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        FileNode cn = this.fileManipulator.getFileNodeByGuid( guid );
        if ( cn instanceof GenericFileNode ) {
            ((GenericFileNode) cn).apply( this.fileSystem );
        }
        FileMeta fileMeta = this.fileMetaManipulator.getFileMetaByGuid( node.getNodeMetadataGUID() );
        // Notice: Registry attributes differ from those of other trees -- that is, they behave as in the DOM;
        // so in this case, this field is deprecated.
        //Attributes attributes = this.attributesManipulator.getAttributes( node.getAttributesGUID(), cn );
        FileSystemAttributes attributes = this.fileSystemAttributeManipulator.getAttributes( guid, cn );
        cn.setAttributes( attributes );
        cn.startDistribution( fileMeta );
        return cn;
    }

    protected void inherit( FileTreeNode self, FileTreeNode prototype ) {
        Class clazz = self.getClass();
        Field[] fields = clazz.getDeclaredFields();
        for ( Field field : fields ) {
            field.setAccessible( true );
            try {
                Object value1 = field.get( self );
                Object value2 = field.get( prototype );
                if ( Objects.isNull( value1 ) || ( value1 instanceof List && ((List) value1).isEmpty() ) ) {
                    field.set( self, value2 );
                }
            }
            catch ( IllegalAccessException e ) {
                throw new ProxyProvokeHandleException( e );
            }
        }
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericFileSystemOperatorFactory.java ================================================
package com.pinecone.hydra.storage.file.operator;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.entity.GenericFileNode;
import com.pinecone.hydra.storage.file.entity.GenericFolder;
import com.pinecone.hydra.storage.file.source.FileMasterManipulator;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class GenericFileSystemOperatorFactory implements FileSystemOperatorFactory {
    protected FileMasterManipulator fileMasterManipulator;

    @JsonIgnore
    protected KOMFileSystem fileSystem;

    protected Map<String, TreeNodeOperator> registerer = new HashMap<>();
    protected Map<String, String> metaTypeMap = new TreeMap<>();

    protected void registerDefaultMetaType( Class<?> genericType ) {
        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace( this.fileSystem.getConfig().getVersionSignature(), "" ) );
    }

    protected void registerDefaultMetaTypes() {
        this.registerDefaultMetaType( GenericFolder.class );
        this.registerDefaultMetaType( GenericFileNode.class );
    }

    public GenericFileSystemOperatorFactory( KOMFileSystem fileSystem, FileMasterManipulator fileMasterManipulator ) {
        this.fileSystem = fileSystem;
        this.fileMasterManipulator = fileMasterManipulator;
        this.registerer.put( DefaultFile, new GenericFileOperator( this ) );
        this.registerer.put( DefaultFolder, new GenericFolderOperator( this ) );
        this.registerer.put( DefaultExternalSymbolic, new GenericExternalSymbolicOperator( this ) );
        this.registerDefaultMetaTypes();
    }

    @Override
    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {
        this.registerer.put( typeName, functionalNodeOperation );
    }

    @Override
    public void registerMetaType( Class<?> clazz, String metaType ) {
        this.registerMetaType( clazz.getName(), metaType );
    }

    @Override
    public void registerMetaType( String classFullName, String metaType ) {
        this.metaTypeMap.put( classFullName, metaType );
    }

    @Override
    public String getMetaType( String classFullName ) {
        return this.metaTypeMap.get( classFullName );
    }

    @Override
    public FileSystemOperator getOperator( String typeName ) {
        //Debug.trace( this.registerer.toString() );
        return (FileSystemOperator) this.registerer.get( typeName );
    }

    @Override
    public KOMFileSystem getFileSystem() {
        return this.fileSystem;
    }

    @Override
    public FileMasterManipulator getMasterManipulator() {
        return this.fileMasterManipulator;
    }
}
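/*
 * Usage sketch (illustrative only, not part of the original source): the factory wires
 * one operator per node kind and dispatches by type name. `fileSystem` and
 * `masterManipulator` are assumed to come from the surrounding bootstrap code.
 *
 *   FileSystemOperatorFactory factory =
 *           new GenericFileSystemOperatorFactory( fileSystem, masterManipulator );
 *   FileSystemOperator op = factory.getOperator( DefaultFile );
 *   GUID guid = op.insert( someFileNode );   // dispatches to GenericFileOperator
 */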
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericFolderOperator.java ================================================
package com.pinecone.hydra.storage.file.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.entity.FileSystemAttributes;
import com.pinecone.hydra.storage.file.entity.FileTreeNode;
import com.pinecone.hydra.storage.file.entity.Folder;
import com.pinecone.hydra.storage.file.entity.FolderMeta;
import com.pinecone.hydra.storage.file.entity.GenericFolder;
import com.pinecone.hydra.storage.file.source.FileMasterManipulator;
import com.pinecone.hydra.storage.file.source.FolderManipulator;
import com.pinecone.hydra.storage.file.source.FolderMetaManipulator;
import com.pinecone.hydra.unit.imperium.ImperialTreeNode;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;
import com.pinecone.framework.util.id.GuidAllocator;

import java.util.ArrayList;
import java.util.List;

public class GenericFolderOperator extends ArchFileSystemOperator {
    private FolderManipulator folderManipulator;
    private FolderMetaManipulator folderMetaManipulator;

    public GenericFolderOperator( FileSystemOperatorFactory factory ) {
        this( factory.getMasterManipulator(), factory.getFileSystem() );
        this.factory = factory;
    }

    public GenericFolderOperator( FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) {
        super( masterManipulator, fileSystem );
        this.folderManipulator = masterManipulator.getFolderManipulator();
        this.folderMetaManipulator = masterManipulator.getFolderMetaManipulator();
    }

    @Override
    public GUID insert( TreeNode treeNode ) {
        Folder folder = (Folder) treeNode;
        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode );
        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();
        GUID guid = folder.getGuid();
        FileSystemAttributes attributes = folder.getAttributes();
        GUID attributesGuid = guidAllocator.nextGUID();
        if ( attributes != null ) {
            attributes.setGuid( attributesGuid );
            this.fileSystemAttributeManipulator.insert( attributes );
        }
        else {
            attributesGuid = null;
        }
        FolderMeta folderMeta = folder.getFolderMeta();
        GUID folderMetaGuid = guidAllocator.nextGUID();
        if ( folderMeta != null ) {
            folderMeta.setGuid( folderMetaGuid );
            this.folderMetaManipulator.insert( folderMeta );
        }
        else {
            folderMetaGuid = null;
        }
        imperialTreeNode.setNodeMetadataGUID( folderMetaGuid );
        imperialTreeNode.setBaseDataGUID( attributesGuid );
        this.imperialTree.insert( imperialTreeNode );
        this.folderManipulator.insert( folder );
        return guid;
    }

    @Override
    public void purge( GUID guid ) {
        // A namespace node must recursively purge the nodes it owns; nodes it merely
        // references are cleaned up only when no other reference remains.
        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren( guid );
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        if ( !childNodes.isEmpty() ) {
            List<GUID> subordinates = this.imperialTree.getSubordinates( guid );
            if ( !subordinates.isEmpty() ) {
                for ( GUID subordinateGuid : subordinates ) {
                    this.purge( subordinateGuid );
                }
            }
            childNodes = this.imperialTree.getChildren( guid );
            for ( GUIDImperialTrieNode childNode : childNodes ) {
                List<GUID> parentNodes = this.imperialTree.fetchParentGuids( childNode.getGuid() );
                if ( parentNodes.size() > 1 ) {
                    this.imperialTree.removeInheritance( childNode.getGuid(), guid );
                }
                else {
                    this.purge( childNode.getGuid() );
                }
            }
        }
        if ( node.getType().getObjectName().equals( GenericFolder.class.getName() ) ) {
            this.removeNode( guid );
        }
        else {
            UOI uoi = node.getType();
            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );
            if ( metaType == null ) {
                TreeNode newInstance = (TreeNode) uoi.newInstance( new Class[]{ KOMFileSystem.class }, this.fileSystem );
                metaType = newInstance.getMetaType();
            }
            FileSystemOperator operator = this.getOperatorFactory().getOperator( metaType );
            operator.purge( guid );
        }
    }

    @Override
    public FileTreeNode get( GUID guid ) {
        return this.getFolderWideData( guid, 0 );
    }

    @Override
    public FileTreeNode get( GUID guid, int depth ) {
        return this.getFolderWideData( guid, depth );
    }

    @Override
    public FileTreeNode getAsRootDepth( GUID guid ) {
        return this.getFolderWideData( guid, 0 );
    }

    @Override
    public void rename( GUID fileGuid, String newName ) {
        this.folderManipulator.rename( fileGuid, newName );
        this.imperialTree.removeCachePath( fileGuid );
    }

    @Override
    public void update( TreeNode treeNode ) {
        this.imperialTree.removeCachePath( treeNode.getGuid() );
        FileTreeNode fileTreeNode = this.get( treeNode.getGuid() );
        this.folderManipulator.update( (Folder) fileTreeNode );
    }

    @Override
    public void updateName( GUID guid, String name ) { }

    private Folder getFolderWideData( GUID guid, int depth ) {
        Folder fd = this.folderManipulator.getFolderByGuid( guid );
        if ( fd instanceof GenericFolder ) {
            ((GenericFolder) fd).apply( this.fileSystem );
        }
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        if ( depth <= 0 ) {
            List<GUIDImperialTrieNode> childNode = this.imperialTree.getChildren( guid );
            ArrayList<GUID> guids = new ArrayList<>();
            for ( GUIDImperialTrieNode n : childNode ) {
                guids.add( n.getGuid() );
            }
            ++depth;
            fd.setChildrenGuids( guids, depth );
        }
        FileSystemAttributes attributes = this.fileSystemAttributeManipulator.getAttributes( guid, fd );
        FolderMeta folderMeta = this.folderMetaManipulator.getFolderMetaByGuid( node.getNodeMetadataGUID() );
        fd.setAttributes( attributes );
        fd.setFolderMeta( folderMeta );
        return fd;
    }

    private void removeNode( GUID guid ) {
        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.folderManipulator.remove( guid );
        this.folderMetaManipulator.remove( node.getNodeMetadataGUID() );
        //this.fileSystemAttributeManipulator.remove( node.getAttributesGUID() );
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/ExternalSymbolicManipulator.java ================================================
package com.pinecone.hydra.storage.file.source;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.file.entity.ExternalSymbolic;

public interface ExternalSymbolicManipulator extends Pinenut {
    void insert( ExternalSymbolic externalSymbolic );
    void remove( GUID guid );
    ExternalSymbolic getSymbolicByGuid( GUID guid );
    ExternalSymbolic getSymbolicByNameGuid( String nodeName, GUID nodeGUID );
    boolean isSymbolicMatchedByNameGuid( String nodeName, GUID nodeGUID );
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileManipulator.java ================================================
package com.pinecone.hydra.storage.file.source;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.file.entity.ElementNode;
import com.pinecone.hydra.storage.file.entity.FileNode;
import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;

import java.util.List;

public interface FileManipulator extends GUIDNameManipulator {
    FileNode getFileNode( GUID guid, ElementNode element );
    void insert( FileNode fileNode );
    void remove( GUID guid );
    FileNode getFileNodeByGuid( GUID guid );
    List getGuidsByName( String name );
    List getGuidsByNameID( String name, GUID guid );
    List dumpGuid();
    void update( FileNode fileNode );
    void rename( GUID guid, String newName );
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileMasterManipulator.java ================================================
package com.pinecone.hydra.storage.file.source;

import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;

public interface FileMasterManipulator extends KOIMasterManipulator {
    FileSystemAttributeManipulator getAttributeManipulator();
    FileManipulator getFileManipulator();
    FileMetaManipulator getFileMetaManipulator();
    FolderManipulator getFolderManipulator();
    FolderMetaManipulator getFolderMetaManipulator();
    LocalClusterManipulator getLocalClusterManipulator();
    RemoteClusterManipulator getRemoteClusterManipulator();
    SymbolicManipulator getSymbolicManipulator();
    SymbolicMetaManipulator getSymbolicMetaManipulator();
    FolderVolumeMappingManipulator getFolderVolumeRelationManipulator();
    ExternalSymbolicManipulator getExternalSymbolicManipulator();
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileMetaManipulator.java ================================================
package com.pinecone.hydra.storage.file.source;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.file.entity.ElementNode;
import com.pinecone.hydra.storage.file.entity.FileMeta;

public interface FileMetaManipulator extends Pinenut {
    FileMeta getFileMeta( GUID guid, ElementNode element );
    void insert( FileMeta fileMeta );
    void remove( GUID guid );
    FileMeta getFileMetaByGuid( GUID guid );
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileSystemAttributeManipulator.java ================================================
package com.pinecone.hydra.storage.file.source;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.file.entity.FileSystemAttributes;
import com.pinecone.hydra.storage.file.entity.ElementNode;

import java.util.List;
import java.util.Map;

public interface FileSystemAttributeManipulator extends Pinenut {
    void insertAttribute( GUID guid, String key, String value );
    List<Map<String, String>> getAttributesByGuid( GUID guid );
    void updateAttribute( GUID guid, String key, String value );
    void remove( GUID guid );
    FileSystemAttributes getAttributes( GUID guid, ElementNode element );

    default void insert( FileSystemAttributes attributes ) {
        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {
            this.insertAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );
        }
    }

    default void update( FileSystemAttributes attributes ) {
        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {
            this.updateAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );
        }
    }

    boolean containsKey( GUID guid, String key );
    void clearAttributes( GUID guid );
    void removeAttributeWithValue( GUID guid, String key, String value );
    void removeAttribute( GUID guid, String key );
}

================================================ FILE:
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FolderManipulator.java ================================================ package com.pinecone.hydra.storage.file.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface FolderManipulator extends GUIDNameManipulator { Folder getFolder(GUID guid, ElementNode element); void insert( Folder folder ); void remove( GUID guid ); void update( Folder folder ); Folder getFolderByGuid(GUID guid); List getGuidsByName(String name ); List getGuidsByNameID( String name, GUID guid ); List dumpGuid(); boolean isFolder(GUID guid); void rename( GUID fileGuid, String newName ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FolderMetaManipulator.java ================================================ package com.pinecone.hydra.storage.file.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FolderMeta; public interface FolderMetaManipulator extends Pinenut { FolderMeta getFolderMeta(GUID guid, ElementNode element); void insert( FolderMeta folderMeta ); void remove( GUID guid ); FolderMeta getFolderMetaByGuid(GUID guid); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FolderVolumeMappingManipulator.java ================================================ package com.pinecone.hydra.storage.file.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface FolderVolumeMappingManipulator extends Pinenut { void insert( GUID folderGuid, GUID volumeGuid ); void remove( GUID folderGuid, GUID volumeGuid ); GUID getVolumeGuid( GUID folderGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/LocalClusterManipulator.java ================================================ package com.pinecone.hydra.storage.file.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.LocalCluster; import java.util.List; public interface LocalClusterManipulator extends Pinenut { LocalCluster getLocalCluster(GUID guid, ElementNode element); void insert( LocalCluster localCluster ); void remove( GUID guid ); void removeClustersByFile( GUID fileGuid ); LocalCluster getLocalClusterByGuid(GUID guid); List getLocalClusterByFileGuid(GUID guid ); LocalCluster getClusterByFileWithId(GUID fileGuid, long segId ); void update( LocalCluster localCluster ); void removeClusterByFileWithId( GUID fileGuid, long segId ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/RemoteClusterManipulator.java ================================================ package com.pinecone.hydra.storage.file.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import 
com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.RemoteCluster; import java.util.List; public interface RemoteClusterManipulator extends Pinenut { RemoteCluster getRemoteCluster(GUID guid, ElementNode element); void insert( RemoteCluster remoteCluster ); void remove( GUID guid ); void removeClustersByFile( GUID fileGuid ); RemoteCluster fetchRemoteClustersByFileGuid(GUID guid); List fetchRemoteClusterByFileGuid( GUID guid ); List fetchRemoteClusterByFileGuid( GUID guid, long offset, int pageSize ); long countRemoteClustersByFileGuid( GUID guid ); RemoteCluster getLastCluster(GUID guid ); void removeClusterByFileWithId(GUID fileGuid, long segId ); long countFileClusters( GUID fileGuid ); RemoteCluster getClusterByFileWithId( GUID fileGuid, long segId ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/SymbolicManipulator.java ================================================ package com.pinecone.hydra.storage.file.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.Symbolic; public interface SymbolicManipulator extends Pinenut { Symbolic getSymbolic(GUID guid, ElementNode element); void insert( Symbolic symbolic ); void remove( GUID guid ); Symbolic getSymbolicByGuid(GUID guid); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/SymbolicMetaManipulator.java ================================================ package com.pinecone.hydra.storage.file.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.SymbolicMeta; public interface SymbolicMetaManipulator extends Pinenut { SymbolicMeta getSymbolicMeta(GUID guid, ElementNode element); void insert( SymbolicMeta symbolicMeta ); void remove( GUID guid ); SymbolicMeta getSymbolicMetaByGuid(GUID guid); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/UniformSourceLocator.java ================================================ package com.pinecone.hydra.storage.file.transmit; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; public class UniformSourceLocator implements Pinenut { private String volumeGuid; public UniformSourceLocator() { } public UniformSourceLocator(String volumeGuid, String sourceName) { this.volumeGuid = volumeGuid; } public String getVolumeGuid() { return volumeGuid; } public void setVolumeGuid(String volumeGuid) { this.volumeGuid = volumeGuid; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/ArchFileExporterEntity.java ================================================ package com.pinecone.hydra.storage.file.transmit.exporter; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.file.KOMFileSystem; import 
com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.volume.VolumeManager; public abstract class ArchFileExporterEntity implements FileExportEntity { protected Chanface channel; protected VolumeManager volumeManager; private KOMFileSystem fileSystem; private FileNode file; public ArchFileExporterEntity(KOMFileSystem fileSystem, FileNode file, Chanface channel, VolumeManager volumeManager) { this.fileSystem = fileSystem; this.file = file; this.channel = channel; this.volumeManager = volumeManager; } @Override public KOMFileSystem getFileSystem() { return fileSystem; } @Override public void setFileSystem(KOMFileSystem fileSystem) { this.fileSystem = fileSystem; } @Override public FileNode getFile() { return file; } @Override public void setFile(FileNode file) { this.file = file; } @Override public VolumeManager getVolumeManager() { return this.volumeManager; } @Override public void setVolumeManager(VolumeManager volumeManager) { this.volumeManager = volumeManager; } @Override public Chanface getKChannel() { return this.channel; } @Override public void setKChannel(Chanface channel) { this.channel = channel; } public String toString() { return "ArchExporterEntity{fileSystem = " + fileSystem + ", file = " + file + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExport.java ================================================ package com.pinecone.hydra.storage.file.transmit.exporter; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.entity.Cluster; import java.io.IOException; public interface FileExport extends Pinenut { void export() throws IOException; void export(Cluster cluster) throws IOException; void export( Number offset, Number endSize ) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExport64.java ================================================ package com.pinecone.hydra.storage.file.transmit.exporter; public interface FileExport64 extends FileExport { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExportEntity.java ================================================ package com.pinecone.hydra.storage.file.transmit.exporter; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.volume.VolumeManager; import java.io.IOException; public interface FileExportEntity extends Pinenut { KOMFileSystem getFileSystem(); void setFileSystem( KOMFileSystem fileSystem ); VolumeManager getVolumeManager(); void setVolumeManager( VolumeManager volumeManager ); FileNode getFile(); void setFile( FileNode file ); Chanface getKChannel(); void setKChannel( Chanface channel ); void export() throws IOException; void export(Cluster cluster) throws IOException; void export( Number offset, Number endSize ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExportEntity64.java ================================================ package 
com.pinecone.hydra.storage.file.transmit.exporter;

public interface FileExportEntity64 extends FileExportEntity {
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/TitanFileExport64.java ================================================
package com.pinecone.hydra.storage.file.transmit.exporter;

import com.pinecone.framework.util.json.JSON;
import com.pinecone.hydra.storage.file.entity.Cluster;
import com.pinecone.hydra.storage.file.entity.LocalCluster;
import com.pinecone.hydra.storage.io.Chanface;
import com.pinecone.hydra.storage.TitanStorageExportIORequest;
import com.pinecone.hydra.storage.file.entity.FileNode;
import com.pinecone.hydra.storage.file.transmit.UniformSourceLocator;
import com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.ExporterEntity;
import com.pinecone.hydra.storage.volume.entity.LogicVolume;
import com.pinecone.ulf.util.guid.GUIDs;

import java.io.IOException;
import java.util.TreeMap;

public class TitanFileExport64 implements FileExport64 {
    protected Chanface channel;
    protected FileNode fileNode;
    protected VolumeManager volumeManager;
    protected UnifiedTransmitConstructor constructor;

    public TitanFileExport64( FileExportEntity64 entity ) {
        this.channel = entity.getKChannel();
        this.fileNode = entity.getFile();
        this.volumeManager = entity.getVolumeManager();
        this.constructor = new UnifiedTransmitConstructor();
    }

    @Override
    public void export() throws IOException {
        // Fetch all clusters of the file.
        TreeMap framesMap = fileNode.getClusters();
        for ( long i = 0; i < framesMap.size(); i++ ) {
            LocalCluster frame = (LocalCluster) framesMap.get( i );
            TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();
            titanExportStorageObject.setSize( frame.getSize() );
            titanExportStorageObject.setStorageObjectGuid( frame.getSegGuid() );
            String sourceName = frame.getSourceName();
            UniformSourceLocator uniformSourceLocator = JSON.unmarshal( sourceName, UniformSourceLocator.class );
            LogicVolume volume = this.volumeManager.get( GUIDs.GUID128( uniformSourceLocator.getVolumeGuid() ) );
            //volume.channelExport( titanExportStorageObject, this.channel );
            ExporterEntity exportEntity = null;
            exportEntity = this.constructor.getExportEntity( volume.getClass(), volumeManager, titanExportStorageObject, this.channel, volume );
            volume.export( exportEntity );
        }
        this.channel.close();
    }

    @Override
    public void export( Cluster cluster ) throws IOException {
        LocalCluster localCluster = (LocalCluster) cluster;
        TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();
        titanExportStorageObject.setSize( localCluster.getSize() );
        titanExportStorageObject.setStorageObjectGuid( localCluster.getSegGuid() );
        String sourceName = localCluster.getSourceName();
        UniformSourceLocator uniformSourceLocator = JSON.unmarshal( sourceName, UniformSourceLocator.class );
        LogicVolume volume = this.volumeManager.get( GUIDs.GUID128( uniformSourceLocator.getVolumeGuid() ) );
        ExporterEntity exportEntity = null;
        exportEntity = this.constructor.getExportEntity( volume.getClass(), volumeManager, titanExportStorageObject, this.channel, volume );
        volume.export( exportEntity );
    }

    @Override
    public void export( Number offset, Number endSize ) throws IOException {
        TreeMap framesMap = fileNode.getClusters();
        long startPosition = offset.longValue();
        long endPosition = offset.longValue() + endSize.longValue();
        long
currentPosition = 0; for( long i = 0;i < framesMap.size(); i++ ){ LocalCluster frame = (LocalCluster) framesMap.get(i); if (startPosition < currentPosition + frame.getDefinitionSize() && endPosition > currentPosition) { TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest(); titanExportStorageObject.setSize(frame.getSize()); titanExportStorageObject.setStorageObjectGuid(frame.getSegGuid()); String sourceName = frame.getSourceName(); UniformSourceLocator uniformSourceLocator = JSON.unmarshal(sourceName, UniformSourceLocator.class); LogicVolume volume = this.volumeManager.get(GUIDs.GUID128(uniformSourceLocator.getVolumeGuid())); ExporterEntity exportEntity = null; exportEntity = this.constructor.getExportEntity(volume.getClass(), volumeManager, titanExportStorageObject, this.channel, volume); long startOffsetInCluster = Math.max(startPosition - currentPosition, 0); long sizeToExport = Math.min(endPosition - currentPosition, frame.getDefinitionSize()) - startOffsetInCluster; volume.export(exportEntity, startOffsetInCluster, sizeToExport); } currentPosition += frame.getDefinitionSize(); if (currentPosition >= endPosition){ break; } } } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/TitanFileExportEntity64.java ================================================ package com.pinecone.hydra.storage.file.transmit.exporter; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.volume.VolumeManager; import java.io.IOException; public class TitanFileExportEntity64 extends ArchFileExporterEntity implements FileExportEntity64{ protected FileExport64 fileExport; public TitanFileExportEntity64(KOMFileSystem fileSystem, VolumeManager volumeManager, FileNode file, Chanface channel) { super(fileSystem, file, channel, volumeManager); this.fileExport = new TitanFileExport64( this ); } @Override public void export() throws IOException { this.fileExport.export(); } @Override public void export(Cluster cluster) throws IOException { this.fileExport.export(cluster); } @Override public void export(Number offset, Number endSize) { } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/ArchFileReceiveEntity.java ================================================ package com.pinecone.hydra.storage.file.transmit.receiver; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.volume.VolumeManager; public abstract class ArchFileReceiveEntity implements FileReceiveEntity { protected KOMFileSystem fileSystem; protected String destDirPath; protected FileNode file; protected Chanface channel; protected VolumeManager volumeManager; public ArchFileReceiveEntity(KOMFileSystem fileSystem, String destDirPath, FileNode file, Chanface channel, VolumeManager volumeManager ){ this.fileSystem = fileSystem; this.file = file; this.destDirPath = destDirPath; this.channel = channel; this.volumeManager = volumeManager; } @Override public KOMFileSystem getFileSystem() { return this.fileSystem; } @Override public void setFileSystem(KOMFileSystem fileSystem) { this.fileSystem = fileSystem; } 
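    // The overrides below are plain accessors over the mutable receive context
    // (destination path, file node, channel, volume manager).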
@Override public String getDestDirPath() { return this.destDirPath; } @Override public void setDestDirPath(String destDirPath) { this.destDirPath = destDirPath; } @Override public FileNode getFile() { return this.file; } @Override public void setFile(FileNode file) { this.file = file; } @Override public Chanface getChannel() { return this.channel; } @Override public void setChannel(Chanface channel) { this.channel = channel; } @Override public VolumeManager getVolumeManager() { return this.volumeManager; } @Override public void setVolumeManager(VolumeManager volumeManager) { this.volumeManager = volumeManager; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceive.java ================================================ package com.pinecone.hydra.storage.file.transmit.receiver; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import java.io.IOException; public interface FileReceive extends Pinenut { void receive(LogicVolume volume) throws IOException; void receive( LogicVolume volume, long segId ) throws IOException; void receive(LogicVolume volume, Number offset, Number endSize ) throws IOException; void randomReceive( LogicVolume volume, Number offset, Number endSize ) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceive64.java ================================================ package com.pinecone.hydra.storage.file.transmit.receiver; public interface FileReceive64 extends FileReceive { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceiveEntity.java ================================================ package com.pinecone.hydra.storage.file.transmit.receiver; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.volume.VolumeManager; import java.io.IOException; public interface FileReceiveEntity extends Pinenut { KOMFileSystem getFileSystem(); void setFileSystem( KOMFileSystem fileSystem ); String getDestDirPath(); void setDestDirPath( String destDirPath ); FileNode getFile(); void setFile( FileNode file ); Chanface getChannel(); void setChannel( Chanface channel ); VolumeManager getVolumeManager(); void setVolumeManager( VolumeManager volumeManager ); void receive() throws IOException; void receive( Number offset, Number endSize )throws IOException; void receive( long segId ) throws IOException; void randomReceive( Number offset, Number endSize ) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.file.transmit.receiver; public interface FileReceiveEntity64 extends FileReceiveEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/TitanFileReceive64.java ================================================ package com.pinecone.hydra.storage.file.transmit.receiver; import 
com.pinecone.framework.util.Bytes;
import com.pinecone.hydra.storage.file.entity.LocalCluster;
import com.pinecone.hydra.storage.file.entity.RemoteCluster;
import com.pinecone.hydra.storage.io.Chanface;
import com.pinecone.hydra.storage.StorageIOResponse;
import com.pinecone.hydra.storage.StorageReceiveIORequest;
import com.pinecone.hydra.storage.io.TitanFileChannelChanface;
import com.pinecone.hydra.storage.TitanStorageReceiveIORequest;
import com.pinecone.hydra.storage.file.ClusterSegmentNaming;
import com.pinecone.hydra.storage.file.KOFSClusterSegmentNaming;
import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.Verification;
import com.pinecone.hydra.storage.file.entity.FSNodeAllotment;
import com.pinecone.hydra.storage.file.entity.FileNode;
import com.pinecone.hydra.storage.file.transmit.UniformSourceLocator;
import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;
import com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.LogicVolume;
import com.pinecone.hydra.storage.volume.entity.ReceiveEntity;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;
import java.util.zip.CRC32;

public class TitanFileReceive64 implements FileReceive64 {
    protected KOMFileSystem mKOMFileSystem;
    protected ClusterSegmentNaming mClusterSegmentNaming;
    protected UnifiedTransmitConstructor constructor;
    protected Chanface chanface;
    protected FileNode fileNode;
    protected String destDirPath;
    protected VolumeManager volumeManager;

    public TitanFileReceive64( FileReceiveEntity64 entity ) {
        this.mKOMFileSystem = entity.getFileSystem();
        this.mClusterSegmentNaming = new KOFSClusterSegmentNaming();
        this.constructor = new UnifiedTransmitConstructor();
        this.chanface = entity.getChannel();
        this.destDirPath = entity.getDestDirPath();
        this.fileNode = entity.getFile();
        this.volumeManager = entity.getVolumeManager();
    }

    @Override
    public void receive( LogicVolume volume ) throws IOException {
        long frameSize = this.mKOMFileSystem.getConfig().getClusterSize().longValue();
        this.fileNode.setGuid( mKOMFileSystem.queryGUIDByPath( this.destDirPath ) );
        FSNodeAllotment allotment = mKOMFileSystem.getFSNodeAllotment();
        long segId = 0;
        long currentPosition = 0;
        long endSize = frameSize;
        long parityCheck = 0;
        long checkSum = 0;
        long crc32 = 0;
        StorageIOResponse storageIOResponse = null;
        while ( true ) {
            if ( currentPosition >= this.fileNode.getDefinitionSize() ) {
                break;
            }
            if ( currentPosition + endSize > this.fileNode.getDefinitionSize() ) {
                endSize = this.fileNode.getDefinitionSize() - currentPosition;
            }
            LocalCluster localCluster = allotment.newLocalCluster();
            RemoteCluster remoteCluster = allotment.newRemoteCluster( this.fileNode.getGuid(), (int) segId );
            remoteCluster.setDeviceGuid( this.mKOMFileSystem.getConfig().getLocalhostGUID() );
            remoteCluster.setSegGuid( localCluster.getSegGuid() );
            StorageReceiveIORequest storageReceiveIORequest = new TitanStorageReceiveIORequest();
            storageReceiveIORequest.setSize( this.fileNode.getDefinitionSize() );
            storageReceiveIORequest.setName( this.fileNode.getName() );
            storageReceiveIORequest.setStorageObjectGuid( localCluster.getSegGuid() );
            //storageIOResponse = volume.channelReceive( storageReceiveIORequest, kChannel, currentPosition, endSize );
            ReceiveEntity receiveEntity = null;
            receiveEntity = this.constructor.getReceiveEntity( volume.getClass(), this.volumeManager, storageReceiveIORequest, this.chanface, volume );
            storageIOResponse = volume.receive( receiveEntity, currentPosition, endSize );
            UniformSourceLocator uniformSourceLocator = new UniformSourceLocator();
            if ( storageIOResponse != null ) {
                localCluster.setCrc32( storageIOResponse.getCre32().getValue() );
                parityCheck += storageIOResponse.getParityCheck();
                checkSum += storageIOResponse.getChecksum();
                if ( segId == 0 ) {
                    crc32 = storageIOResponse.getCre32().getValue();
                }
                else {
                    crc32 = crc32 ^ storageIOResponse.getCre32().getValue();
                }
            }
            uniformSourceLocator.setVolumeGuid( volume.getGuid().toString() );
            localCluster.setSize( endSize );
            localCluster.setSourceName( uniformSourceLocator.toJSONString() );
            localCluster.setFileGuid( this.fileNode.getGuid() );
            localCluster.setSegId( segId );
            ++segId;
            localCluster.save();
            remoteCluster.save();
            currentPosition += endSize;
        }
        this.fileNode.setPhysicalSize( currentPosition );
        this.fileNode.setLogicSize( currentPosition );
        this.fileNode.setChecksum( checkSum );
        this.fileNode.setCrc32Xor( crc32 );
        this.fileNode.setParityCheck( (int) parityCheck );
        this.mKOMFileSystem.update( this.fileNode );
        // Verification verification = this.getVerification();
        // fileNode.setChecksum( verification.getChecksum() );
        // fileNode.setParityCheck( verification.getParityCheck() );
        // fileNode.setCrc32Xor( Long.toHexString(verification.getCrc32().getValue()) );
        // mKOMFileSystem.update( fileNode );
    }

    @Override
    public void receive( LogicVolume volume, long segId ) throws IOException {
        long frameSize = this.mKOMFileSystem.getConfig().getClusterSize().longValue();
        FSNodeAllotment allotment = mKOMFileSystem.getFSNodeAllotment();
        //this.mKOMFileSystem.deleteCluster( this.fileNode, segId );
        LocalCluster localCluster = (LocalCluster) this.mKOMFileSystem.getClusterByFileWithId( this.fileNode.getGuid(), segId );
        long endSize = frameSize;
        long currentPosition = 0;
        if ( currentPosition + endSize > localCluster.getSize() ) {
            endSize = localCluster.getSize() - currentPosition;
        }
        //Debug.trace( "Updated cluster size: " + endSize );
        RemoteCluster remoteCluster = allotment.newRemoteCluster( this.fileNode.getGuid(), (int) segId );
        remoteCluster.setDeviceGuid( this.mKOMFileSystem.getConfig().getLocalhostGUID() );
        remoteCluster.setSegGuid( localCluster.getSegGuid() );
        StorageReceiveIORequest storageReceiveIORequest = new TitanStorageReceiveIORequest();
        storageReceiveIORequest.setSize( this.fileNode.getDefinitionSize() );
        storageReceiveIORequest.setName( this.fileNode.getName() );
        storageReceiveIORequest.setStorageObjectGuid( localCluster.getSegGuid() );
        StorageIOResponse storageIOResponse = null;
        ReceiveEntity receiveEntity = null;
        receiveEntity = this.constructor.getReceiveEntity( volume.getClass(), this.volumeManager, storageReceiveIORequest, this.chanface, volume );
        storageIOResponse = volume.receive( receiveEntity, currentPosition, endSize );
        UniformSourceLocator uniformSourceLocator = new UniformSourceLocator();
        if ( storageIOResponse != null ) {
            localCluster.setCrc32( storageIOResponse.getCre32().getValue() );
        }
        uniformSourceLocator.setVolumeGuid( volume.getGuid().toString() );
        localCluster.setSize( endSize );
        localCluster.setSourceName( uniformSourceLocator.toJSONString() );
        localCluster.setFileGuid( this.fileNode.getGuid() );
        localCluster.setSegId( segId );
        localCluster.save();
        remoteCluster.save();
    }

    @Override
    public void receive( LogicVolume volume, Number offset, Number endSize ) throws IOException { }

    @Override
    public void randomReceive( LogicVolume volume, Number offset, Number endSize ) throws IOException {
        long frameSize = this.mKOMFileSystem.getConfig().getClusterSize().longValue();
        this.fileNode.setGuid( mKOMFileSystem.queryGUIDByPath( this.destDirPath ) );
        FSNodeAllotment allotment = mKOMFileSystem.getFSNodeAllotment();
        long segId = offset.longValue() / frameSize + 1;
        long startPosition = offset.longValue();
        long endPosition = startPosition + endSize.longValue();
        long frameTerminatePosition = segId * frameSize;
        LocalCluster frame = (LocalCluster) this.mKOMFileSystem.getClusterByFileWithId( this.fileNode.getGuid(), segId );
        if ( frame == null ) {
            frame = allotment.newLocalCluster();
            RemoteCluster remoteCluster = allotment.newRemoteCluster( this.fileNode.getGuid(), (int) segId );
            remoteCluster.setDeviceGuid( this.mKOMFileSystem.getConfig().getLocalhostGUID() );
            remoteCluster.setSegGuid( frame.getSegGuid() );
            remoteCluster.save();
        }
        if ( endPosition <= frameTerminatePosition + frameSize ) {
            StorageReceiveIORequest storageReceiveIORequest = new TitanStorageReceiveIORequest();
            storageReceiveIORequest.setSize( this.fileNode.getDefinitionSize() );
            storageReceiveIORequest.setName( this.fileNode.getName() );
            storageReceiveIORequest.setStorageObjectGuid( frame.getSegGuid() );
            ReceiveEntity receiveEntity = null;
            receiveEntity = this.constructor.getReceiveEntity( volume.getClass(), this.volumeManager, storageReceiveIORequest, this.chanface, volume );
            volume.randomReceive( receiveEntity, startPosition, endSize );
            UniformSourceLocator uniformSourceLocator = new UniformSourceLocator();
            uniformSourceLocator.setVolumeGuid( volume.getGuid().toString() );
            frame.setSize( frame.getSize() + endSize.longValue() );
            frame.setSourceName( uniformSourceLocator.toJSONString() );
            frame.setFileGuid( this.fileNode.getGuid() );
            frame.setSegId( segId );
            frame.save();
        }
        else {
            long midPosition = Math.min( frameTerminatePosition + frameSize, endPosition );
            this.randomReceive( volume, startPosition, midPosition - startPosition );
            if ( midPosition < endPosition ) {
                this.randomReceive( volume, midPosition, endPosition - midPosition );
            }
        }
    }

//    Verification getVerification() throws IOException {
//        File tempFile = File.createTempFile("temp",".temp");
//        FileNode fileNode = (FileNode)this.mKOMFileSystem.get(this.mKOMFileSystem.queryGUIDByPath(this.destDirPath));
//        FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);
//        TitanFileChannelChanface kChannel = new TitanFileChannelChanface(channel);
//        TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64(this.mKOMFileSystem, this.volumeManager, fileNode, kChannel);
//        this.mKOMFileSystem.export( exportEntity );
//
//        return getVerification(tempFile);
//    }

    private Verification getVerification( File tempFile ) throws IOException {
        Verification verification = new Verification();
        try ( BufferedInputStream bis = new BufferedInputStream( new FileInputStream( tempFile ) ) ) {
            CRC32 crc = new CRC32();
            long checksum = 0;
            int parityCheck = 0;
            // Read multiple bytes at a time through a buffer.
            byte[] buffer = new byte[8192]; // 8 KB buffer
            int bytesRead;
            while ( (bytesRead = bis.read( buffer )) != -1 ) {
                for ( int i = 0; i < bytesRead; i++ ) {
                    byte b = buffer[i];
                    // Process each byte of the batch.
                    parityCheck += Bytes.calculateParity( b );
                    checksum += b & 0xFF;
                    crc.update( b );
                }
            }
            verification.setChecksum( checksum );
            verification.setCrc32( crc );
            verification.setParityCheck( parityCheck );
        }
        return verification;
    }
}
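/*
 * Receive-pipeline sketch (illustrative only; `entity` stands for a
 * FileReceiveEntity64 built as in TitanFileReceiveEntity64 below):
 *
 *   FileReceive64 receive = new TitanFileReceive64( entity );
 *   receive.receive( volume );       // whole file, cluster by cluster
 *   receive.receive( volume, 3 );    // re-ingest a single cluster (segId 3)
 *
 * Each pass slices the file into clusterSize frames, allocates a LocalCluster /
 * RemoteCluster pair per frame, and folds the per-frame CRC32 values together
 * with XOR into the file-level crc32Xor.
 */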
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/TitanFileReceiveEntity64.java ================================================
package com.pinecone.hydra.storage.file.transmit.receiver;

import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.io.Chanface;
import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.entity.FileNode;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.LogicVolume;

import java.io.IOException;

public class TitanFileReceiveEntity64 extends ArchFileReceiveEntity implements FileReceiveEntity64 {
    protected FileReceive fileReceive;

    public TitanFileReceiveEntity64( KOMFileSystem fileSystem, String destDirPath, FileNode file, Chanface channel, VolumeManager volumeManager ) {
        super( fileSystem, destDirPath, file, channel, volumeManager );
        this.fileReceive = new TitanFileReceive64( this );
    }

    @Override
    public void receive() throws IOException {
        this.fileSystem.affirmFileNode( this.destDirPath );
        GUID volumeGuid = this.fileSystem.getMappingVolume( this.destDirPath );
        LogicVolume volume = this.volumeManager.get( volumeGuid );
        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ) {
            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );
            Debug.trace( "Insufficient capacity" );
            return;
        }
        volume.deductCapacity( this.file.getDefinitionSize() );
        this.fileReceive.receive( volume );
    }

    @Override
    public void receive( Number offset, Number endSize ) throws IOException {
        this.fileSystem.affirmFileNode( this.destDirPath );
        GUID volumeGuid = this.fileSystem.getMappingVolume( this.destDirPath );
        LogicVolume volume = this.volumeManager.get( volumeGuid );
        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ) {
            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );
            Debug.trace( "Insufficient capacity" );
            return;
        }
        volume.deductCapacity( this.file.getDefinitionSize() );
        this.fileReceive.receive( volume, offset, endSize );
    }

    @Override
    public void receive( long segId ) throws IOException {
        this.fileSystem.affirmFileNode( this.destDirPath );
        GUID volumeGuid = this.fileSystem.getMappingVolume( this.destDirPath );
        LogicVolume volume = this.volumeManager.get( volumeGuid );
        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ) {
            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );
            Debug.trace( "Insufficient capacity" );
            return;
        }
        volume.deductCapacity( this.file.getDefinitionSize() );
        this.fileReceive.receive( volume, segId );
    }

    @Override
    public void randomReceive( Number offset, Number endSize ) throws IOException {
        this.fileSystem.affirmFileNode( this.destDirPath );
        GUID volumeGuid = this.fileSystem.getMappingVolume( this.destDirPath );
        LogicVolume volume = this.volumeManager.get( volumeGuid );
        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ) {
            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );
            Debug.trace( "Insufficient capacity" );
            return;
        }
        volume.deductCapacity( this.file.getDefinitionSize() );
        this.fileReceive.randomReceive( volume, offset, endSize );
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/Chanface.java ================================================
package com.pinecone.hydra.storage.io;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
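/*
 * Usage sketch (illustrative only, not part of the original source; the file
 * names and the copy loop are assumptions). A Chanface abstracts a byte
 * channel, so a file-to-file copy can be written against the interface alone:
 *
 *   FileChannel in  = FileChannel.open( Paths.get("src.bin"), StandardOpenOption.READ );
 *   FileChannel out = FileChannel.open( Paths.get("dst.bin"), StandardOpenOption.WRITE, StandardOpenOption.CREATE );
 *   Chanface source = new TitanFileChannelChanface( in );
 *   Chanface sink   = new TitanFileChannelChanface( out );
 *   ByteBuffer buf  = ByteBuffer.allocate( 8192 );
 *   while ( source.read( buf ) > 0 ) {
 *       buf.flip();
 *       sink.write( buf );
 *       buf.clear();
 *   }
 *   source.close();
 *   sink.close();
 */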
public interface Chanface extends Pinenut {
    void position( long position ) throws IOException;

    int read( ByteBuffer buffer ) throws IOException;
    int read( ChanfaceReader reader, int size, long offset ) throws IOException;
    int read( byte[] buffer, int size, long offset ) throws IOException;
    // int read( ChanfaceReader reader, int size, long offset ) throws IOException;

    int write( ByteBuffer buffer ) throws IOException;
    int write( byte[] buffer, int startPosition, int endSize ) throws IOException;
    int write( byte[] buffer, List<CacheBlock> writableCacheBlocks ) throws IOException;

    long position() throws IOException;
    void close() throws IOException;

    Object getNativeFace();
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/ChanfaceReader.java ================================================
package com.pinecone.hydra.storage.io;

import java.nio.ByteBuffer;

import com.pinecone.framework.system.prototype.Pinenut;

public interface ChanfaceReader extends Pinenut {
    void afterRead( ByteBuffer out );
    // void afterRead( byte[] out );
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/ChannelRecalled.java ================================================
package com.pinecone.hydra.storage.io;

import com.pinecone.framework.system.prototype.Pinenut;

public interface ChannelRecalled extends Pinenut {
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanFileChannelChanface.java ================================================
package com.pinecone.hydra.storage.io;

import com.pinecone.framework.system.NotImplementedException;
import com.pinecone.hydra.storage.RandomAccessChanface;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlockStatus;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;

public class TitanFileChannelChanface implements RandomAccessChanface {
    private final FileChannel channel;
    private final ReentrantLock reentrantLock;

    public TitanFileChannelChanface( FileChannel channel ) {
        this.channel = channel;
        this.reentrantLock = new ReentrantLock();
    }

    @Override
    public void position( long position ) throws IOException {
        this.channel.position( position );
    }

    @Override
    public int read( ByteBuffer buffer ) throws IOException {
        return this.channel.read( buffer );
    }

    @Override
    public int read( ChanfaceReader reader, int size, long offset ) throws IOException {
        this.reentrantLock.lock();
        int read = 0;
        try {
            this.channel.position( offset );
            ByteBuffer buffer = ByteBuffer.allocateDirect( size );
            read = this.channel.read( buffer );
            reader.afterRead( buffer );
        }
        finally {
            this.reentrantLock.unlock();
        }
        return read;
    }

    @Override
    public int read( byte[] buffer, int size, long offset ) throws IOException {
        return 0;
    }

    @Override
    public int write( ByteBuffer buffer ) throws IOException {
        return this.channel.write( buffer );
    }

    @Override
    public int write( byte[] buffer, int startPosition, int endSize ) throws IOException {
        ByteBuffer byteBuffer = this.copyToTemporaryBuffer( buffer, startPosition, endSize );
        return this.channel.write( byteBuffer );
    }

    @Override
    public int write( byte[] buffer, List<CacheBlock> writableCacheBlocks ) throws IOException {
        ByteBuffer byteBuffer = this.mergeArrays( buffer, writableCacheBlocks );
        return this.channel.write( byteBuffer );
    }

    @Override
    public long position() throws IOException {
        return this.channel.position();
    }

    @Override
    public void close() throws IOException {
        this.channel.close();
    }

    @Override
    public void mark( int readlimit ) {
        throw new NotImplementedException();
    }

    @Override
    public void reset() throws IOException {
        throw new NotImplementedException();
    }

    private ByteBuffer copyToTemporaryBuffer( byte[] buffer, int startPosition, int endSize ) {
        ByteBuffer temporaryBuffer = ByteBuffer.allocate( endSize );
        temporaryBuffer.put( buffer, startPosition, endSize );
        return temporaryBuffer;
    }

    private ByteBuffer mergeArrays( byte[] buffer, List<CacheBlock> writableCacheBlocks ) {
        // Compute the total length of all cache blocks.
        int totalLength = 0;
        for ( CacheBlock cacheBlock : writableCacheBlocks ) {
            totalLength += cacheBlock.getValidByteEnd().intValue() - cacheBlock.getValidByteStart().intValue();
        }
        // Create a ByteBuffer to hold the merged data.
        ByteBuffer mergedBuffer = ByteBuffer.allocate( totalLength );
        // Copy the data from the source buffer into mergedBuffer.
        for ( CacheBlock cacheBlock : writableCacheBlocks ) {
            int start = cacheBlock.getValidByteStart().intValue();
            int end = cacheBlock.getValidByteEnd().intValue();
            int bufferSize = end - start;
            // Copy this block's bytes into mergedBuffer.
            mergedBuffer.put( buffer, start, bufferSize );
            // Mark the cache block as Free.
            cacheBlock.setStatus( CacheBlockStatus.Free );
        }
        // Prepare mergedBuffer for reading.
        mergedBuffer.flip();
        return mergedBuffer;
    }

    @Override
    public Object getNativeFace() {
        return this.channel;
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanInputStreamChanface.java ================================================
package com.pinecone.hydra.storage.io;

import com.pinecone.framework.system.NotImplementedException;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.List;

public class TitanInputStreamChanface implements Chanface {
    protected InputStream stream;

    public TitanInputStreamChanface( InputStream stream ) {
        this.stream = stream;
    }

    @Override
    public void position( long position ) throws IOException {
        this.stream.skip( position );
    }

    @Override
    public int read( ByteBuffer buffer ) throws IOException {
        byte[] tempBuffer = new byte[buffer.remaining()];
        int bytesRead = stream.read( tempBuffer );
        if ( bytesRead > 0 ) {
            // Guard against end-of-stream: InputStream.read returns -1 at EOF.
            buffer.put( tempBuffer, 0, bytesRead );
        }
        return bytesRead;
    }

    @Override
    public int read( ChanfaceReader reader, int size, long offset ) throws IOException {
        int bytesRead = 0;
        byte[] tempBuffer = new byte[ size ];
        bytesRead = stream.read( tempBuffer );
        ByteBuffer buffer = ByteBuffer.wrap( tempBuffer );
        reader.afterRead( buffer );
        //buffer.put(tempBuffer, 0, bytesRead);
        return bytesRead;
    }

    @Override
    public int read( byte[] buffer, int size, long offset ) throws IOException {
        return this.stream.read( buffer, (int) offset, size );
    }

    @Override
    public int write( ByteBuffer buffer ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public int write( byte[] buffer, int startPosition, int endSize ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public int write( byte[] buffer, List<CacheBlock> writableCacheBlocks ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public long position() throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public void close() throws IOException {
        this.stream.close();
    }

    @Override
    public Object getNativeFace() {
        return this.stream;
    }
}
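/*
 * Usage sketch (illustrative; the byte-array source is an assumption):
 * TitanInputStreamChanface adapts a plain InputStream to the Chanface
 * contract, so stream data can be consumed through the same read(...) API:
 *
 *   InputStream raw = new ByteArrayInputStream( new byte[]{ 1, 2, 3, 4 } );
 *   Chanface face = new TitanInputStreamChanface( raw );
 *   ByteBuffer buf = ByteBuffer.allocate( 4 );
 *   int n = face.read( buf );   // fills buf from the underlying stream
 *   face.close();
 */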
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanOutputStreamChanface.java ================================================
package com.pinecone.hydra.storage.io;

import com.pinecone.framework.system.NotImplementedException;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlockStatus;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;

public class TitanOutputStreamChanface implements Chanface {
    protected OutputStream        stream;
    protected final ReentrantLock reentrantLock;

    public TitanOutputStreamChanface( OutputStream stream ) {
        this.stream        = stream;
        this.reentrantLock = new ReentrantLock();
    }

    @Override
    public void position( long position ) throws IOException {
        // Plain output streams are not positionable; the request is silently ignored.
    }

    @Override
    public int read( ByteBuffer buffer ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public int read( ChanfaceReader reader, int size, long offset ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public int read( byte[] buffer, int size, long offset ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public int write( ByteBuffer buffer ) throws IOException {
        return this.writeToByte( buffer );
    }

    @Override
    public int write( byte[] buffer, int startPosition, int endSize ) throws IOException {
        this.stream.write( buffer, startPosition, endSize );
        return endSize;
    }

    @Override
    public int write( byte[] buffer, List<CacheBlock> writableCacheBlocks ) throws IOException {
        int length = 0;
        for ( CacheBlock cacheBlock : writableCacheBlocks ) {
            int start = cacheBlock.getValidByteStart().intValue();
            int size  = cacheBlock.getValidByteEnd().intValue() - start;
            // Write each block's valid range, then release the block for reuse.
            this.stream.write( buffer, start, size );
            cacheBlock.setStatus( CacheBlockStatus.Free );
            length += size;
        }
        return length;
    }

    @Override
    public long position() throws IOException {
        return 0;
    }

    @Override
    public void close() throws IOException {
        this.stream.close();
    }

    private int writeToByte( ByteBuffer buffer ) throws IOException {
        if ( buffer == null ) {
            throw new NullPointerException( "Buffer is null" );
        }
        // Drain the buffer's remaining bytes through a temporary array.
        byte[] tempArray = new byte[ buffer.remaining() ];
        buffer.get( tempArray );
        this.stream.write( tempArray );
        return tempArray.length;
    }

    @Override
    public Object getNativeFace() {
        return this.stream;
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanRandomInputStreamAccessChanface.java ================================================
package com.pinecone.hydra.storage.io;

import com.pinecone.framework.system.NotImplementedException;
import com.pinecone.hydra.storage.RandomAccessChanface;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;

import java.io.FilterInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;

public class TitanRandomInputStreamAccessChanface implements RandomAccessChanface {
    protected FilterInputStream   stream;
    protected final ReentrantLock reentrantLock;

    public TitanRandomInputStreamAccessChanface( FilterInputStream stream ) {
        this.stream        = stream;
        this.reentrantLock = new ReentrantLock();
    }

    @Override
    public void position( long position ) throws IOException {
        this.stream.skip( position );
    }

    @Override
    public int read( ByteBuffer buffer ) throws IOException {
        byte[] tempBuffer = new byte[ buffer.remaining() ];
        int bytesRead = stream.read( tempBuffer );
        // Guard against end-of-stream (-1), which would make the put below throw.
        if ( bytesRead > 0 ) {
            buffer.put( tempBuffer, 0, bytesRead );
        }
        return bytesRead;
    }

    @Override
    public int read( ChanfaceReader reader, int size, long offset ) throws IOException {
        byte[] tempBuffer = new byte[ size ];
        int bytesRead = stream.read( tempBuffer );
        ByteBuffer buffer = ByteBuffer.wrap( tempBuffer );
        reader.afterRead( buffer );
        return bytesRead;
    }

    @Override
    public int read( byte[] buffer, int size, long offset ) throws IOException {
        // Mirrors TitanInputStreamChanface: offset is the write offset inside buffer.
        return this.stream.read( buffer, (int) offset, size );
    }

    @Override
    public int write( ByteBuffer buffer ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public int write( byte[] buffer, int startPosition, int endSize ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public int write( byte[] buffer, List<CacheBlock> writableCacheBlocks ) throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public long position() throws IOException {
        throw new NotImplementedException();
    }

    @Override
    public void close() throws IOException {
        this.stream.close();
    }

    @Override
    public void mark( int readlimit ) {
        this.stream.mark( readlimit );
    }

    @Override
    public void reset() throws IOException {
        this.stream.reset();
    }

    @Override
    public Object getNativeFace() {
        return this.stream;
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/UIOException.java ================================================
package com.pinecone.hydra.storage.io;

import java.io.IOException;

import com.pinecone.framework.system.prototype.Pinenut;

public class UIOException extends IOException implements Pinenut {
    public UIOException() {
        super();
    }

    public UIOException( String message ) {
        super( message );
    }

    public UIOException( String message, Throwable cause ) {
        super( message, cause );
    }

    public UIOException( Throwable cause ) {
        super( cause );
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/GenericNativeMFile.java ================================================
package com.pinecone.hydra.storage.mfs;

import java.io.File;
import java.net.URI;

import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;
import com.pinecone.hydra.system.ko.handle.ArchKHandle;

public class GenericNativeMFile extends ArchKHandle implements NativeMFile {
    protected File mNativeFile;

    public GenericNativeMFile( File file ) {
        this.mNativeFile = file;
    }

    public GenericNativeMFile( URI uri ) {
        this( new File( uri ) );
    }

    @Override
    public URI toURI() {
        return this.mNativeFile.toURI();
    }

    @Override
    public String getName() {
        return this.mNativeFile.getName();
    }

    @Override
    public Number size() {
        return this.mNativeFile.length();
    }

    @Override
    public String getURI() {
        return this.toURI().toString();
    }

    @Override
    public String getPath() {
        return this.mNativeFile.getPath();
    }

    @Override
    public boolean delete() {
        return this.mNativeFile.delete();
    }

    @Override
    public String toJSONString() {
        return BeanJSONEncoder.BasicEncoder.encode( this );
    }

    @Override
    public String toString() {
        return this.toJSONString();
    }

    @Override
    public File getNativeHandle() {
        return this.mNativeFile;
    }

    @Override
    public boolean exists() {
        return this.mNativeFile.exists();
    }

    @Override
    public boolean isAbsolute() {
        return this.mNativeFile.isAbsolute();
    }

    @Override
    public boolean isDirectory() { return
this.mNativeFile.isDirectory(); } @Override public MFile[] listFiles() { File[] files = this.mNativeFile.listFiles(); if( files == null ){ return new MFile[0]; } MFile[] mFiles = new MFile[ files.length ]; for( int i = 0; i < files.length; ++i ){ mFiles[i] = new GenericNativeMFile( files[i] ); } return mFiles; } @Override public String getMetaType() { return MetaType; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/MFile.java ================================================ package com.pinecone.hydra.storage.mfs; import java.net.URI; import com.pinecone.hydra.storage.UFile; import com.pinecone.hydra.unit.imperium.entity.EntityNode; public interface MFile extends UFile, EntityNode { URI toURI(); @Override String getName(); String getPath(); String getURI(); boolean delete(); Object getNativeHandle(); boolean exists(); boolean isAbsolute(); boolean isDirectory(); MFile[] listFiles(); default String getMetaType() { return this.className().replace( MFile.class.getName(), "" ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/MappingFileSystem.java ================================================ package com.pinecone.hydra.storage.mfs; import com.pinecone.hydra.system.ko.handle.AppliableKHandle; import com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle; import java.io.IOException; public interface MappingFileSystem extends ObjectTreeAddressingSectionHandle, AppliableKHandle { void copy( String sourcePath, String destinationPath ) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/NativeMFile.java ================================================ package com.pinecone.hydra.storage.mfs; import java.io.File; public interface NativeMFile extends MFile { String MetaType = NativeMFile.class.getSimpleName(); @Override File getNativeHandle(); @Override default String getMetaType() { return MetaType; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/NativeMappingFileSystem.java ================================================ package com.pinecone.hydra.storage.mfs; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.external.GenericNativeExternalFile; import com.pinecone.hydra.storage.file.external.GenericNativeExternalFolder; import com.pinecone.hydra.storage.natives.NativeExternalFileSystems; import com.pinecone.hydra.system.ko.handle.ArchKHandle; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import java.io.File; import java.io.IOException; import java.net.URI; public class NativeMappingFileSystem extends ArchKHandle implements MappingFileSystem { protected URI mMountPointURI; public NativeMappingFileSystem( URI mountPointURI, String treeNodeName, GUID treeNodeGuid ) { super( treeNodeName, treeNodeGuid ); this.mMountPointURI = mountPointURI; } public NativeMappingFileSystem( String localFileMountScope, String treeNodeName, GUID treeNodeGuid ) { this( URI.create( "file:///" + localFileMountScope ), treeNodeName, treeNodeGuid ); } public NativeMappingFileSystem( URI mountPointURI ) { this( mountPointURI, null, null ); } public NativeMappingFileSystem( String localFileMountScope ) { this( localFileMountScope, null, null ); } @Override 
public EntityNode queryNode( String path ) {
        URI fullURI = this.mMountPointURI.resolve( path );
        return new GenericNativeMFile( fullURI );
    }

    @Override
    public void copy( String sourcePath, String destinationPath ) throws IOException {
        NativeExternalFileSystems.copy( sourcePath, destinationPath );
    }

    @Override
    public String getName() {
        return this.mszTreeNodeName;
    }

    @Override
    public GUID getGuid() {
        return this.mTreeNodeGuid;
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/natives/NativeExternalFileSystems.java ================================================
package com.pinecone.hydra.storage.natives;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;

public final class NativeExternalFileSystems {
    public static void copy( String sourcePath, String destinationPath ) throws IOException {
        // Note the swapped parameter semantics: destinationPath is the content to be
        // copied, while sourcePath is the target container directory.
        Path source         = Paths.get( destinationPath ); // The actual content to copy.
        Path destinationDir = Paths.get( sourcePath );      // The target container directory.

        // Verify that the source exists.
        if ( !Files.exists( source ) ) {
            throw new IOException( "Source to copy does not exist: " + source );
        }

        // Ensure the target directory exists.
        if ( !Files.exists( destinationDir ) ) {
            Files.createDirectories( destinationDir );
        }

        // If the source is a single file, copy it straight into the target directory.
        if ( Files.isRegularFile( source ) ) {
            Path target = destinationDir.resolve( source.getFileName() );
            Files.copy( source, target, StandardCopyOption.REPLACE_EXISTING );
            return;
        }

        // Directory copy, preserving the directory structure.
        Files.walkFileTree( source, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory( Path dir, BasicFileAttributes attrs ) throws IOException {
                // Relative path from the source root to the current directory.
                Path relative  = source.relativize( dir );
                Path targetDir = destinationDir.resolve( relative );
                // Create the target directory if it does not yet exist.
                if ( !Files.exists( targetDir ) ) {
                    Files.createDirectories( targetDir );
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile( Path file, BasicFileAttributes attrs ) throws IOException {
                // Relative path from the source root to the current file.
                Path relative   = source.relativize( file );
                Path targetFile = destinationDir.resolve( relative );
                // Copy the file, overwriting any existing target.
                Files.copy( file, targetFile, StandardCopyOption.REPLACE_EXISTING );
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory( Path dir, IOException exc ) throws IOException {
                if ( exc != null ) {
                    throw exc; // Propagate traversal failures.
                }
                return FileVisitResult.CONTINUE;
            }
        } );
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/PolicyManage.java ================================================
package com.pinecone.hydra.storage.policy;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.policy.entity.Policy;
import com.pinecone.hydra.storage.policy.source.PolicyMasterManipulator;

import java.util.List;

public interface PolicyManage extends Pinenut {
    void insertPolicy( Policy policy );
    void removePolicy( GUID policyGuid );
    Policy queryPolicy( GUID policyGuid );

    void insertFilePolicyMapping( GUID policyGuid, String filePath );
    void removeFilePolicyMapping( GUID policyGuid, String filePath );
    List<GUID> queryPolicyGuid( String filePath );

    PolicyMasterManipulator getMasterManipulator();
}
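For orientation, here is a small illustrative sketch of how the PolicyManage contract composes. It is not repository code: the helper class, the policy name, and the description are invented placeholders, and the PolicyManage instance and GUID are assumed to be supplied by the caller (e.g. a constructed TitanPolicyManage and an allocated GUID).

    // Illustrative sketch only: registers a policy, binds it to a file path,
    // and reads the binding back.
    import java.util.List;

    import com.pinecone.framework.util.id.GUID;
    import com.pinecone.hydra.storage.policy.PolicyManage;
    import com.pinecone.hydra.storage.policy.entity.GenericPolicy;

    public final class PolicyBindingSketch {
        public static boolean bindAndVerify( PolicyManage manage, GUID policyGuid, String filePath ) {
            GenericPolicy policy = new GenericPolicy();
            policy.setPolicyGuid( policyGuid );
            policy.setPolicyName( "cold-storage" );            // Placeholder name.
            policy.setPolicyDesc( "Example archival policy" ); // Placeholder description.
            manage.insertPolicy( policy );

            // Bind the policy to the path, then confirm the mapping round-trips.
            manage.insertFilePolicyMapping( policyGuid, filePath );
            List<GUID> bound = manage.queryPolicyGuid( filePath );
            return bound != null && bound.contains( policyGuid );
        }
    }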
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/TitanPolicyManage.java ================================================
package com.pinecone.hydra.storage.policy;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.policy.entity.Policy;
import com.pinecone.hydra.storage.policy.source.PolicyFileMappingManipulator;
import com.pinecone.hydra.storage.policy.source.PolicyManipulator;
import com.pinecone.hydra.storage.policy.source.PolicyMasterManipulator;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.ulf.util.guid.GUIDs;

import java.util.List;

public class TitanPolicyManage implements PolicyManage {
    protected PolicyManipulator            policyManipulator;
    protected Hydrogen                     hydrogen;
    protected PolicyMasterManipulator      masterManipulator;
    protected GuidAllocator                guidAllocator;
    protected PolicyFileMappingManipulator policyFileMappingManipulator;

    public TitanPolicyManage( Hydrogen hydrogen, KOIMasterManipulator masterManipulator, String name ) {
        this.hydrogen                     = hydrogen;
        this.masterManipulator            = (PolicyMasterManipulator) masterManipulator;
        this.guidAllocator                = GUIDs.newGuidAllocator();
        this.policyManipulator            = this.masterManipulator.getPolicyManipulator();
        this.policyFileMappingManipulator = this.masterManipulator.getPolicyFileMappingManipulator();
    }

    @Override
    public void insertPolicy( Policy policy ) {
        this.policyManipulator.insert( policy );
    }

    @Override
    public void removePolicy( GUID policyGuid ) {
        this.policyManipulator.remove( policyGuid );
    }

    @Override
    public Policy queryPolicy( GUID policyGuid ) {
        return this.policyManipulator.queryPolicy( policyGuid );
    }

    @Override
    public void insertFilePolicyMapping( GUID policyGuid, String filePath ) {
        this.policyFileMappingManipulator.insert( policyGuid, filePath );
    }

    @Override
    public void removeFilePolicyMapping( GUID policyGuid, String filePath ) {
        this.policyFileMappingManipulator.remove( policyGuid, filePath );
    }

    @Override
    public List<GUID> queryPolicyGuid( String filePath ) {
        return this.policyFileMappingManipulator.queryPolicy( filePath );
    }

    @Override
    public PolicyMasterManipulator getMasterManipulator() {
        return this.masterManipulator;
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/chain/PolicyChain.java ================================================
package com.pinecone.hydra.storage.policy.chain;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;

public interface PolicyChain extends Pinenut {
    GUID execution( String filePath, String version );
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/chain/VersionPolicyChain.java ================================================
package com.pinecone.hydra.storage.policy.chain;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.policy.PolicyManage;
import com.pinecone.hydra.storage.version.source.VersionManipulator;

public class VersionPolicyChain implements PolicyChain {
    protected PolicyManage       policyManage;
    protected VersionManipulator versionManipulator;

    public VersionPolicyChain( PolicyManage policyManage ) {
        this.policyManage = policyManage;
    }

    @Override
    public GUID execution( String filePath, String version ) {
        // Unimplemented stub: no policy resolution is performed yet.
        return null;
    }
}
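Since VersionPolicyChain.execution is still a stub returning null, one plausible shape for the resolution step is sketched below. This is purely an assumption about intent, not repository behaviour: the fall-through rule (first bound policy wins, null defers to the next chain element) is invented, the version argument is left unused, and java.util.List would need to be added to the imports.

    // Hypothetical body for the stub above; illustrative only.
    @Override
    public GUID execution( String filePath, String version ) {
        // Consult the policy bindings recorded for this path.
        List<GUID> policies = this.policyManage.queryPolicyGuid( filePath );
        if ( policies == null || policies.isEmpty() ) {
            return null; // No binding; defer to the next element in the chain.
        }
        return policies.get( 0 ); // First bound policy wins (assumed rule).
    }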
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/chain/factory/PolicyChainFactory.java ================================================ package com.pinecone.hydra.storage.policy.chain.factory; import com.pinecone.framework.system.prototype.Pinenut; public interface PolicyChainFactory extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/entity/GenericPolicy.java ================================================ package com.pinecone.hydra.storage.policy.entity; import com.pinecone.framework.util.id.GUID; public class GenericPolicy implements Policy { protected long enumId; protected String policyName; protected GUID policyGuid; protected String policyDesc; @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public String getPolicyName() { return this.policyName; } @Override public void setPolicyName(String policyName) { this.policyName = policyName; } @Override public GUID getPolicyGuid() { return this.policyGuid; } @Override public void setPolicyGuid(GUID policyGuid) { this.policyGuid = policyGuid; } @Override public String getPolicyDesc() { return this.policyDesc; } @Override public void setPolicyDesc(String policyDesc) { this.policyDesc = policyDesc; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/entity/Policy.java ================================================ package com.pinecone.hydra.storage.policy.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface Policy extends Pinenut { long getEnumId(); void setEnumId(long enumId); String getPolicyName(); void setPolicyName(String policyName); GUID getPolicyGuid(); void setPolicyGuid(GUID policyGuid); String getPolicyDesc(); void setPolicyDesc(String policyDesc); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/source/PolicyFileMappingManipulator.java ================================================ package com.pinecone.hydra.storage.policy.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.util.List; public interface PolicyFileMappingManipulator extends Pinenut { void insert(GUID policyGuid, String filePath); void remove(GUID policyGuid, String filePath); List queryPolicy(String filePath ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/source/PolicyManipulator.java ================================================ package com.pinecone.hydra.storage.policy.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.policy.entity.Policy; public interface PolicyManipulator extends Pinenut { void insert(Policy policy); void remove(GUID policyGuid); Policy queryPolicy( GUID policyGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/source/PolicyMasterManipulator.java ================================================ package com.pinecone.hydra.storage.policy.source; import 
com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; public interface PolicyMasterManipulator extends KOIMasterManipulator { PolicyManipulator getPolicyManipulator(); PolicyFileMappingManipulator getPolicyFileMappingManipulator(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/remote/RemoteFSInstrument.java ================================================ package com.pinecone.hydra.storage.remote; public interface RemoteFSInstrument { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/remote/RemoteUOFSInstrument.java ================================================ package com.pinecone.hydra.storage.remote; public class RemoteUOFSInstrument implements RemoteFSInstrument { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/TitanVersionManage.java ================================================ package com.pinecone.hydra.storage.version; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.version.entity.TitanVersion; import com.pinecone.hydra.storage.version.entity.TitanVersionMapping; import com.pinecone.hydra.storage.version.entity.VersionMapping; import com.pinecone.hydra.storage.version.source.VersionManipulator; import com.pinecone.hydra.storage.version.source.VersionMappingManipulator; import com.pinecone.hydra.storage.version.source.VersionMasterManipulator; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.ulf.util.guid.GUIDs; import java.util.List; public class TitanVersionManage implements VersionManage { protected Hydrogen hydrogen; protected GuidAllocator guidAllocator; protected VersionMasterManipulator masterManipulator; protected VersionManipulator versionManipulator; protected VersionMappingManipulator versionMappingManipulator; public TitanVersionManage(Hydrogen hydrogen, KOIMasterManipulator masterManipulator, String name ){ this.hydrogen = hydrogen; this.masterManipulator = (VersionMasterManipulator) masterManipulator; this.guidAllocator = GUIDs.newGuidAllocator(); this.versionManipulator = this.masterManipulator.getVersionManipulator(); this.versionMappingManipulator = this.masterManipulator.getVersionMappingManipulator(); } public TitanVersionManage( Hydrogen hydrogen, KOIMasterManipulator masterManipulator ){ this(hydrogen, masterManipulator, VersionManage.class.getSimpleName() ); } public TitanVersionManage( KOIMappingDriver driver ) { this( driver.getSystem(), driver.getMasterManipulator() ); } @Override public void insert(TitanVersion version) { this.versionManipulator.insertObjectVersion( version); } @Override public void remove(String version, GUID fileGuid) { this.versionManipulator.removeObjectVersion( version, fileGuid ); } @Override public boolean queryIsManage(GUID targetStorageObjectGuid) { return this.versionManipulator.queryIsManage( targetStorageObjectGuid ); } @Override public GUID queryObjectGuid(String version, GUID fileGuid) { return this.versionManipulator.queryObjectGuid( version, fileGuid ); } @Override public List fetchVersions(GUID guid) { return this.versionManipulator.fetchVersions( guid ); } @Override public GUID getVersionFileByGuid(GUID fileGuid) { return 
this.versionManipulator.getVersionFileByGuid( fileGuid );
    }

    @Override
    public TitanVersion queryByTargetStorageObjectGuid( GUID targetStorageObjectGuid ) {
        return this.versionManipulator.queryByTargetStorageObjectGuid( targetStorageObjectGuid );
    }

    @Override
    public boolean isExistEnableVersionMapping( GUID enableVersionGuid ) {
        for ( TitanVersionMapping versionMapping : this.versionMappingManipulator.queryAllVersionMapper() ) {
            if ( versionMapping.getEnableVersionGuid().equals( enableVersionGuid ) ) {
                return true;
            }
        }
        return false;
    }

    @Override
    public VersionMapping queryVersionMapping( GUID fileGuid ) {
        for ( TitanVersionMapping versionMapping : this.versionMappingManipulator.queryAllVersionMapper() ) {
            if ( versionMapping.getFileGuid().equals( fileGuid ) ) {
                return versionMapping;
            }
        }
        return null;
    }

    @Override
    public void updateVersionMapping( VersionMapping versionMapping ) {
        this.versionMappingManipulator.update( versionMapping );
    }

    @Override
    public void insertVersionMapping( TitanVersionMapping versionMapping ) {
        this.versionMappingManipulator.insert( versionMapping );
    }
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/VersionManage.java ================================================
package com.pinecone.hydra.storage.version;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.version.entity.TitanVersion;
import com.pinecone.hydra.storage.version.entity.TitanVersionMapping;
import com.pinecone.hydra.storage.version.entity.VersionMapping;

import java.util.List;

public interface VersionManage extends Pinenut {
    void insert( TitanVersion version );
    void remove( String version, GUID fileGuid );
    GUID queryObjectGuid( String version, GUID fileGuid );
    boolean queryIsManage( GUID targetStorageObjectGuid );
    List fetchVersions( GUID guid );
    GUID getVersionFileByGuid( GUID fileGuid );
    TitanVersion queryByTargetStorageObjectGuid( GUID targetStorageObjectGuid );
    boolean isExistEnableVersionMapping( GUID enableVersionGuid );
    VersionMapping queryVersionMapping( GUID fileGuid );
    void updateVersionMapping( VersionMapping versionMapping );
    void insertVersionMapping( TitanVersionMapping versionMapping );
}

================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/TitanVersion.java ================================================
package com.pinecone.hydra.storage.version.entity;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;

public class TitanVersion implements Version {
    private long    enumId;
    private String  version;
    private GUID    targetStorageObjectGuid;
    private GUID    fileGuid;
    private GUID    versionGuid;
    private boolean enableCrc32;
    private long    crc32;

    public TitanVersion() {
    }

    @Override
    public long getEnumId() {
        return this.enumId;
    }

    @Override
    public void setEnumId( long enumId ) {
        this.enumId = enumId;
    }

    @Override
    public String getVersion() {
        return this.version;
    }

    @Override
    public void setVersion( String version ) {
        this.version = version;
    }

    @Override
    public GUID getTargetStorageObjectGuid() {
        return this.targetStorageObjectGuid;
    }

    @Override
    public void setTargetStorageObjectGuid( GUID targetStorageObjectGuid ) {
        this.targetStorageObjectGuid = targetStorageObjectGuid;
    }

    @Override
    public GUID getFileGuid() {
        return this.fileGuid;
    }

    @Override
    public void setFileGuid( GUID fileGuid ) {
        this.fileGuid = fileGuid;
    }

    @Override
    public boolean getEnableCrc32() { return
this.enableCrc32; } @Override public void setEnableCrc32(boolean enableCrc32) { this.enableCrc32 = enableCrc32; } @Override public long getCrc32() { return this.crc32; } @Override public void setCrc32(long crc32) { this.crc32 = crc32; } @Override public GUID getVersionGuid() { return versionGuid; } @Override public void setVersionGuid(GUID versionGuid) { this.versionGuid = versionGuid; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/TitanVersionMapping.java ================================================ package com.pinecone.hydra.storage.version.entity; import com.pinecone.framework.util.id.GUID; public class TitanVersionMapping implements VersionMapping{ GUID versionGuid; GUID fileGuid; GUID enableVersionGuid; public TitanVersionMapping() { } public TitanVersionMapping(GUID versionGuid, GUID fileGuid, GUID enableVersionGuid) { this.versionGuid = versionGuid; this.fileGuid = fileGuid; this.enableVersionGuid = enableVersionGuid; } public GUID getVersionGuid() { return versionGuid; } public void setVersionGuid(GUID versionGuid) { this.versionGuid = versionGuid; } public GUID getFileGuid() { return fileGuid; } public void setFileGuid(GUID fileGuid) { this.fileGuid = fileGuid; } public GUID getEnableVersionGuid() { return enableVersionGuid; } public void setEnableVersionGuid(GUID enableVersionGuid) { this.enableVersionGuid = enableVersionGuid; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/Version.java ================================================ package com.pinecone.hydra.storage.version.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface Version extends Pinenut { long getEnumId(); void setEnumId( long enumId ); String getVersion(); void setVersion( String version ); GUID getTargetStorageObjectGuid(); void setTargetStorageObjectGuid( GUID targetStorageObjectGuid ); GUID getFileGuid(); void setFileGuid( GUID fileGuid ); boolean getEnableCrc32(); void setEnableCrc32( boolean enableCrc32 ); long getCrc32(); void setCrc32( long crc32 ); void setVersionGuid(GUID versionGuid); GUID getVersionGuid(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/VersionMapping.java ================================================ package com.pinecone.hydra.storage.version.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface VersionMapping extends Pinenut { GUID getVersionGuid(); void setVersionGuid(GUID versionGuid); GUID getFileGuid(); void setFileGuid(GUID fileGuid); GUID getEnableVersionGuid(); void setEnableVersionGuid(GUID enableVersionGuid); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/source/VersionManipulator.java ================================================ package com.pinecone.hydra.storage.version.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.version.entity.TitanVersion; import 
com.pinecone.hydra.storage.version.entity.Version; import java.util.List; public interface VersionManipulator extends Pinenut { void insertObjectVersion(Version version); void removeObjectVersion( String version, GUID fileGuid ); GUID queryObjectGuid( String version, GUID fileGuid ); boolean queryIsManage(GUID fileGuid); List fetchVersions(GUID guid); GUID getVersionFileByGuid(GUID fileGuid); TitanVersion queryByTargetStorageObjectGuid(GUID targetStorageObjectGuid); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/source/VersionMappingManipulator.java ================================================ package com.pinecone.hydra.storage.version.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.version.entity.TitanVersionMapping; import com.pinecone.hydra.storage.version.entity.VersionMapping; import java.util.List; public interface VersionMappingManipulator extends Pinenut { void insert(VersionMapping versionMapping); void remove(VersionMapping versionMapping); TitanVersionMapping queryVersionMapping(GUID fileGuid); void update(VersionMapping versionMapping); List queryAllVersionMapper(); boolean isExistEnableVersionMapping(GUID enableVersionGuid); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/source/VersionMasterManipulator.java ================================================ package com.pinecone.hydra.storage.version.source; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; public interface VersionMasterManipulator extends KOIMasterManipulator { VersionManipulator getVersionManipulator(); VersionMappingManipulator getVersionMappingManipulator(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/IUnifiedTransmitConstructor.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; public interface IUnifiedTransmitConstructor extends Pinenut { ReceiveEntity getReceiveEntity(Class< ? extends LogicVolume> volumeClass, Object... params); ExporterEntity getExportEntity( Class< ? extends LogicVolume > volumeClass, Object... 
params ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/KernelVolumeConfig.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.hydra.storage.ArchStorageConfig; import com.pinecone.hydra.storage.StorageConstants; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; import java.util.Map; public class KernelVolumeConfig extends ArchStorageConfig implements VolumeConfig { protected String mszVersionSignature = StorageConstants.StorageVersionSignature; protected Number mnTinyFileStripSizing = VolumeConstants.TinyFileStripSizing ; protected Number mnSmallFileStripSizing = VolumeConstants.SmallFileStripSizing ; protected Number mnMegaFileStripSizing = VolumeConstants.MegaFileStripSizing ; protected Number mnDefaultStripSize = VolumeConstants.DefaultStripSize ; protected int mStripResidentCacheAllotRatio = VolumeConstants.StripResidentCacheAllotRatio; protected String mStorageObjectExtension = VolumeConstants.StorageObjectExtension; protected String mSqliteFileExtension = VolumeConstants.SqliteFileExtension; protected String mPathSeparator = VolumeConstants.PathSeparator; public KernelVolumeConfig(){ super(); } public KernelVolumeConfig(Map config){ super(config); this.mszVersionSignature = (String) config.getOrDefault("VersionSignature", StorageConstants.StorageVersionSignature); this.mnTinyFileStripSizing = (Number) config.getOrDefault("TinyFileStripSizing", VolumeConstants.TinyFileStripSizing); this.mnSmallFileStripSizing = (Number) config.getOrDefault("SmallFileStripSizing", VolumeConstants.SmallFileStripSizing); this.mnMegaFileStripSizing = (Number) config.getOrDefault("MegaFileStripSizing", VolumeConstants.MegaFileStripSizing); this.mnDefaultStripSize = (Number) config.getOrDefault("DefaultStripSize", VolumeConstants.DefaultStripSize); this.mStripResidentCacheAllotRatio = ((Number) config.getOrDefault("StripResidentCacheAllotRatio", VolumeConstants.StripResidentCacheAllotRatio)).intValue(); this.mStorageObjectExtension = (String) config.getOrDefault("StorageObjectExtension", VolumeConstants.StorageObjectExtension); this.mSqliteFileExtension = (String) config.getOrDefault("SqliteFileExtension", VolumeConstants.SqliteFileExtension); this.mPathSeparator = (String) config.getOrDefault("PathSeparator", VolumeConstants.PathSeparator); } @Override public String getVersionSignature() { return this.mszVersionSignature; } @Override public Number getTinyFileStripSizing() { return this.mnTinyFileStripSizing; } @Override public Number getSmallFileStripSizing() { return this.mnSmallFileStripSizing; } @Override public Number getMegaFileStripSizing() { return this.mnMegaFileStripSizing; } @Override public Number getDefaultStripSize() { return this.mnDefaultStripSize; } @Override public int getStripResidentCacheAllotRatio() { return this.mStripResidentCacheAllotRatio; } @Override public String getStorageObjectExtension() { return this.mStorageObjectExtension; } @Override public String getSqliteFileExtension() { return this.mSqliteFileExtension; } @Override public String getPathSeparator() { return this.mPathSeparator; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/TitanVolumeFile.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.hydra.storage.CheckedFile; public class TitanVolumeFile implements 
VolumeFile{ private String name; private Number size; private long checksum; private int parityCheck; public TitanVolumeFile( String name, Number size ){ this.name = name; this.size = size; } @Override public String getName() { return this.name; } @Override public Number size() { return this.size; } @Override public long getChecksum() { return this.checksum; } @Override public void setChecksum(long checksum) { this.checksum = checksum; } @Override public int getParityCheck() { return this.parityCheck; } @Override public void setParityCheck(int parityCheck) { this.parityCheck = parityCheck; } @Override public VolumeFile fromUniformFile(CheckedFile file) { this.name = file.getName(); this.size = file.size(); this.parityCheck = file.getParityCheck(); this.checksum = file.getChecksum(); return this; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/UnifiedTransmitConstructor.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.simple.export.TitanSimpleExportEntity64; import com.pinecone.hydra.storage.volume.entity.local.simple.recevice.TitanSimpleReceiveEntity64; import com.pinecone.hydra.storage.volume.entity.local.spanned.TitanLocalSpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.spanned.export.TitanSpannedExportEntity64; import com.pinecone.hydra.storage.volume.entity.local.spanned.receive.TitanSpannedReceiveEntity64; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanLocalStripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.export.TitanStripedExportEntity64; import com.pinecone.hydra.storage.volume.entity.local.striped.receive.TitanStripedReceiveEntity64; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.HashMap; import java.util.Map; public class UnifiedTransmitConstructor implements IUnifiedTransmitConstructor{ private Map< Class< ? extends LogicVolume >, Class< ? extends ReceiveEntity > > receiveMap = new HashMap<>(); private Map< Class< ? extends LogicVolume >, Class< ? extends ExporterEntity > > exportMap = new HashMap<>(); public UnifiedTransmitConstructor(){ this.receiveMap.put( TitanLocalSimpleVolume.class, TitanSimpleReceiveEntity64.class ); this.receiveMap.put( TitanLocalSpannedVolume.class, TitanSpannedReceiveEntity64.class ); this.receiveMap.put( TitanLocalStripedVolume.class, TitanStripedReceiveEntity64.class ); this.exportMap.put( TitanLocalSimpleVolume.class, TitanSimpleExportEntity64.class ); this.exportMap.put( TitanLocalSpannedVolume.class, TitanSpannedExportEntity64.class ); this.exportMap.put( TitanLocalStripedVolume.class, TitanStripedExportEntity64.class ); } @Override public ReceiveEntity getReceiveEntity(Class volumeClass, Object... params) { Class receiveEntityClass = receiveMap.get(volumeClass); if( receiveEntityClass == null ){ throw new IllegalArgumentException( "Class not found." 
); } Constructor receiveConstructor = this.findReceiveConstructor(receiveEntityClass, params); try { return receiveConstructor.newInstance( params ); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new RuntimeException(e); } } @Override public ExporterEntity getExportEntity(Class volumeClass, Object... params) { Class exportEntityClass = exportMap.get(volumeClass); if( exportEntityClass == null ){ throw new IllegalArgumentException( "Class not found." ); } Constructor exportConstructor = this.findExportConstructor(exportEntityClass, params); try { return exportConstructor.newInstance( params ); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new RuntimeException(e); } } private Constructor searchConstructor( Class clazz, Object... params ) { for ( Constructor constructor : clazz.getConstructors() ) { if ( constructor.getParameterCount() == params.length ) { boolean matches = true; Class[] parameterTypes = constructor.getParameterTypes(); for ( int i = 0; i < params.length; ++i ) { if ( !parameterTypes[ i ].isInstance(params[ i ]) ) { matches = false; break; } } if ( matches ) { return constructor; } } } return null; } @SuppressWarnings( "unchecked" ) private Constructor findReceiveConstructor( Class clazz, Object... params ) { return (Constructor) this.searchConstructor( clazz, params ); } @SuppressWarnings( "unchecked" ) private Constructor findExportConstructor(Class clazz, Object... params) { return (Constructor) this.searchConstructor( clazz, params ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/UniformVolumeManager.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.file.entity.LocalCluster; import com.pinecone.hydra.storage.file.transmit.UniformSourceLocator; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.MountPoint; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.TitanVolumeAllotment; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.VolumeAllotment; import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64; import com.pinecone.hydra.storage.volume.entity.local.VolumeCapacity; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.KenusPool; import com.pinecone.hydra.storage.volume.kvfs.ExecutorPool; import com.pinecone.hydra.storage.volume.operator.TitanVolumeOperatorFactory; import com.pinecone.hydra.storage.volume.operator.VolumeOperator; import com.pinecone.hydra.storage.volume.source.LogicVolumeManipulator; import com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator; import com.pinecone.hydra.storage.volume.source.MountPointManipulator; import com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator; import com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator; import 
com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator; import com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator; import com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator; import com.pinecone.hydra.storage.volume.source.VolumeAllocateManipulator; import com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator; import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.identifier.KOPathResolver; import com.pinecone.hydra.system.ko.CascadeInstrument; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.kom.ArchKOMTree; import com.pinecone.hydra.system.ko.kom.SimplePathSelector; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator; import com.pinecone.ulf.util.guid.GUIDs; import com.pinecone.framework.util.id.GuidAllocator; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Objects; public class UniformVolumeManager extends ArchKOMTree implements VolumeManager { protected VolumeAllotment volumeAllotment; protected MirroredVolumeManipulator mirroredVolumeManipulator; protected MountPointManipulator mountPointManipulator; protected PhysicalVolumeManipulator physicalVolumeManipulator; protected SimpleVolumeManipulator simpleVolumeManipulator; protected SpannedVolumeManipulator spannedVolumeManipulator; protected StripedVolumeManipulator stripedVolumeManipulator; protected VolumeCapacityManipulator volumeCapacityManipulator; protected VolumeMasterManipulator volumeMasterManipulator; protected VolumeAllocateManipulator volumeAllocateManipulator; protected SQLiteVolumeManipulator sqliteVolumeManipulator; protected LogicVolumeManipulator primeLogicVolumeManipulator; protected ExecutorPool kenusPool; protected KenVolumeFileSystem kenVolumeFileSystem; public UniformVolumeManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, VolumeManager parent, String name, VolumeConfig config, String superiorPathScope, @Nullable GuidAllocator guidAllocator ) { super( superiorProcess, masterManipulator, config, parent, name, superiorPathScope, guidAllocator ); this.volumeMasterManipulator = ( VolumeMasterManipulator ) masterManipulator; this.pathResolver = new KOPathResolver( this.kernelObjectConfig ); this.volumeAllotment = new TitanVolumeAllotment( this,this.volumeMasterManipulator ); this.mirroredVolumeManipulator = this.volumeMasterManipulator.getMirroredVolumeManipulator(); this.mountPointManipulator = this.volumeMasterManipulator.getMountPointManipulator(); this.physicalVolumeManipulator = this.volumeMasterManipulator.getPhysicalVolumeManipulator(); this.simpleVolumeManipulator = this.volumeMasterManipulator.getSimpleVolumeManipulator(); this.spannedVolumeManipulator = this.volumeMasterManipulator.getSpannedVolumeManipulator(); this.stripedVolumeManipulator = this.volumeMasterManipulator.getStripedVolumeManipulator(); this.volumeCapacityManipulator = this.volumeMasterManipulator.getVolumeCapacityManipulator(); this.volumeAllocateManipulator 
= this.volumeMasterManipulator.getVolumeAllocateManipulator(); this.sqliteVolumeManipulator = this.volumeMasterManipulator.getSQLiteVolumeManipulator(); this.primeLogicVolumeManipulator = this.volumeMasterManipulator.getPrimeLogicVolumeManipulator(); this.kenusPool = new KenusPool(); this.pathSelector = new SimplePathSelector( this.pathResolver, this.imperialTree, this.primeLogicVolumeManipulator, new GUIDNameManipulator[] {} ); this.kenVolumeFileSystem = new KenVolumeFileSystem(this); this.operatorFactory = new TitanVolumeOperatorFactory( this, this.volumeMasterManipulator ); } public UniformVolumeManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, VolumeManager parent, String name, VolumeConfig config ) { this( superiorProcess, masterManipulator, parent, name, config, CascadeInstrument.EmptySuperiorPathScope, null ); } public UniformVolumeManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, VolumeConfig config ) { this( superiorProcess, masterManipulator, null, VolumeManager.class.getSimpleName(), config ); } public UniformVolumeManager( KOIMappingDriver driver, VolumeManager parent, String name, VolumeConfig config ){ this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, config ); } public UniformVolumeManager( KOIMappingDriver driver, VolumeConfig config ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), config ); } @Override public VolumeConfig getConfig() { return (VolumeConfig) super.getConfig(); } @Override public GuidAllocator getGuidAllocator() { return this.guidAllocator; } @Override public ImperialTree getMasterTrieTree() { return this.imperialTree; } @Override public KenVolumeFileSystem getKVFSystem() { return this.kenVolumeFileSystem; } public VolumeAllotment getVolumeAllotment(){ return this.volumeAllotment; } protected String getNS( GUID guid, String szSeparator ){ String path = this.imperialTree.getCachePath(guid); if ( path != null ) { return path; } ImperialTreeNode node = this.imperialTree.getNode(guid); String assemblePath = this.getNodeName(node); while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ){ List parentGuids = node.getParentGUIDs(); for( int i = 0; i < parentGuids.size(); ++i ){ if ( parentGuids.get(i) != null ){ node = this.imperialTree.getNode( parentGuids.get(i) ); break; } } String nodeName = this.getNodeName(node); assemblePath = nodeName + szSeparator + assemblePath; } this.imperialTree.insertCachePath( guid, assemblePath ); return assemblePath; } @Override public String getPath( GUID guid ) { return this.getNS( guid, this.kernelObjectConfig.getPathNameSeparator() ); } @Override public String getFullName( GUID guid ) { return this.getNS( guid, this.kernelObjectConfig.getFullNameSeparator() ); } @Override public GUID queryGUIDByFN( String fullName ) { return null; } @Override public GUID put( TreeNode treeNode ) { TreeNodeOperator operator = this.operatorFactory.getOperator( this.getVolumeMetaType( treeNode ) ); return operator.insert( treeNode ); } protected TreeNodeOperator getOperatorByGuid( GUID guid ) { ImperialTreeNode node = this.imperialTree.getNode( guid ); TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class[]{this.getClass()}, this ); return this.operatorFactory.getOperator( this.getVolumeMetaType( newInstance ) ); } @Override public LogicVolume get( GUID guid ) { return (LogicVolume) this.getOperatorByGuid( guid ).get( guid ); } @Override public void update(LogicVolume logicVolume) { TreeNodeOperator 
operator = this.getOperatorByGuid(logicVolume.getGuid()); operator.update( logicVolume ); } @Override public void updateVolumeUsedSize(GUID guid, VolumeCapacity volumeCapacity) { this.volumeCapacityManipulator.update( guid, volumeCapacity.getUsedSize().longValue() ); } @Override public void updatePhysical(PhysicalVolume physicalVolume) { this.physicalVolumeManipulator.update( physicalVolume ); } @Override public TreeNode get(GUID guid, int depth) { return null; } @Override public TreeNode getAsRootDepth(GUID guid) { return null; } @Override public void remove(GUID guid) { GUIDImperialTrieNode node = this.imperialTree.getNode( guid ); TreeNode newInstance = (TreeNode)node.getType().newInstance(); TreeNodeOperator operator = this.operatorFactory.getOperator( this.getVolumeMetaType( newInstance ) ); operator.purge( guid ); } @Override public void remove(String path) { } //protected ReentrantLock reentrantLock = new ReentrantLock(); @Override public PhysicalVolume getPhysicalVolume(GUID guid) { // this.reentrantLock.lock(); // try{ //Debug.trace( Thread.currentThread().getName(), Thread.currentThread().getId() ); PhysicalVolume physicalVolume = this.physicalVolumeManipulator.getPhysicalVolume(guid); if( physicalVolume == null ){ return null; } MountPoint mountPoint = this.mountPointManipulator.getMountPointByVolumeGuid(guid); VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid); physicalVolume.setMountPoint( mountPoint ); physicalVolume.setVolumeCapacity( volumeCapacity ); physicalVolume.applyVolumeManage( this ); return physicalVolume; // } // finally { // this.reentrantLock.unlock(); // } } @Override public SimpleVolume getPhysicalVolumeParent(GUID guid) { return null; } @Override public List getChildren(GUID guid) { return super.getChildren( guid ); } @Override public Object queryEntityHandleByNS(String path, String szBadSep, String szTargetSep) { return null; } @Override public EntityNode queryNode(String path) { return null; } @Override public List fetchRoot() { return null; } @Override public void rename(GUID guid, String name) { } private boolean allNonNull( List list ) { return list.stream().noneMatch( Objects::isNull ); } @Override public GUID insertPhysicalVolume(PhysicalVolume physicalVolume) { GUID guid = physicalVolume.getGuid(); VolumeCapacity64 volumeCapacity = physicalVolume.getVolumeCapacity(); if( volumeCapacity.getVolumeGuid() == null ){ volumeCapacity.setVolumeGuid( guid ); } MountPoint mountPoint = physicalVolume.getMountPoint(); if( mountPoint.getVolumeGuid() == null ){ mountPoint.setVolumeGuid( guid ); } this.physicalVolumeManipulator.insert( physicalVolume ); this.volumeCapacityManipulator.insert( volumeCapacity ); this.mountPointManipulator.insert( mountPoint ); return guid; } @Override public void purgePhysicalVolume(GUID guid) { this.physicalVolumeManipulator.remove( guid ); this.volumeCapacityManipulator.remove( guid ); this.mountPointManipulator.removeByVolumeGuid( guid ); } @Override public void insertAllocate(GUID objectGuid, GUID childVolumeGuid, GUID parentVolumeGuid) { this.volumeAllocateManipulator.insert( objectGuid, childVolumeGuid, parentVolumeGuid); } @Override public PhysicalVolume getSmallestCapacityPhysicalVolume() { PhysicalVolume smallestCapacityPhysicalVolume = this.physicalVolumeManipulator.getSmallestCapacityPhysicalVolume(); return this.getPhysicalVolume( smallestCapacityPhysicalVolume.getGuid() ); } @Override public VolumeMasterManipulator getMasterManipulator() { return this.volumeMasterManipulator; } 
@Override public void storageExpansion(GUID parentGuid, GUID childGuid) { this.treeMasterManipulator.getTrieTreeManipulator().addChild( childGuid, parentGuid ); } @Override public Hydrogen getHydrogen() { return this.hydrogen; } @Override public ExecutorPool getKenusPool() { return this.kenusPool; } @Override public List queryAllVolumes() { List physicalVolumes = this.physicalVolumeManipulator.queryAllPhysicalVolumes(); List simpleVolumes = this.simpleVolumeManipulator.queryAllSimpleVolumes(); List spannedVolumes = this.spannedVolumeManipulator.queryAllSpannedVolume(); List stripedVolumes = this.stripedVolumeManipulator.queryAllStripedVolume(); List fullPhysicalVolumes = new ArrayList<>(); List fullSimpleVolumes = new ArrayList<>(); List fullSpannedVolumes = new ArrayList<>(); List fullStripedVolumes = new ArrayList<>(); for( Volume volume : physicalVolumes ){ PhysicalVolume physicalVolume = this.getPhysicalVolume(volume.getGuid()); fullPhysicalVolumes.add(physicalVolume); } for( Volume volume : simpleVolumes ){ LogicVolume logicVolume = this.get(volume.getGuid()); fullSimpleVolumes.add(logicVolume); } for( Volume volume : spannedVolumes ){ LogicVolume logicVolume = this.get(volume.getGuid()); fullSpannedVolumes.add(logicVolume); } for( Volume volume : stripedVolumes ){ LogicVolume logicVolume = this.get(volume.getGuid()); fullStripedVolumes.add(logicVolume); } ArrayList volumes = new ArrayList<>(); volumes.addAll( fullPhysicalVolumes ); volumes.addAll(fullSimpleVolumes); volumes.addAll(fullSpannedVolumes); volumes.addAll(fullStripedVolumes); return volumes; } @Override public List listLogicVolumes() { List simpleVolumes = this.simpleVolumeManipulator.queryAllSimpleVolumes(); List spannedVolumes = this.spannedVolumeManipulator.queryAllSpannedVolume(); List stripedVolumes = this.stripedVolumeManipulator.queryAllStripedVolume(); ArrayList volumes = new ArrayList<>(); volumes.addAll(simpleVolumes); volumes.addAll(spannedVolumes); volumes.addAll(stripedVolumes); return volumes; } @Override public List listPhysicsVolumes() { List physicalVolumes = this.physicalVolumeManipulator.queryAllPhysicalVolumes(); ArrayList volumes = new ArrayList<>(); for( Volume volume : physicalVolumes ){ PhysicalVolume physicalVolume = this.getPhysicalVolume(volume.getGuid()); volumes.add(physicalVolume); } return new ArrayList<>(volumes); } @Override public void removeStorageObject( Cluster cluster ) throws SQLException { LocalCluster localCluster = (LocalCluster) cluster; String sourceName = localCluster.getSourceName(); UniformSourceLocator uniformSourceLocator = JSON.unmarshal(sourceName, UniformSourceLocator.class); LogicVolume volume = this.get(GUIDs.GUID128(uniformSourceLocator.getVolumeGuid())); VolumeOperator operator = (VolumeOperator) this.getOperatorByGuid(volume.getGuid()); operator.removeStorageObject( volume.getGuid(), cluster.getSegGuid(), cluster.getSize() ); } @Override public void removeStorageObject(GUID volumeGuid, GUID storageGuid, long size) { VolumeOperator operator = (VolumeOperator) this.getOperatorByGuid(volumeGuid); operator.removeStorageObject( volumeGuid, storageGuid, size ); } private String getNodeName(ImperialTreeNode node ){ UOI type = node.getType(); TreeNode newInstance = (TreeNode)type.newInstance(); TreeNodeOperator operator = this.operatorFactory.getOperator(this.getVolumeMetaType( newInstance )); TreeNode treeNode = operator.get(node.getGuid()); return treeNode.getName(); } private String getVolumeMetaType( TreeNode treeNode ){ return treeNode.className().replace("Titan",""); } 
} ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeConfig.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.hydra.storage.StorageConfig; public interface VolumeConfig extends StorageConfig { String getVersionSignature(); Number getTinyFileStripSizing() ; Number getSmallFileStripSizing() ; Number getMegaFileStripSizing() ; Number getDefaultStripSize() ; int getStripResidentCacheAllotRatio(); String getStorageObjectExtension(); String getSqliteFileExtension(); String getPathSeparator(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeConstants.java ================================================ package com.pinecone.hydra.storage.volume; public final class VolumeConstants { public static final Number TinyFileStripSizing = 512 * 1024L; // 512 KB public static final Number SmallFileStripSizing = 4 * 1024 * 1024L; // 4 MB public static final Number MegaFileStripSizing = 10 * 1024 * 1024L; // 10 MB public static final Number DefaultStripSize = VolumeConstants.MegaFileStripSizing; public static final int StripResidentCacheAllotRatio = 2; public static final String StorageObjectExtension = ".storage"; public static final String SqliteFileExtension = ".db"; public static final String PathSeparator = "/"; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeFile.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.hydra.storage.CheckedFile; public interface VolumeFile extends CheckedFile { VolumeFile fromUniformFile( CheckedFile file ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeManager.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.local.VolumeCapacity; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.ExecutorPool; import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import java.sql.SQLException; import java.util.List; public interface VolumeManager extends KOMInstrument { @Override LogicVolume get( GUID guid ); void update( LogicVolume logicVolume ); void updateVolumeUsedSize(GUID guid, VolumeCapacity volumeCapacity); void updatePhysical( PhysicalVolume physicalVolume ); @Override VolumeConfig getConfig(); PhysicalVolume getPhysicalVolume( GUID guid ); SimpleVolume getPhysicalVolumeParent( GUID guid ); GUID insertPhysicalVolume( PhysicalVolume physicalVolume ); void purgePhysicalVolume( GUID guid ); void insertAllocate( GUID objectGuid, GUID childVolumeGuid, GUID parentVolumeGuid ); PhysicalVolume getSmallestCapacityPhysicalVolume(); VolumeMasterManipulator getMasterManipulator(); void 
storageExpansion( GUID parentGuid, GUID childGuid ); Hydrogen getHydrogen(); ExecutorPool getKenusPool(); List<Volume> queryAllVolumes(); List<Volume> listLogicVolumes(); List<Volume> listPhysicsVolumes(); void removeStorageObject(Cluster cluster) throws SQLException; void removeStorageObject( GUID volumeGuid, GUID storageGuid, long size ); KenVolumeFileSystem getKVFSystem(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumePoliceDog.java ================================================ package com.pinecone.hydra.storage.volume; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface VolumePoliceDog extends Pinenut { GUID simpleDfsSearch( String path, String szSeparator ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchExportEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.volume.VolumeManager; public abstract class ArchExportEntity implements ExporterEntity{ protected VolumeManager volumeManager; protected StorageExportIORequest storageExportIORequest; protected Chanface channel; public ArchExportEntity(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel){ this.volumeManager = volumeManager; this.storageExportIORequest = storageExportIORequest; this.channel = channel; } @Override public VolumeManager getVolumeManager() { return this.volumeManager; } @Override public void setVolumeManager(VolumeManager volumeManager) { this.volumeManager = volumeManager; } @Override public StorageExportIORequest getStorageIORequest() { return this.storageExportIORequest; } @Override public void setStorageIORequest(StorageExportIORequest storageExportIORequest) { this.storageExportIORequest = storageExportIORequest; } @Override public Chanface getChannel() { return this.channel; } @Override public void setChannel(Chanface channel) { this.channel = channel; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchLogicVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.List; public abstract class ArchLogicVolume extends ArchVolume implements LogicVolume{ protected List<LogicVolume> children; protected VolumeCapacity64 volumeCapacity; public ArchLogicVolume(VolumeManager volumeManager) { super(volumeManager); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } public ArchLogicVolume(){} @Override public List<LogicVolume> queryChildren() { if ( this.children == null || this.children.isEmpty() ){ ArrayList<LogicVolume> logicVolumes = new ArrayList<>(); Collection<TreeNode> nodes = this.volumeManager.getChildren( this.guid ); for( TreeNode node : nodes ){ LogicVolume volume =
this.volumeManager.get(node.getGuid()); logicVolumes.add( volume ); } this.children = logicVolumes; } return this.children; } @Override public void setChildren(List<LogicVolume> children) { this.children = children; } @Override public VolumeCapacity64 getVolumeCapacity() { return this.volumeCapacity; } @Override public void setVolumeCapacity(VolumeCapacity64 volumeCapacity) { this.volumeCapacity = volumeCapacity; } @Override public SQLiteExecutor getSQLiteExecutor() throws SQLException { VolumeConfig config = this.volumeManager.getConfig(); GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.getGuid()); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid); String url = physicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.getGuid() + config.getSqliteFileExtension(); return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.VolumeManager; public abstract class ArchReceiveEntity implements ReceiveEntity{ protected VolumeManager volumeManager; protected StorageReceiveIORequest storageReceiveIORequest; protected Chanface channel; public ArchReceiveEntity(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel){ this.volumeManager = volumeManager; this.storageReceiveIORequest = storageReceiveIORequest; this.channel = channel; } @Override public VolumeManager getVolumeManager() { return this.volumeManager; } @Override public void setVolumeManager(VolumeManager volumeManager) { this.volumeManager = volumeManager; } @Override public StorageReceiveIORequest getReceiveStorageObject() { return this.storageReceiveIORequest; } @Override public void setReceiveStorageObject(StorageReceiveIORequest storageReceiveIORequest) { this.storageReceiveIORequest = storageReceiveIORequest; } @Override public Chanface getKChannel() { return this.channel; } @Override public void setKChannel(Chanface channel) { this.channel = channel; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import java.time.LocalDateTime; public abstract class ArchVolume implements Volume{ protected long enumId; protected GUID guid; protected LocalDateTime createTime; protected LocalDateTime updateTime; protected String name; protected String type; protected String extConfig; protected VolumeManager volumeManager; protected VolumeCapacity64 volumeCapacity; protected OnVolumeFileSystem kenVolumeFileSystem; public ArchVolume( VolumeManager volumeManager ){ this.volumeManager = volumeManager; this.guid = volumeManager.getGuidAllocator().nextGUID(); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); this.
kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } public ArchVolume(){} @Override public long getEnumId() { return this.enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } @Override public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } @Override public String getName() { return this.name; } @Override public void setName(String name) { this.name = name; } @Override public String getType() { return this.type; } @Override public void setType(String type) { this.type = type; } @Override public VolumeCapacity64 getVolumeCapacity() { return this.volumeCapacity; } @Override public void setVolumeCapacity(VolumeCapacity64 volumeCapacity) { this.volumeCapacity = volumeCapacity; } @Override public String getExtConfig() { return this.extConfig; } @Override public void setExtConfig(String extConfig) { this.extConfig = extConfig; } @Override public void setKenVolumeFileSystem() { this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/DirectReceiver.java ================================================ package com.pinecone.hydra.storage.volume.entity; public interface DirectReceiver extends Receiver { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/Exporter.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.system.prototype.Pinenut; public interface Exporter extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ExporterEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public interface ExporterEntity extends Pinenut { VolumeManager getVolumeManager(); void setVolumeManager(VolumeManager volumeManager); StorageExportIORequest getStorageIORequest(); void setStorageIORequest(StorageExportIORequest storageExportIORequest); StorageIOResponse export() throws IOException; StorageIOResponse export( Number offset, Number endSize ) throws IOException; StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException; Chanface getChannel(); void setChannel( Chanface channel ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/LogicVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.util.id.GUID; import 
com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.io.IOException; import java.sql.SQLException; import java.util.List; public interface LogicVolume extends Volume, TreeNode { String getName(); void setName( String name ); List<LogicVolume> queryChildren(); void setChildren( List<LogicVolume> children ); VolumeCapacity64 getVolumeCapacity(); void setVolumeCapacity( VolumeCapacity64 volumeCapacity ); void extendLogicalVolume( GUID physicalGuid ); List< GUID > listPhysicalVolume(); default MirroredVolume evinceMirroredVolume(){ return null; } default SimpleVolume evinceSimpleVolume(){ return null; } default SpannedVolume evinceSpannedVolume(){ return null; } default StripedVolume evinceStripeVolume(){ return null; } void setVolumeTree( VolumeManager volumeManager ); StorageIOResponse receive( ReceiveEntity entity ) throws IOException; StorageIOResponse receive( ReceiveEntity entity, Number offset, Number endSize ) throws IOException; StorageIOResponse randomReceive( ReceiveEntity entity, Number offset, Number endSize ) throws IOException; StorageIOResponse receive( ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer ) throws IOException; StorageIOResponse export( ExporterEntity entity ) throws IOException; /* Coming soon. */ StorageIOResponse export( ExporterEntity entity, Number offset, Number endSize ) throws IOException; StorageIOResponse export( ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer ) throws UIOException; StorageIOResponse export( ExporterEntity entity, boolean accessRandom ) throws UIOException; /* Coming soon. */ StorageIOResponse export( ExporterEntity entity, Number offset, Number endSize, boolean accessRandom ); StorageIOResponse export( ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom ) throws UIOException; boolean existStorageObject( GUID storageObject ) throws SQLException; void build() throws SQLException; void storageExpansion( GUID volumeGuid ); SQLiteExecutor getSQLiteExecutor() throws SQLException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/MirroredVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; public interface MirroredVolume extends LogicVolume{ @Override default MirroredVolume evinceMirroredVolume() { return this; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/MountPoint.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface MountPoint extends Pinenut { long getEnumId(); GUID getGuid(); void setGuid(GUID guid); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); String getName(); void setName(String name); GUID getVolumeGuid(); void setVolumeGuid( GUID volumeGuid ); String getMountPoint(); void setMountPoint( String mountPoint ); }
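[Illustrative usage sketch; not a repository file.] The evince* defaults on LogicVolume give callers a cast-free way to recover a volume's concrete kind: each subinterface (SimpleVolume, SpannedVolume, StripedVolume, MirroredVolume) overrides its own evince method to return this, while the inherited defaults return null. A minimal caller-side sketch, assuming an initialized VolumeManager; the class VolumeKindSketch, the method describeVolume, and the variable names are illustrative, not repository code:

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.LogicVolume;

class VolumeKindSketch {
    // Illustrative sketch: resolve the concrete volume kind through the
    // evince* defaults instead of instanceof casts.
    static String describeVolume( VolumeManager volumeManager, GUID guid ) {
        LogicVolume volume = volumeManager.get( guid );
        if ( volume.evinceStripeVolume() != null ) {
            return "striped";  // RAID-0 style striping across physical volumes
        }
        if ( volume.evinceSpannedVolume() != null ) {
            return "spanned";  // concatenation across physical volumes
        }
        if ( volume.evinceMirroredVolume() != null ) {
            return "mirrored"; // duplicated writes across physical volumes
        }
        if ( volume.evinceSimpleVolume() != null ) {
            return "simple";   // single backing physical volume
        }
        return "unknown";
    }
}

The pattern keeps the downcast logic inside the type hierarchy itself, so adding a new volume kind only requires a new evince default rather than edits to every caller.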
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/PhysicalVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public interface PhysicalVolume extends Volume{ MountPoint getMountPoint(); void setMountPoint( MountPoint mountPoint ); GUID getParent(); void applyVolumeManage( VolumeManager volumeManager ); StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel ) throws UIOException; StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, Number offset, Number endSize ) throws IOException; StorageIOResponse channelExport(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel ) throws IOException; StorageIOResponse channelRaid0Export(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws IOException; StorageIOResponse receive( ReceiveEntity entity ) throws IOException; StorageIOResponse receive( ReceiveEntity entity, Number offset, Number endSize ) throws IOException; StorageIOResponse randomReceive( ReceiveEntity entity,Number offset, Number endSize) throws IOException; StorageIOResponse receive( ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer ) throws IOException; StorageIOResponse export( ExporterEntity entity ) throws IOException; //敬请期待 StorageIOResponse export( ExporterEntity entity, Number offset, Number endSize ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public interface ReceiveEntity extends Pinenut { VolumeManager getVolumeManager(); void setVolumeManager(VolumeManager volumeManager); StorageReceiveIORequest getReceiveStorageObject(); void setReceiveStorageObject( StorageReceiveIORequest storageReceiveIORequest); Chanface getKChannel(); void setKChannel( Chanface channel); StorageIOResponse receive() throws IOException; StorageIOResponse receive(Number offset, Number endSize ) throws IOException; StorageIOResponse randomReceive( Number offset, Number endSize ) throws IOException; StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer ) throws IOException; } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/Receiver.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import java.io.IOException; public interface Receiver extends Pinenut { StorageIOResponse receive(Chanface chanface) throws IOException; StorageIOResponse receive(Chanface chanface,Number offset, Number endSize) throws IOException; StorageIOResponse randomReceive(Chanface chanface,Number offset, Number endSize) throws IOException; StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException; StorageIOResponse receive(RandomAccessChanface randomAccessChanface,Number offset, Number endSize) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/SimpleVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; import java.sql.SQLException; public interface SimpleVolume extends LogicVolume{ @Override default SimpleVolume evinceSimpleVolume() { return this; } void assembleSQLiteExecutor() throws SQLException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/SpannedVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; public interface SpannedVolume extends LogicVolume{ @Override default SpannedVolume evinceSpannedVolume() { return this; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/StripedReceiver.java ================================================ package com.pinecone.hydra.storage.volume.entity; public interface StripedReceiver extends Receiver { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/StripedVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity; public interface StripedVolume extends LogicVolume{ @Override default StripedVolume evinceStripeVolume() { return this; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/TitanMountPoint.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.source.MountPointManipulator; import java.time.LocalDateTime; public class TitanMountPoint implements MountPoint{ protected long enumId; protected GUID guid; protected LocalDateTime createTime; protected LocalDateTime updateTime; protected String name; protected GUID volumeGuid; protected String mountPoint; protected VolumeManager volumeManager; protected MountPointManipulator mountPointManipulator; public TitanMountPoint(VolumeManager volumeManager, MountPointManipulator mountPointManipulator ){ this.volumeManager = volumeManager; this.mountPointManipulator = mountPointManipulator; 
this.guid = volumeManager.getGuidAllocator().nextGUID(); this.createTime = LocalDateTime.now(); this.updateTime = LocalDateTime.now(); } public TitanMountPoint(){} @Override public long getEnumId() { return this.enumId; } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } @Override public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } @Override public String getName() { return this.name; } @Override public void setName(String name) { this.name = name; } @Override public GUID getVolumeGuid() { return this.volumeGuid; } @Override public void setVolumeGuid(GUID volumeGuid) { this.volumeGuid = volumeGuid; } @Override public String getMountPoint() { return this.mountPoint; } @Override public void setMountPoint(String mountPoint) { this.mountPoint = mountPoint; } public void setMountPointManipulator( MountPointManipulator mountPointManipulator ){ this.mountPointManipulator = mountPointManipulator; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/TitanVolumeAllotment.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume; import com.pinecone.hydra.storage.volume.entity.local.physical.TitanLocalPhysicalVolume; import com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.spanned.TitanLocalSpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanLocalStripedVolume; import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator; public class TitanVolumeAllotment implements VolumeAllotment{ private VolumeManager volumeManager; private VolumeMasterManipulator masterManipulator; public TitanVolumeAllotment(VolumeManager volumeManager, VolumeMasterManipulator volumeMasterManipulator ){ this.volumeManager = volumeManager; this.masterManipulator= volumeMasterManipulator; } @Override public VolumeCapacity64 newVolumeCapacity() { return new TitanVolumeCapacity64( this.volumeManager,this.masterManipulator.getVolumeCapacityManipulator() ); } @Override public LocalStripedVolume newLocalStripedVolume() { return new TitanLocalStripedVolume( this.volumeManager, this.masterManipulator.getStripedVolumeManipulator() ); } @Override public LocalSpannedVolume newLocalSpannedVolume() { return new TitanLocalSpannedVolume( this.volumeManager, this.masterManipulator.getSpannedVolumeManipulator() ); } @Override public LocalSimpleVolume newLocalSimpleVolume() { return new TitanLocalSimpleVolume( this.volumeManager, this.masterManipulator.getSimpleVolumeManipulator() ); } @Override public LocalPhysicalVolume newLocalPhysicalVolume() { 
return new TitanLocalPhysicalVolume( this.volumeManager, this.masterManipulator.getPhysicalVolumeManipulator() ); } @Override public MountPoint newMountPoint() { return new TitanMountPoint( this.volumeManager, this.masterManipulator.getMountPointManipulator() ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/TitanVolumeCapacity64.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator; public class TitanVolumeCapacity64 implements VolumeCapacity64 { private GUID volumeGuid; private long definitionCapacity; private long usedSize; private long quotaCapacity; private VolumeManager volumeManager; private VolumeCapacityManipulator volumeCapacityManipulator; public TitanVolumeCapacity64(VolumeManager volumeManager, VolumeCapacityManipulator volumeCapacityManipulator ){ this.volumeManager = volumeManager; this.volumeCapacityManipulator = volumeCapacityManipulator; } public TitanVolumeCapacity64( GUID volumeGuid, long definitionCapacity, long usedSize, long quotaCapacity ){ this.volumeGuid = volumeGuid; this.definitionCapacity = definitionCapacity; this.usedSize = usedSize; this.quotaCapacity = quotaCapacity; } @Override public Long getDefinitionCapacity() { return this.definitionCapacity; } @Override public void setDefinitionCapacity( Number definitionCapacity ) { this.definitionCapacity = definitionCapacity.longValue(); } @Override public Long getUsedSize() { return this.usedSize; } @Override public GUID getVolumeGuid() { return this.volumeGuid; } @Override public void setVolumeGuid( GUID volumeGuid ) { this.volumeGuid = volumeGuid; } @Override public void setUsedSize( Number usedSize ) { this.usedSize = usedSize.longValue(); } @Override public Long getQuotaCapacity() { return this.quotaCapacity; } @Override public void setQuotaCapacity( Number quotaCapacity ) { this.quotaCapacity = quotaCapacity.longValue(); } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/Volume.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface Volume extends Pinenut { long getEnumId(); GUID getGuid(); void setGuid(GUID guid); LocalDateTime getCreateTime(); void setCreateTime( LocalDateTime createTime ); LocalDateTime getUpdateTime(); void setUpdateTime( LocalDateTime updateTime ); String getName(); void setName(String name); String getType(); void setType( String type ); String getExtConfig(); void setExtConfig( String extConfig ); VolumeCapacity64 getVolumeCapacity(); void setVolumeCapacity( VolumeCapacity64 volumeCapacity ); void setKenVolumeFileSystem(); void deductCapacity( long deductCapacity ); void increaseCapacity( long increaseCapacity ); boolean checkCapacity( long size ); } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/VolumeAllotment.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume; public interface VolumeAllotment extends Pinenut { VolumeCapacity64 newVolumeCapacity(); LocalStripedVolume newLocalStripedVolume(); LocalSpannedVolume newLocalSpannedVolume(); LocalSimpleVolume newLocalSimpleVolume(); LocalPhysicalVolume newLocalPhysicalVolume(); MountPoint newMountPoint(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/VolumeCapacity64.java ================================================ package com.pinecone.hydra.storage.volume.entity; import com.pinecone.hydra.storage.volume.entity.local.VolumeCapacity; public interface VolumeCapacity64 extends VolumeCapacity { @Override Long getDefinitionCapacity(); @Override void setDefinitionCapacity( Number definitionCapacity ); @Override Long getUsedSize(); @Override void setUsedSize( Number usedSize ); @Override Long getQuotaCapacity(); @Override void setQuotaCapacity( Number quotaCapacity ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalMirroredVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local; import com.pinecone.hydra.storage.volume.entity.MirroredVolume; public interface LocalMirroredVolume extends MirroredVolume { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalPhysicalVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; public interface LocalPhysicalVolume extends PhysicalVolume { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalSimpleVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; public interface LocalSimpleVolume extends SimpleVolume { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalSpannedVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local; import com.pinecone.hydra.storage.volume.entity.SpannedVolume; public interface LocalSpannedVolume extends SpannedVolume { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalStripedVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local; import com.pinecone.hydra.storage.volume.entity.StripedVolume; public interface LocalStripedVolume extends StripedVolume { } 
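[Illustrative usage sketch; not a repository file.] VolumeAllotment is the abstract-factory seam for the local volume family: TitanVolumeAllotment binds each product (striped, spanned, simple, physical, mount point, capacity record) to its manipulator drawn from the VolumeMasterManipulator. A brief sketch of allotting a simple volume with a 64-bit capacity record, assuming already-constructed volumeManager and masterManipulator instances; AllotmentSketch, allotSimple, and the 10 GiB figure are illustrative only:

import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.TitanVolumeAllotment;
import com.pinecone.hydra.storage.volume.entity.VolumeAllotment;
import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;
import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;
import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;

class AllotmentSketch {
    // Illustrative sketch: create the volume and its capacity record through
    // the allotment factory rather than instantiating Titan* classes directly.
    static LocalSimpleVolume allotSimple( VolumeManager volumeManager,
                                          VolumeMasterManipulator masterManipulator ) {
        VolumeAllotment allotment = new TitanVolumeAllotment( volumeManager, masterManipulator );
        LocalSimpleVolume simple  = allotment.newLocalSimpleVolume();

        VolumeCapacity64 capacity = allotment.newVolumeCapacity();
        capacity.setVolumeGuid( simple.getGuid() );                  // bind the record to the new volume
        capacity.setDefinitionCapacity( 10L * 1024 * 1024 * 1024 );  // 10 GiB defined capacity (example value)
        capacity.setUsedSize( 0L );
        simple.setVolumeCapacity( capacity );
        return simple;
    }
}

Because every product is created through the allotment, callers stay decoupled from the Titan* implementations and from the manipulator wiring behind them.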
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/VolumeCapacity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface VolumeCapacity extends Pinenut { GUID getVolumeGuid(); void setVolumeGuid( GUID volumeGuid ); Number getDefinitionCapacity(); void setDefinitionCapacity( Number definitionCapacity ); Number getUsedSize(); void setUsedSize( Number usedSize ); Number getQuotaCapacity(); void setQuotaCapacity( Number quotaCapacity ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/mirrored/TitanLocalMirroredVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.mirrored; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchLogicVolume; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.local.LocalMirroredVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator; import java.sql.SQLException; import java.util.List; public class TitanLocalMirroredVolume extends ArchLogicVolume implements LocalMirroredVolume { private MirroredVolumeManipulator mirroredVolumeManipulator; public void setMirroredVolumeManipulator( MirroredVolumeManipulator mirroredVolumeManipulator ){ this.mirroredVolumeManipulator = mirroredVolumeManipulator; } public TitanLocalMirroredVolume(VolumeManager volumeManager, MirroredVolumeManipulator mirroredVolumeManipulator) { super(volumeManager); this.mirroredVolumeManipulator = mirroredVolumeManipulator; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } @Override public void extendLogicalVolume(GUID physicalGuid) { } @Override public List<GUID> listPhysicalVolume() { return null; } @Override public void setVolumeTree(VolumeManager volumeManager) { this.volumeManager = volumeManager; } @Override public StorageIOResponse receive(ReceiveEntity entity) throws UIOException { return null; } @Override public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws UIOException { return null; } @Override public StorageIOResponse export(ExporterEntity entity) throws UIOException { return null; } @Override public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) { return null; } @Override public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException { return null; }
@Override public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) throws UIOException { return null; } @Override public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) { return null; } @Override public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) throws UIOException { return null; } @Override public boolean existStorageObject(GUID storageObject) throws SQLException { return false; } @Override public void build() throws SQLException { } @Override public void storageExpansion(GUID volumeGuid) { } @Override public void deductCapacity(long deductCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity ); this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } @Override public boolean checkCapacity(long size) { long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize(); return freeSpace > size; } @Override public void increaseCapacity(long increaseCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity ); this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/TitanLocalPhysicalVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchVolume; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.MountPoint; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator; import java.io.IOException; public class TitanLocalPhysicalVolume extends ArchVolume implements LocalPhysicalVolume { private MountPoint mountPoint; private PhysicalVolumeManipulator physicalVolumeManipulator; public TitanLocalPhysicalVolume(VolumeManager volumeManager, PhysicalVolumeManipulator physicalVolumeManipulator) { super(volumeManager); this.physicalVolumeManipulator = physicalVolumeManipulator; } public TitanLocalPhysicalVolume(){} @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } @Override public MountPoint getMountPoint() { return this.mountPoint; } @Override public void setMountPoint(MountPoint mountPoint) { this.mountPoint = mountPoint; } @Override public GUID getParent() { return this.physicalVolumeManipulator.getParent( this.guid ); } @Override public void applyVolumeManage(VolumeManager volumeManager) { this.volumeManager = volumeManager; } public void setPhysicalVolumeManipulator(PhysicalVolumeManipulator 
physicalVolumeManipulator ){ this.physicalVolumeManipulator = physicalVolumeManipulator; } @Override public StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel) throws UIOException { // TitanDirectChannelReceiveEntity64 titanDirectChannelReceiveEntity64 = new TitanDirectChannelReceiveEntity64(volumeManager, storageReceiveIORequest, this.mountPoint.getMountPoint(), channel); // StorageIOResponse storageIOResponse = titanDirectChannelReceiveEntity64.receive(); // storageIOResponse.setBottomGuid( this.guid ); // // return storageIOResponse; return null; } @Override public StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, Number offset, Number endSize) throws IOException { // TitanDirectChannelReceiveEntity64 titanDirectChannelReceiveEntity64 = new TitanDirectChannelReceiveEntity64(volumeManager, storageReceiveIORequest, this.mountPoint.getMountPoint(), channel); // StorageIOResponse storageIOResponse = titanDirectChannelReceiveEntity64.receive(offset, endSize); // storageIOResponse.setBottomGuid( this.getGuid() ); // return storageIOResponse; return null; } @Override public StorageIOResponse channelExport(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel) throws IOException { // TitanDirectChannelExportEntity64 titanDirectChannelExportEntity64 = new TitanDirectChannelExportEntity64(volumeManager, storageExportIORequest,channel ); // StorageIOResponse storageIOResponse = titanDirectChannelExportEntity64.export(); // storageIOResponse.setBottomGuid( this.getGuid() ); // return storageIOResponse; return null; } @Override public StorageIOResponse channelRaid0Export(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws IOException { // TitanDirectChannelExportEntity64 titanDirectChannelExportEntity64 = new TitanDirectChannelExportEntity64(volumeManager, storageExportIORequest,channel ); // StorageIOResponse storageIOResponse = titanDirectChannelExportEntity64.export(cacheBlock, offset, endSize, buffer); // storageIOResponse.setBottomGuid( this.getGuid() ); // return storageIOResponse; return null; } @Override public StorageIOResponse receive(ReceiveEntity entity) throws IOException { this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() ); return entity.receive(); } @Override public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException { this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() ); return entity.receive( offset, endSize ); } @Override public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) throws IOException { this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() ); return entity.randomReceive( offset,endSize ); } @Override public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws IOException { this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() ); return entity.receive( cacheBlock, buffer ); } @Override public StorageIOResponse export(ExporterEntity entity) throws IOException { return entity.export(); } @Override public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) { return null; } @Override public void deductCapacity(long 
deductCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity ); this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } @Override public void increaseCapacity(long increaseCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity ); this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } @Override public boolean checkCapacity(long size) { long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize(); return freeSpace > size; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExport.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.export; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.volume.entity.Exporter; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public interface DirectExport extends Exporter { StorageIOResponse export(Chanface chanface) throws IOException; StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException; StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws IOException; StorageIOResponse export( CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.export; public interface DirectExport64 extends DirectExport { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExportEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.export; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import java.io.IOException; public interface DirectExportEntity extends ExporterEntity { StorageIOResponse export(Number offset, Number endSize) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.export; public interface DirectExportEntity64 extends DirectExportEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/TitanDirectExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.export; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import 
com.pinecone.hydra.storage.TitanStorageIOResponse; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlockStatus; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.StandardOpenOption; import java.util.zip.CRC32; public class TitanDirectExport64 implements DirectExport64{ protected VolumeManager volumeManager; protected StorageExportIORequest storageExportIORequest; public TitanDirectExport64( DirectExportEntity64 entity ){ this.volumeManager = entity.getVolumeManager(); this.storageExportIORequest = entity.getStorageIORequest(); } @Override public StorageIOResponse export( Chanface chanface ) throws IOException { String sourceName = this.storageExportIORequest.getSourceName(); long size = this.storageExportIORequest.getSize().longValue(); TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse(); long parityCheck = 0; long checksum = 0; File file = new File(sourceName); try (FileChannel frameChannel = FileChannel.open(file.toPath(), StandardOpenOption.READ)) { ByteBuffer buffer = ByteBuffer.allocate((int) size); frameChannel.read(buffer); buffer.flip(); CRC32 crc = new CRC32(); while ( buffer.hasRemaining() ) { byte b = buffer.get(); parityCheck += Bytes.calculateParity( b ); checksum += b & 0xFF; crc.update(b); } buffer.rewind(); chanface.write(buffer); buffer.clear(); titanMiddleStorageObject.setChecksum( checksum ); titanMiddleStorageObject.setCrc32( crc ); titanMiddleStorageObject.setParityCheck( parityCheck ); } return titanMiddleStorageObject; } @Override public StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException { String sourceName = this.storageExportIORequest.getSourceName(); long size = endSize.longValue(); TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse(); long parityCheck = 0; long checksum = 0; File file = new File(sourceName); try (FileChannel frameChannel = FileChannel.open(file.toPath(), StandardOpenOption.READ)) { long actualSize = Math.min(size, frameChannel.size() - offset.longValue()); ByteBuffer buffer = ByteBuffer.allocate((int) actualSize); frameChannel.read(buffer, offset.longValue()); buffer.flip(); CRC32 crc = new CRC32(); while (buffer.hasRemaining()) { byte b = buffer.get(); parityCheck += Bytes.calculateParity(b); checksum += b & 0xFF; crc.update(b); } buffer.rewind(); chanface.write(buffer); buffer.clear(); titanMiddleStorageObject.setChecksum(checksum); titanMiddleStorageObject.setCrc32(crc); titanMiddleStorageObject.setParityCheck(parityCheck); } return titanMiddleStorageObject; } @Override public StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws IOException { return null; } @Override public StorageIOResponse export( CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) { String sourceName = this.storageExportIORequest.getSourceName(); TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse(); long parityCheck = 0; long checksum = 0; File file = new File(sourceName); try ( FileChannel frameChannel = FileChannel.open(file.toPath(), StandardOpenOption.READ) ) { long bufferSize = endSize.longValue(); /* Seek to the requested offset in the source file */ frameChannel.position(offset.longValue()); /* Read up to endSize bytes */ ByteBuffer byteBuffer = ByteBuffer.allocate(endSize.intValue()); int read = frameChannel.read(byteBuffer); byteBuffer.flip(); /* Copy the bytes actually read into buffer, starting at the cache block's byte-start position */ if( read < bufferSize ){ bufferSize = read; } Debug.trace( "Start position: " + offset.longValue() + ", end size: " + bufferSize + ", cache size: " + endSize.intValue() ); byteBuffer.get(buffer, cacheBlock.getByteStart().intValue(), (int) bufferSize); cacheBlock.setStatus( CacheBlockStatus.Full ); cacheBlock.setValidByteStart( cacheBlock.getByteStart().intValue() ); cacheBlock.setValidByteEnd( cacheBlock.getByteStart().intValue()+bufferSize ); /* Compute the checksum, CRC32 and parity; bounded by bufferSize, not endSize, so stale bytes after a short read are excluded */ CRC32 crc = new CRC32(); for (int i = 0; i < (int) bufferSize; i++) { byte b = buffer[cacheBlock.getByteStart().intValue()+i]; parityCheck += Bytes.calculateParity(b); checksum += b & 0xFF; crc.update(b); } titanMiddleStorageObject.setChecksum(checksum); titanMiddleStorageObject.setCrc32(crc); titanMiddleStorageObject.setParityCheck(parityCheck); } catch (IOException e) { throw new RuntimeException(e); } return titanMiddleStorageObject; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/TitanDirectExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.export; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchExportEntity; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public class TitanDirectExportEntity64 extends ArchExportEntity implements DirectExportEntity64{ protected DirectExport64 directExport; public TitanDirectExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel) { super(volumeManager, storageExportIORequest, channel); this.directExport = new TitanDirectExport64( this ); } @Override public StorageIOResponse export() throws IOException { return this.directExport.export(this.channel); } @Override public StorageIOResponse export(Number offset, Number endSize) throws IOException { return this.directExport.export( this.channel, offset, endSize ); } @Override public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException { return this.directExport.export(cacheBlock, offset, endSize, buffer); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceive.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.receive; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.volume.entity.Receiver; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public interface DirectReceive extends Receiver { StorageIOResponse receive( Chanface chanface, CacheBlock cacheBlock, byte[] buffer ) throws IOException; StorageIOResponse randomReceive( Chanface chanface, Number offset, Number endSize ) throws IOException; } ================================================ FILE:
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.receive; public interface DirectReceive64 extends DirectReceive { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.receive; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; public interface DirectReceiveEntity extends ReceiveEntity { String getDestDirPath(); void setDestDirPath( String destDirPath ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.receive; public interface DirectReceiveEntity64 extends DirectReceiveEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/TitanDirectReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.physical.receive; import com.pinecone.framework.util.Bytes; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageNaming; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.TitanStorageIOResponse; import com.pinecone.hydra.storage.TitanStorageNaming; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.zip.CRC32; public class TitanDirectReceive64 implements DirectReceive64{ protected StorageNaming storageNaming; protected StorageReceiveIORequest storageReceiveIORequest; protected VolumeManager volumeManager; protected String destDirPath; public TitanDirectReceive64( DirectReceiveEntity entity ){ this.storageReceiveIORequest = entity.getReceiveStorageObject(); this.volumeManager = entity.getVolumeManager(); this.destDirPath = entity.getDestDirPath(); this.storageNaming = new TitanStorageNaming(); } @Override public StorageIOResponse receive(Chanface chanface) throws IOException { return this.receiveWithOffsetAndSize( chanface, 0, this.storageReceiveIORequest.getSize().intValue() ); } @Override public StorageIOResponse receive( Chanface chanface, Number offset, Number endSize) throws IOException { return this.receiveWithOffsetAndSize( chanface,offset.intValue(),endSize.intValue() ); } @Override public StorageIOResponse randomReceive(Chanface chanface,Number offset, Number endSize) throws IOException { long startPosition = offset.longValue(); long endPosition = startPosition + endSize.longValue(); TitanStorageIOResponse titanMiddleStorageObject = new 
        titanMiddleStorageObject.setObjectGuid(storageReceiveIORequest.getStorageObjectGuid());
        URI uri;
        try {
            uri = new URI(this.destDirPath);
        } catch (URISyntaxException e) {
            throw new IOException(e);
        }
        Path path = Paths.get(uri);
        String sourceName = this.storageNaming.naming( this.storageReceiveIORequest.getName(), this.storageReceiveIORequest.getStorageObjectGuid().toString() );
        path = path.resolve(sourceName);
        ByteBuffer buffer = ByteBuffer.allocate(1024);
        try (FileChannel chunkChannel = FileChannel.open(path, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
            while (startPosition < endPosition && chanface.read(buffer) != -1) {
                buffer.flip();
                chunkChannel.position(startPosition);
                int write = chunkChannel.write(buffer);
                startPosition += write;
                buffer.clear();
            }
        }
        titanMiddleStorageObject.setSourceName(path.toString());
        return titanMiddleStorageObject;
    }

    @Override
    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException {
        return this.receiveWithOffsetAndSize( randomAccessChanface, 0, this.storageReceiveIORequest.getSize().intValue() );
    }

    @Override
    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException {
        return this.receiveWithOffsetAndSize( randomAccessChanface, offset.intValue(), endSize.intValue() );
    }

    @Override
    public StorageIOResponse receive(Chanface chanface, CacheBlock cacheBlock, byte[] buffer) throws IOException {
        int start = cacheBlock.getValidByteStart().intValue();
        int end = cacheBlock.getValidByteEnd().intValue();
        if (start < 0 || end > buffer.length || start >= end) {
            throw new IllegalArgumentException("Invalid cacheBlock range or buffer size.");
        }
        int size = end - start;
        int parityCheck = 0;
        long checksum = 0;
        CRC32 crc = new CRC32();
        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();
        titanMiddleStorageObject.setObjectGuid(storageReceiveIORequest.getStorageObjectGuid());
        String sourceName = this.storageNaming.naming(storageReceiveIORequest.getName(), storageReceiveIORequest.getStorageObjectGuid().toString());
        Path path = Paths.get(destDirPath, sourceName);
        Files.createDirectories(path.getParent());
        try (OutputStream outputStream = Files.newOutputStream(path, StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
            for (int i = start; i < end; i++) {
                byte b = buffer[i];
                parityCheck += Bytes.calculateParity(b);
                checksum += b & 0xFF;
                crc.update(b);
            }
            outputStream.write(buffer, start, size);
        } catch (IOException e) {
            throw new IOException("Failed to write to file: " + path.toString(), e);
        }
        titanMiddleStorageObject.setChecksum(checksum);
        titanMiddleStorageObject.setCrc32(crc);
        titanMiddleStorageObject.setParityCheck(parityCheck);
        titanMiddleStorageObject.setSourceName(path.toString());
        return titanMiddleStorageObject;
    }

    private StorageIOResponse receiveWithOffsetAndSize(Chanface chanface, long offset, int size) throws IOException {
        int parityCheck = 0;
        long checksum = 0;
        //ByteBuffer buffer = ByteBuffer.allocateDirect(size);
        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();
        titanMiddleStorageObject.setObjectGuid(storageReceiveIORequest.getStorageObjectGuid());
        //buffer.clear();
        ByteBuffer[] lpBuf = new ByteBuffer[ 1 ];
        chanface.read( (out)->{ lpBuf[0] = out; }, size, offset );
        ByteBuffer buffer = lpBuf[ 0 ];
        buffer.flip();
        CRC32 crc = new CRC32();
        while (buffer.hasRemaining()) {
            byte b = buffer.get();
            parityCheck += Bytes.calculateParity(b);
            checksum += b & 0xFF;
            crc.update(b);
        }
        URI uri = null;
        try {
            uri = new URI( this.destDirPath );
        } catch ( URISyntaxException e ) {
            throw new IOException(e);
        }
        Path path = Paths.get(uri);
        String sourceName = this.storageNaming.naming( this.storageReceiveIORequest.getName(), this.storageReceiveIORequest.getStorageObjectGuid().toString() );
        path = path.resolve(sourceName);
        try (FileChannel chunkChannel = FileChannel.open(path, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND)) {
            buffer.rewind();
            chunkChannel.position(chunkChannel.size()); // write starting at the end of the file (append)
            chunkChannel.write(buffer);
        }
        titanMiddleStorageObject.setChecksum(checksum);
        titanMiddleStorageObject.setCrc32(crc);
        titanMiddleStorageObject.setParityCheck(parityCheck);
        titanMiddleStorageObject.setSourceName(path.toString());
        return titanMiddleStorageObject;
    }
}
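================================================
SKETCH: positional channel-to-file copy (illustrative, not repository code)
================================================
randomReceive above seeks the destination FileChannel to an explicit position before every write instead of appending. A minimal sketch of the same loop against plain java.nio types; ReadableByteChannel is a hypothetical stand-in for the repository's Chanface.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public final class PositionalCopySketch {

    /** Copies bytes from src into dest at absolute positions [start, end); returns bytes written. */
    public static long copyRange( ReadableByteChannel src, Path dest, long start, long end ) throws IOException {
        long position = start;
        ByteBuffer buffer = ByteBuffer.allocate( 1024 );
        try ( FileChannel out = FileChannel.open( dest, StandardOpenOption.CREATE, StandardOpenOption.WRITE ) ) {
            while ( position < end && src.read( buffer ) != -1 ) {
                buffer.flip();
                out.position( position );          // seek before every write, as randomReceive does
                position += out.write( buffer );   // like the original, the final buffer may run past end
                buffer.clear();
            }
        }
        return position - start;
    }
}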
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/TitanDirectReceiveEntity64.java
================================================
package com.pinecone.hydra.storage.volume.entity.local.physical.receive;

import com.pinecone.hydra.storage.io.Chanface;
import com.pinecone.hydra.storage.StorageIOResponse;
import com.pinecone.hydra.storage.StorageReceiveIORequest;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;
import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;

import java.io.IOException;

public class TitanDirectReceiveEntity64 extends ArchReceiveEntity implements DirectReceiveEntity64 {
    protected String destDirPath;
    protected DirectReceive64 directReceive;
    protected Chanface chanface;

    public TitanDirectReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, String destDirPath) {
        super(volumeManager, storageReceiveIORequest, channel);
        this.destDirPath = destDirPath;
        this.directReceive = new TitanDirectReceive64( this );
        this.chanface = channel;
    }

    @Override
    public StorageIOResponse receive() throws IOException {
        return this.directReceive.receive( this.chanface );
    }

    @Override
    public StorageIOResponse receive(Number offset, Number endSize) throws IOException {
        return this.directReceive.receive( this.chanface, offset, endSize );
    }

    @Override
    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws IOException {
        return this.directReceive.receive( this.chanface, cacheBlock, buffer );
    }

    @Override
    public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException {
        return this.directReceive.randomReceive( this.chanface, offset, endSize );
    }

    @Override
    public String getDestDirPath() {
        return this.destDirPath;
    }

    @Override
    public void setDestDirPath(String destDirPath) {
        this.destDirPath = destDirPath;
    }
}

================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/TitanLocalSimpleVolume.java
================================================
package com.pinecone.hydra.storage.volume.entity.local.simple;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.homotype.BeanJSONEncoder;
import com.pinecone.framework.util.rdb.MappedExecutor;
import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;
import com.pinecone.hydra.storage.StorageIOResponse;
import com.pinecone.hydra.storage.io.UIOException;
import com.pinecone.hydra.storage.volume.VolumeConfig;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.ArchLogicVolume; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator; import java.io.IOException; import java.sql.SQLException; import java.util.List; public class TitanLocalSimpleVolume extends ArchLogicVolume implements LocalSimpleVolume { protected SimpleVolumeManipulator simpleVolumeManipulator; protected MappedExecutor mappedExecutor; public TitanLocalSimpleVolume(VolumeManager volumeManager, SimpleVolumeManipulator simpleVolumeManipulator) { super(volumeManager); this.simpleVolumeManipulator = simpleVolumeManipulator; } public TitanLocalSimpleVolume( VolumeManager volumeManager){ super(volumeManager); } public TitanLocalSimpleVolume(){ } public void setSimpleVolumeManipulator( SimpleVolumeManipulator simpleVolumeManipulator ){ this.simpleVolumeManipulator = simpleVolumeManipulator; } @Override public List queryChildren() { return super.queryChildren(); } @Override public void extendLogicalVolume(GUID physicalGuid) { this.simpleVolumeManipulator.extendLogicalVolume( this.guid, physicalGuid ); } @Override public List listPhysicalVolume() { return this.simpleVolumeManipulator.listPhysicalVolume( this.guid ); } @Override public StorageIOResponse receive(ReceiveEntity entity) throws IOException { StorageIOResponse response = entity.receive(); try { this.saveMate( response, entity.getReceiveStorageObject().getName() ); } catch (SQLException e) { throw new UIOException(e); } return response ; } @Override public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException{ StorageIOResponse response = entity.receive( offset, endSize ); try { this.saveMate( response, entity.getReceiveStorageObject().getName() ); } catch (SQLException e) { throw new UIOException(e); } return response; } @Override public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) throws IOException { StorageIOResponse response = entity.randomReceive( offset,endSize ); try { this.saveMate( response, entity.getReceiveStorageObject().getName() ); } catch (SQLException e) { throw new UIOException(e); } return response; } @Override public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws IOException { StorageIOResponse response = entity.receive(cacheBlock, buffer); try { this.saveMate( response, entity.getReceiveStorageObject().getName() ); } catch (SQLException e) { throw new UIOException(e); } return response; } @Override public StorageIOResponse export(ExporterEntity entity) throws IOException { return entity.export(); } @Override public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) throws IOException { return entity.export( offset,endSize ); } @Override public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException { return entity.export( cacheBlock, offset, endSize, buffer ); } @Override public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) throws UIOException { return null; } @Override public StorageIOResponse 
export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) { return null; } @Override public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) throws UIOException { return null; } @Override public void setVolumeTree( VolumeManager volumeManager ) { this.volumeManager = volumeManager; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } @Override public boolean existStorageObject(GUID storageObject) throws SQLException { return this.kenVolumeFileSystem.existStorageObject( this.mappedExecutor, storageObject ); } private synchronized void saveMate(StorageIOResponse storageIOResponse, String storageObjectName) throws SQLException { if( !kenVolumeFileSystem.existStorageObject( this.mappedExecutor, storageIOResponse.getObjectGuid() ) ){ this.kenVolumeFileSystem.insertSimpleTargetMappingSoloRecord( storageIOResponse.getObjectGuid(), storageObjectName, storageIOResponse.getSourceName(), this.mappedExecutor ); } } @Override public void build() throws SQLException { VolumeConfig config = this.volumeManager.getConfig(); PhysicalVolume smallestCapacityPhysicalVolume = this.volumeManager.getSmallestCapacityPhysicalVolume(); String url = smallestCapacityPhysicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.guid + config.getSqliteFileExtension(); SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url); this.mappedExecutor = sqLiteExecutor; this.kenVolumeFileSystem.createSimpleTargetMappingTab( sqLiteExecutor ); this.volumeManager.put( this ); this.kenVolumeFileSystem.insertSimpleTargetMappingTab( smallestCapacityPhysicalVolume.getGuid(), this.getGuid() ); } @Override public void storageExpansion(GUID volumeGuid) { this.extendLogicalVolume( volumeGuid ); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(volumeGuid); this.simpleVolumeManipulator.updateDefinitionCapacity( this.guid, physicalVolume.getVolumeCapacity().getDefinitionCapacity() ); } @Override public SQLiteExecutor getSQLiteExecutor() throws SQLException { VolumeConfig config = this.volumeManager.getConfig(); GUID physicsGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.guid); if( physicsGuid == null ){ return null; } PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsGuid); String url = physicalVolume.getMountPoint().getMountPoint()+ config.getPathSeparator() +this.guid+ config.getSqliteFileExtension(); return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url); } @Override public void deductCapacity(long deductCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity ); this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } @Override public void increaseCapacity(long increaseCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity ); this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } @Override public boolean checkCapacity(long size) { long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize(); return freeSpace > size; } public void assembleSQLiteExecutor() throws SQLException { this.mappedExecutor = this.getSQLiteExecutor(); } } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExport.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.export; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.entity.Exporter; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public interface SimpleExport extends Exporter { StorageIOResponse export(Chanface chanface) throws IOException; StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException; StorageIOResponse export(Chanface chanface,CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer ) throws UIOException; StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.export; public interface SimpleExport64 extends SimpleExport { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExportEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.export; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; public interface SimpleExportEntity extends ExporterEntity { SimpleVolume getSimpleVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.export; public interface SimpleExportEntity64 extends SimpleExportEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/TitanSimpleExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.export; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.physical.export.TitanDirectExportEntity64; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import java.io.IOException; import java.sql.SQLException; public class TitanSimpleExport64 implements SimpleExport64{ private VolumeManager volumeManager; private StorageExportIORequest storageExportIORequest; private SimpleVolume simpleVolume; private KenVolumeFileSystem 
kenVolumeFileSystem; public TitanSimpleExport64( SimpleExportEntity entity ){ this.volumeManager = entity.getVolumeManager(); this.storageExportIORequest = entity.getStorageIORequest(); this.simpleVolume = entity.getSimpleVolume(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } @Override public StorageIOResponse export(Chanface chanface) throws IOException { try { SQLiteExecutor sqLiteExecutor = simpleVolume.getSQLiteExecutor(); String sourceName = this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(this.storageExportIORequest.getStorageObjectGuid(), sqLiteExecutor); this.storageExportIORequest.setSourceName(sourceName); TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface ); return exportEntity.export(); } catch (SQLException e) { throw new UIOException(e); } } @Override public StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException { try { SQLiteExecutor sqLiteExecutor = simpleVolume.getSQLiteExecutor(); String sourceName = this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(this.storageExportIORequest.getStorageObjectGuid(), sqLiteExecutor); this.storageExportIORequest.setSourceName(sourceName); TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface ); return exportEntity.export( offset,endSize ); } catch (SQLException e) { throw new UIOException(e); } } @Override public StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException { return null; } @Override public StorageIOResponse export(Chanface chanface,CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException { TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface ); return exportEntity.export( cacheBlock, offset, endSize, buffer ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/TitanSimpleExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.export; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchExportEntity; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public class TitanSimpleExportEntity64 extends ArchExportEntity implements SimpleExportEntity64{ protected SimpleExport64 simpleExportEntity; protected SimpleVolume simpleVolume; public TitanSimpleExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, SimpleVolume simpleVolume) { super(volumeManager, storageExportIORequest, channel); this.simpleVolume = simpleVolume; this.simpleExportEntity = new TitanSimpleExport64( this ); } @Override public StorageIOResponse export() throws IOException { return this.simpleExportEntity.export(this.channel); } @Override public StorageIOResponse export(Number offset, Number endSize) throws IOException { return this.simpleExportEntity.export( this.channel, offset, 
endSize ); } @Override public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException { return this.simpleExportEntity.export( this.channel, cacheBlock, offset, endSize, buffer ); } @Override public SimpleVolume getSimpleVolume() { return this.simpleVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceive.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.volume.entity.Receiver; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public interface SimpleReceive extends Receiver { StorageIOResponse receive(Chanface chanface,CacheBlock cacheBlock, byte[] buffer ) throws IOException; StorageIOResponse receive(RandomAccessChanface randomAccessChanface, CacheBlock cacheBlock, byte[] buffer ) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; public interface SimpleReceive64 extends SimpleReceive { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; public interface SimpleReceiveEntity extends ReceiveEntity { SimpleVolume getSimpleVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; public interface SimpleReceiveEntity64 extends SimpleReceiveEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiver.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; import com.pinecone.hydra.storage.volume.entity.Receiver; public interface SimpleReceiver extends Receiver { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiverEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; public interface SimpleReceiverEntity extends ReceiveEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/TitanSimpleReceive64.java 
================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.physical.receive.TitanDirectReceiveEntity64; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; import java.util.List; public class TitanSimpleReceive64 implements SimpleReceive64{ private SimpleVolume simpleVolume; private VolumeManager volumeManager; private StorageReceiveIORequest storageReceiveIORequest; public TitanSimpleReceive64( SimpleReceiveEntity entity ){ this.simpleVolume = entity.getSimpleVolume(); this.volumeManager = entity.getVolumeManager(); this.storageReceiveIORequest = entity.getReceiveStorageObject(); } @Override public StorageIOResponse receive(Chanface chanface) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() ); return physicalVolume.receive( receiveEntity ); } @Override public StorageIOResponse receive(Chanface chanface,Number offset, Number endSize) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() ); return physicalVolume.receive( receiveEntity, offset, endSize ); } @Override public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() ); return physicalVolume.randomReceive( receiveEntity, offset, endSize ); } @Override public StorageIOResponse receive(Chanface chanface,CacheBlock cacheBlock, byte[] buffer) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() ); return physicalVolume.receive( receiveEntity, cacheBlock, buffer ); } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, randomAccessChanface, 
physicalVolume.getMountPoint().getMountPoint() ); return physicalVolume.receive( receiveEntity ); } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, randomAccessChanface, physicalVolume.getMountPoint().getMountPoint() ); return physicalVolume.receive( receiveEntity, offset, endSize ); } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, CacheBlock cacheBlock, byte[] buffer) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, randomAccessChanface, physicalVolume.getMountPoint().getMountPoint() ); return physicalVolume.receive( receiveEntity, cacheBlock, buffer ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/TitanSimpleReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public class TitanSimpleReceiveEntity64 extends ArchReceiveEntity implements SimpleReceiveEntity64{ protected SimpleVolume simpleVolume; protected SimpleReceive simpleReceive; protected Chanface chanface; public TitanSimpleReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, SimpleVolume volume) { super(volumeManager, storageReceiveIORequest, channel); this.simpleVolume = volume; this.simpleReceive = new TitanSimpleReceive64( this ); this.chanface = channel; } @Override public StorageIOResponse receive() throws IOException { return this.simpleReceive.receive(this.chanface); } @Override public StorageIOResponse receive(Number offset, Number endSize) throws IOException{ return this.simpleReceive.receive(this.chanface, offset, endSize ); } @Override public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException { return this.simpleReceive.randomReceive( this.chanface, offset, endSize ); } @Override public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws IOException { return this.simpleReceive.receive(this.chanface, cacheBlock, buffer ); } @Override public SimpleVolume getSimpleVolume() { return this.simpleVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiver.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel; import 
com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiver; import java.io.IOException; public interface SimpleChannelReceiver extends SimpleReceiver { StorageIOResponse channelReceive( ) throws UIOException; StorageIOResponse channelReceive(Number offset, Number endSize) throws IOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiver64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel; public interface SimpleChannelReceiver64 extends SimpleChannelReceiver{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiverEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiverEntity; public interface SimpleChannelReceiverEntity extends SimpleReceiverEntity { Chanface getChannel(); void setChannel( Chanface channel ); SimpleVolume getSimpleVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiverEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel; public interface SimpleChannelReceiverEntity64 extends SimpleChannelReceiverEntity{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/TitanSimpleChannelReceiver64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import java.io.IOException; import java.util.List; public class TitanSimpleChannelReceiver64 implements SimpleChannelReceiver64{ private SimpleVolume simpleVolume; private Chanface fileChannel; private VolumeManager volumeManager; private StorageReceiveIORequest storageReceiveIORequest; public TitanSimpleChannelReceiver64( SimpleChannelReceiverEntity entity ){ this.volumeManager = entity.getVolumeManager(); this.simpleVolume = entity.getSimpleVolume(); this.fileChannel = entity.getChannel(); this.storageReceiveIORequest = entity.getReceiveStorageObject(); } @Override public StorageIOResponse channelReceive() throws UIOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); return 
physicalVolume.channelReceive( this.volumeManager,this.storageReceiveIORequest,this.fileChannel ); } @Override public StorageIOResponse channelReceive(Number offset, Number endSize) throws IOException { List guids = simpleVolume.listPhysicalVolume(); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); return physicalVolume.channelReceive( this.volumeManager,this.storageReceiveIORequest,this.fileChannel, offset,endSize ); } // @Override // public StorageIOResponse receive() throws UIOException { // return null; // } // // @Override // public StorageIOResponse receive(Number offset, Number endSize) throws UIOException { // return null; // } @Override public StorageIOResponse receive(Chanface chanface) throws IOException { return null; } @Override public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws IOException { return null; } @Override public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException{ return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/TitanSimpleChannelReceiverEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public class TitanSimpleChannelReceiverEntity64 extends ArchReceiveEntity implements SimpleChannelReceiverEntity64{ private Chanface channel; private SimpleVolume simpleVolume; private SimpleChannelReceiver64 titanSimpleChannelReceiver64; public TitanSimpleChannelReceiverEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, SimpleVolume simpleVolume) { super(volumeManager, storageReceiveIORequest, null); this.channel = channel; this.simpleVolume = simpleVolume; this.titanSimpleChannelReceiver64 = new TitanSimpleChannelReceiver64( this ); } @Override public Chanface getChannel() { return this.channel; } @Override public void setChannel(Chanface channel) { this.channel = channel; } @Override public SimpleVolume getSimpleVolume() { return this.simpleVolume; } @Override public StorageIOResponse receive() throws UIOException { return this.titanSimpleChannelReceiver64.channelReceive(); } @Override public StorageIOResponse receive(Number offset, Number endSize) throws IOException { return this.titanSimpleChannelReceiver64.channelReceive( offset, endSize ); } @Override public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException { return null; } @Override public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException { return null; } } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiverEntity; import java.io.InputStream; public interface SimpleStreamReceiveEntity extends SimpleReceiverEntity { InputStream getStream(); void setStream( InputStream stream ); SimpleVolume getSimpleVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream; public interface SimpleStreamReceiveEntity64 extends SimpleStreamReceiveEntity{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiver.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiver; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; public interface SimpleStreamReceiver extends SimpleReceiver { StorageIOResponse streamReceive( ) throws UIOException; StorageIOResponse streamReceive(Number offset, Number endSize) throws UIOException; StorageIOResponse streamReceive( CacheBlock cacheBlock, byte[] buffer ) throws UIOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiver64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream; public interface SimpleStreamReceiver64 extends SimpleStreamReceiver { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/TitanSimpleStreamReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; import java.io.InputStream; import java.util.List; public class TitanSimpleStreamReceive64 implements SimpleStreamReceiver64{ protected SimpleVolume simpleVolume; protected InputStream stream; protected VolumeManager volumeManager; protected StorageReceiveIORequest storageReceiveIORequest; protected PhysicalVolume physicalVolume; public 
TitanSimpleStreamReceive64( SimpleStreamReceiveEntity64 entity ){ this.volumeManager = entity.getVolumeManager(); this.simpleVolume = entity.getSimpleVolume(); this.stream = entity.getStream(); this.storageReceiveIORequest = entity.getReceiveStorageObject(); List guids = this.simpleVolume.listPhysicalVolume(); this.physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0)); } @Override public StorageIOResponse streamReceive() { // TitanDirectStreamReceiveEntity64 titanDirectStreamReceiveEntity64 = new TitanDirectStreamReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.stream, this.physicalVolume.getMountPoint().getMountPoint() ); // return this.physicalVolume.receive( titanDirectStreamReceiveEntity64 ); return null; } @Override public StorageIOResponse streamReceive(Number offset, Number endSize) { // TitanDirectStreamReceiveEntity64 titanDirectStreamReceiveEntity64 = new TitanDirectStreamReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.stream, this.physicalVolume.getMountPoint().getMountPoint() ); // return this.physicalVolume.receive( titanDirectStreamReceiveEntity64, offset, endSize ); return null; } @Override public StorageIOResponse streamReceive(CacheBlock cacheBlock, byte[] buffer) { // TitanDirectStreamReceiveEntity64 titanDirectStreamReceiveEntity64 = new TitanDirectStreamReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.stream, this.physicalVolume.getMountPoint().getMountPoint() ); // return this.physicalVolume.receive( titanDirectStreamReceiveEntity64, cacheBlock, buffer ); return null; } @Override public StorageIOResponse receive(Chanface chanface) throws IOException { return null; } @Override public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws IOException { return null; } @Override public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException{ return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException { return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/TitanSimpleStreamReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; import java.io.InputStream; public class TitanSimpleStreamReceiveEntity64 extends ArchReceiveEntity implements SimpleStreamReceiveEntity64{ protected InputStream stream; protected SimpleVolume simpleVolume; protected SimpleStreamReceiver64 streamReceiver; public TitanSimpleStreamReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, InputStream stream, SimpleVolume simpleVolume ) { super(volumeManager, storageReceiveIORequest,null); this.stream = stream; this.simpleVolume = simpleVolume; 
this.streamReceiver = new TitanSimpleStreamReceive64( this ); } @Override public StorageIOResponse receive() throws UIOException { return this.streamReceiver.streamReceive(); } @Override public StorageIOResponse receive(Number offset, Number endSize) throws UIOException { return this.streamReceiver.streamReceive( offset, endSize ); } @Override public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException { return null; } @Override public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException { return this.streamReceiver.streamReceive( cacheBlock, buffer ); } @Override public InputStream getStream() { return this.stream; } @Override public void setStream(InputStream stream) { this.stream = stream; } @Override public SimpleVolume getSimpleVolume() { return this.simpleVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/TitanLocalSpannedVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.spanned; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchLogicVolume; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator; import java.io.IOException; import java.sql.SQLException; import java.util.List; public class TitanLocalSpannedVolume extends ArchLogicVolume implements LocalSpannedVolume { private SpannedVolumeManipulator spannedVolumeManipulator; public TitanLocalSpannedVolume(VolumeManager volumeManager, SpannedVolumeManipulator spannedVolumeManipulator) { super(volumeManager); this.spannedVolumeManipulator = spannedVolumeManipulator; } public TitanLocalSpannedVolume( VolumeManager volumeManager){ super(volumeManager); } public TitanLocalSpannedVolume(){ } public void setSpannedVolumeManipulator( SpannedVolumeManipulator spannedVolumeManipulator ){ this.spannedVolumeManipulator = spannedVolumeManipulator; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public void extendLogicalVolume(GUID physicalGuid) { } @Override public List listPhysicalVolume() { return null; } @Override public void setVolumeTree(VolumeManager volumeManager) { this.volumeManager = volumeManager; } @Override public StorageIOResponse receive(ReceiveEntity entity) throws IOException { return entity.receive(); } @Override public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException { return entity.receive( offset, endSize ); } @Override public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) { return null; } @Override public StorageIOResponse receive(ReceiveEntity entity, CacheBlock 
cacheBlock, byte[] buffer) throws UIOException {
    return null;
}

@Override
public StorageIOResponse export(ExporterEntity entity) throws IOException {
    return entity.export();
}

@Override
public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) {
    return null;
}

@Override
public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {
    return entity.export( cacheBlock, offset, endSize, buffer );
}

@Override
public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) throws UIOException {
    return null;
}

@Override
public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) {
    return null;
}

@Override
public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) throws UIOException {
    return null;
}

@Override
public String toString() {
    return this.toJSONString();
}

@Override
public boolean existStorageObject(GUID storageObject) throws SQLException {
    List<LogicVolume> volumes = this.queryChildren();
    for( LogicVolume volume : volumes ){
        if ( volume.existStorageObject( storageObject ) ){
            return true;
        }
    }
    return false;
}

// Build pattern: assembled first, executed last.
@Override
public void build() throws SQLException {
    VolumeConfig config = this.volumeManager.getConfig();
    PhysicalVolume smallestCapacityPhysicalVolume = this.volumeManager.getSmallestCapacityPhysicalVolume();
    String url = smallestCapacityPhysicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.guid + config.getSqliteFileExtension();
    SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);
    this.kenVolumeFileSystem.creatSpanLinkedVolumeTable( sqLiteExecutor );
    this.kenVolumeFileSystem.createSpannedIndexTable( sqLiteExecutor );
    List<LogicVolume> volumes = this.queryChildren();
    int index = 0;
    for( LogicVolume volume : volumes ){
        this.kenVolumeFileSystem.insertSpannedIndexTable( sqLiteExecutor, index, volume.getGuid() );
        index++;
    }
    this.kenVolumeFileSystem.insertSimpleTargetMappingTab( smallestCapacityPhysicalVolume.getGuid(), this.getGuid() );
    this.volumeManager.put( this );
}

@Override
public void storageExpansion(GUID volumeGuid) {
    // TODO: spanned-volume expansion still has problems.
    this.volumeManager.storageExpansion( this.getGuid(), volumeGuid );
    LogicVolume logicVolume = this.volumeManager.get(volumeGuid);
    this.spannedVolumeManipulator.updateDefinitionCapacity( this.guid, logicVolume.getVolumeCapacity().getDefinitionCapacity() );
}

@Override
public void deductCapacity(long deductCapacity) {
    this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity );
    this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );
}

@Override
public void increaseCapacity(long increaseCapacity) {
    this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity );
    this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );
}

@Override
public boolean checkCapacity(long size) {
    long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize();
    return freeSpace > size;
}
}
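================================================
SKETCH: volume capacity bookkeeping (illustrative, not repository code)
================================================
The simple and spanned volumes above account for usage the same way: usedSize grows toward definitionCapacity, and checkCapacity compares the remaining free space against an incoming size. Note the naming: deductCapacity deducts from free space (so it increases usedSize), while increaseCapacity gives space back. A minimal sketch of that accounting, with hypothetical names:

public final class CapacitySketch {
    private final long definitionCapacity; // declared size of the volume
    private long usedSize;                 // bytes currently consumed

    public CapacitySketch( long definitionCapacity ) {
        this.definitionCapacity = definitionCapacity;
    }

    /** Mirrors deductCapacity: consuming space raises usedSize. */
    public void deduct( long bytes ) {
        this.usedSize += bytes;
    }

    /** Mirrors increaseCapacity: releasing space lowers usedSize. */
    public void release( long bytes ) {
        this.usedSize -= bytes;
    }

    /** Mirrors checkCapacity: true only when strictly more free space remains than requested. */
    public boolean fits( long bytes ) {
        return ( this.definitionCapacity - this.usedSize ) > bytes;
    }
}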
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExport.java
================================================
package com.pinecone.hydra.storage.volume.entity.local.spanned.export;

import com.pinecone.hydra.storage.io.Chanface;
import com.pinecone.hydra.storage.RandomAccessChanface;
import com.pinecone.hydra.storage.StorageIOResponse;
import com.pinecone.hydra.storage.io.UIOException;
import com.pinecone.hydra.storage.volume.entity.Exporter;

public interface SpannedExport extends Exporter {
    StorageIOResponse export(Chanface chanface) throws UIOException;
    StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException;
}

================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExport64.java
================================================
package com.pinecone.hydra.storage.volume.entity.local.spanned.export;

public interface SpannedExport64 extends SpannedExport {
}

================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExportEntity.java
================================================
package com.pinecone.hydra.storage.volume.entity.local.spanned.export;

import com.pinecone.hydra.storage.volume.entity.ExporterEntity;
import com.pinecone.hydra.storage.volume.entity.SpannedVolume;

public interface SpannedExportEntity extends ExporterEntity {
    SpannedVolume getSpannedVolume();
}

================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExportEntity64.java
================================================
package com.pinecone.hydra.storage.volume.entity.local.spanned.export;

public interface SpannedExportEntity64 extends SpannedExportEntity {
}
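================================================
SKETCH: spanned-volume routing (illustrative, not repository code)
================================================
TitanSpannedExport64, next, resolves which child volume holds a storage object in two steps: the span-linked table is consulted first for an explicit target, and on a miss the object GUID is hashed against the child count (hashStorageObjectID) to pick a stable index. A minimal sketch of that resolution order; the repository's hash function is not shown, so Math.floorMod over hashCode stands in.

import java.util.List;
import java.util.Map;

public final class SpannedRoutingSketch<K, V> {
    private final Map<K, V> linkedTable; // explicit overrides, like the span-linked volume table
    private final List<V> children;      // child volumes in spanned-index order

    public SpannedRoutingSketch( Map<K, V> linkedTable, List<V> children ) {
        this.linkedTable = linkedTable;
        this.children = children;
    }

    /** Explicit mapping first; otherwise hash the key to a stable child index. */
    public V resolve( K objectId ) {
        V target = this.linkedTable.get( objectId );
        if ( target != null ) {
            return target;
        }
        int idx = Math.floorMod( objectId.hashCode(), this.children.size() );
        return this.children.get( idx );
    }
}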
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/TitanSpannedExport64.java
================================================
package com.pinecone.hydra.storage.volume.entity.local.spanned.export;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;
import com.pinecone.hydra.storage.io.Chanface;
import com.pinecone.hydra.storage.RandomAccessChanface;
import com.pinecone.hydra.storage.StorageExportIORequest;
import com.pinecone.hydra.storage.StorageIOResponse;
import com.pinecone.hydra.storage.io.UIOException;
import com.pinecone.hydra.storage.volume.VolumeConfig;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.LogicVolume;
import com.pinecone.hydra.storage.volume.entity.PhysicalVolume;
import com.pinecone.hydra.storage.volume.entity.SimpleVolume;
import com.pinecone.hydra.storage.volume.entity.SpannedVolume;
import com.pinecone.hydra.storage.volume.entity.local.physical.export.TitanDirectExportEntity64;
import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;
import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

public class TitanSpannedExport64 implements SpannedExport64 {
    protected VolumeManager volumeManager;
    protected StorageExportIORequest storageExportIORequest;
    protected SpannedVolume spannedVolume;
    protected OnVolumeFileSystem kenVolumeFileSystem;

    public TitanSpannedExport64( SpannedExportEntity64 entity ){
        this.spannedVolume = entity.getSpannedVolume();
        this.volumeManager = entity.getVolumeManager();
        this.storageExportIORequest = entity.getStorageIORequest();
        this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );
    }

    @Override
    public StorageIOResponse export(Chanface chanface) throws UIOException {
        // First check whether the span-linked (conflict) table already maps this file.
        try {
            List<LogicVolume> volumes = this.spannedVolume.queryChildren();
            GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.spannedVolume.getGuid());
            PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid);
            SQLiteExecutor sqLiteExecutor = this.getSQLiteExecutor(physicalVolume);
            GUID targetGuid = this.kenVolumeFileSystem.getSpanLinkedVolumeTableTargetGuid(sqLiteExecutor, this.storageExportIORequest.getStorageObjectGuid());
            if ( targetGuid == null ){
                int idx = this.kenVolumeFileSystem.hashStorageObjectID(this.storageExportIORequest.getStorageObjectGuid(), volumes.size());
                GUID tableTargetGuid = this.kenVolumeFileSystem.getSpannedIndexTableTargetGuid(sqLiteExecutor, idx);
                String source = this.getSource(tableTargetGuid, this.storageExportIORequest.getStorageObjectGuid());
                this.storageExportIORequest.setSourceName( source );
                SimpleVolume simpleVolume = (SimpleVolume) this.volumeManager.get(tableTargetGuid);
                List<GUID> guids = simpleVolume.listPhysicalVolume();
                PhysicalVolume volume = this.volumeManager.getPhysicalVolume(guids.get(0));
                TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface );
                return volume.export( exportEntity );
            }
            else {
                SimpleVolume simpleVolume = (SimpleVolume) this.volumeManager.get(targetGuid);
                List<GUID> guids = simpleVolume.listPhysicalVolume();
                PhysicalVolume volume = this.volumeManager.getPhysicalVolume(guids.get(0));
                TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface );
                return volume.export( exportEntity );
            }
        } catch (SQLException e) {
            throw new UIOException(e);
        } catch (IOException e) {
            throw new UIOException(e);
        }
    }

    @Override
    public StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException {
        return null;
    }

    private SQLiteExecutor getSQLiteExecutor( PhysicalVolume physicalVolume ) throws SQLException {
        VolumeConfig config = this.volumeManager.getConfig();
        String mountPoint = physicalVolume.getMountPoint().getMountPoint();
        String url = mountPoint + config.getPathSeparator() + this.spannedVolume.getGuid() + config.getSqliteFileExtension();
        return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);
    }

    private String getSource( GUID volumeGuid, GUID storageObjectGuid ) throws SQLException {
        VolumeConfig config = this.volumeManager.getConfig();
        GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume( volumeGuid );
        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume( physicsVolumeGuid );
        String mountPoint = physicalVolume.getMountPoint().getMountPoint();
        String url = mountPoint + config.getPathSeparator() + volumeGuid + config.getSqliteFileExtension();
        SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);
        return this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(storageObjectGuid, sqLiteExecutor);
    }
}
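================================================
SKETCH: per-volume SQLite metadata URL (illustrative, not repository code)
================================================
getSQLiteExecutor and getSource above, like the build() methods earlier, derive the location of a volume's SQLite metadata database by the same convention: mount point + path separator + volume GUID + SQLite file extension, and then hand that URL to the Kenus pool's allot(...). A minimal sketch of the convention with hypothetical values; the real separator and extension come from VolumeConfig.

public final class MetadataUrlSketch {

    /** Mirrors the URL concatenation used throughout the volume classes. */
    public static String metadataUrl( String mountPoint, String pathSeparator, String volumeGuid, String sqliteFileExtension ) {
        return mountPoint + pathSeparator + volumeGuid + sqliteFileExtension;
    }

    public static void main( String[] args ) {
        // Hypothetical values; e.g. a volume mounted at /mnt/vol0 with a .db extension.
        System.out.println( metadataUrl( "/mnt/vol0", "/", "c0ffee00-volume-guid", ".db" ) );
    }
}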
com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchExportEntity; import com.pinecone.hydra.storage.volume.entity.SpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; public class TitanSpannedExportEntity64 extends ArchExportEntity implements SpannedExportEntity64{ protected SpannedVolume spannedVolume; protected SpannedExport64 spannedExport; public TitanSpannedExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, SpannedVolume spannedVolume) { super(volumeManager, storageExportIORequest, channel); this.spannedVolume = spannedVolume; this.spannedExport = new TitanSpannedExport64( this ); } @Override public StorageIOResponse export() throws UIOException { return this.spannedExport.export(this.channel); } @Override public StorageIOResponse export(Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) { return null; } @Override public SpannedVolume getSpannedVolume() { return this.spannedVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceive.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.spanned.receive; import com.pinecone.hydra.storage.volume.entity.Receiver; public interface SpannedReceive extends Receiver { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.spanned.receive; public interface SpannedReceive64 extends SpannedReceive { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.spanned.receive; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.SpannedVolume; public interface SpannedReceiveEntity extends ReceiveEntity { SpannedVolume getSpannedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.spanned.receive; public interface SpannedReceiveEntity64 extends SpannedReceiveEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/TitanSpannedReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.spanned.receive; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import 
com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.SpannedVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import java.io.IOException; import java.sql.SQLException; import java.util.List; public class TitanSpannedReceive64 implements SpannedReceive64 { protected SpannedVolume spannedVolume; protected VolumeManager volumeManager; protected StorageReceiveIORequest storageReceiveIORequest; protected OnVolumeFileSystem kenVolumeFileSystem; public TitanSpannedReceive64( SpannedReceiveEntity64 entity ){ this.spannedVolume = entity.getSpannedVolume(); this.volumeManager = entity.getVolumeManager(); this.storageReceiveIORequest = entity.getReceiveStorageObject(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } @Override public StorageIOResponse receive(Chanface chanface) throws IOException { return this.receiveInternal(chanface, null, null ); } @Override public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws IOException { return this.receiveInternal(chanface, offset, endSize ); } @Override public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException { return this.receiveInternal(randomAccessChanface, null, null ); } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException { return this.receiveInternal(randomAccessChanface, offset, endSize ); } private long freeSpace(Volume volume ){ VolumeCapacity64 volumeCapacity = volume.getVolumeCapacity(); return volumeCapacity.getDefinitionCapacity() - volumeCapacity.getUsedSize(); } private SQLiteExecutor getSQLiteExecutor( PhysicalVolume physicalVolume ) { VolumeConfig config = this.volumeManager.getConfig(); String mountPoint = physicalVolume.getMountPoint().getMountPoint(); String url = mountPoint + config.getPathSeparator() + this.spannedVolume.getGuid() + config.getSqliteFileExtension(); return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url); } private StorageIOResponse receiveInternal(Chanface chanface, Number offset, Number endSize) throws IOException { List<LogicVolume> volumes = this.spannedVolume.queryChildren(); UnifiedTransmitConstructor constructor = new UnifiedTransmitConstructor(); GUID physicsGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume( this.spannedVolume.getGuid() ); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsGuid); SQLiteExecutor sqLiteExecutor = this.getSQLiteExecutor(physicalVolume); int idx = this.kenVolumeFileSystem.hashStorageObjectID(this.storageReceiveIORequest.getStorageObjectGuid(), volumes.size()); //Debug.trace("Storage object GUID: " + storageReceiveIORequest.getStorageObjectGuid()); GUID volumeGuid = null; try { volumeGuid =
this.kenVolumeFileSystem.getSpannedIndexTableTargetGuid(sqLiteExecutor, idx); } catch (SQLException e) { throw new UIOException(e); } //Debug.trace( volumeGuid ); LogicVolume targetVolume = this.volumeManager.get(volumeGuid); if (this.freeSpace(targetVolume) < storageReceiveIORequest.getSize().longValue()) { for (LogicVolume volume : volumes) { if (this.freeSpace(volume) > storageReceiveIORequest.getSize().longValue()) { try { this.kenVolumeFileSystem.insertSpanLinkedVolumeTable(sqLiteExecutor, idx, storageReceiveIORequest.getStorageObjectGuid(), volume.getGuid()); } catch (SQLException e) { throw new UIOException(e); } //TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.channel, (SimpleVolume) volume); ReceiveEntity receiveEntity = constructor.getReceiveEntity(volume.getClass(), this.volumeManager, this.storageReceiveIORequest, chanface, volume); return offset == null && endSize == null ? volume.receive( receiveEntity ) : volume.receive( receiveEntity, offset, endSize ); } } } else { //TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.channel, (SimpleVolume) targetVolume); ReceiveEntity receiveEntity = constructor.getReceiveEntity(targetVolume.getClass(), this.volumeManager, this.storageReceiveIORequest, chanface, targetVolume); return offset == null && endSize == null ? targetVolume.receive( receiveEntity ) : targetVolume.receive(receiveEntity, offset, endSize); } return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/TitanSpannedReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.spanned.receive; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity; import com.pinecone.hydra.storage.volume.entity.SpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public class TitanSpannedReceiveEntity64 extends ArchReceiveEntity implements SpannedReceiveEntity64{ protected SpannedVolume spannedVolume; protected SpannedReceive64 spannedReceive; public TitanSpannedReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, SpannedVolume spannedVolume) { super(volumeManager, storageReceiveIORequest, channel); this.spannedVolume = spannedVolume; this.spannedReceive = new TitanSpannedReceive64( this ); } @Override public StorageIOResponse receive() throws IOException { return this.spannedReceive.receive(this.channel); } @Override public StorageIOResponse receive(Number offset, Number endSize) throws IOException { return this.spannedReceive.receive(this.channel, offset, endSize ); } @Override public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException { return this.spannedReceive.randomReceive( this.channel,offset,endSize ); } @Override public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException { return null; } @Override public SpannedVolume getSpannedVolume() { return this.spannedVolume; } } 
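A minimal caller-side sketch of the spanned receive path above, assuming the surrounding application already holds a VolumeManager, a StorageReceiveIORequest, a Chanface and the target SpannedVolume (the variable names below are illustrative, not part of the repository):

// Sketch only: volumeManager, request, chanface and spannedVolume are assumed
// to come from the application context; see TitanSpannedReceiveEntity64 above.
SpannedReceiveEntity64 entity = new TitanSpannedReceiveEntity64(
        volumeManager, request, chanface, spannedVolume );
StorageIOResponse whole  = entity.receive();             // receive the whole object
StorageIOResponse ranged = entity.receive( 0L, 4096L );  // ranged variant (offset, endSize)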
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/BufferOutMate.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.system.prototype.Pinenut; import java.util.concurrent.Semaphore; public class BufferOutMate implements Pinenut { private Semaphore bufferOutLock; private int bufferOutThreadId; public BufferOutMate() { } public BufferOutMate(Semaphore bufferOutLock, int bufferOutThreadId) { this.bufferOutLock = bufferOutLock; this.bufferOutThreadId = bufferOutThreadId; } public Semaphore getBufferOutLock() { return bufferOutLock; } public void setBufferOutLock(Semaphore bufferOutLock) { this.bufferOutLock = bufferOutLock; } public int getBufferOutThreadId() { return bufferOutThreadId; } public void setBufferOutThreadId(int bufferOutThreadId) { this.bufferOutThreadId = bufferOutThreadId; } @Override public String toString() { return "BufferOutMate{bufferOutLock = " + bufferOutLock + ", bufferOutThreadId = " + bufferOutThreadId + "}"; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/BufferOutStatus.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public enum BufferOutStatus implements StripBufferStatus { Writing, Suspended, Exiting; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/BufferWriteStatus.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public enum BufferWriteStatus implements StripBufferStatus { Writing, Suspended, Synchronization, Exiting; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/CacheBlock.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.volume.entity.LogicVolume; public interface CacheBlock extends Pinenut { CacheBlockStatus getStatus(); void setStatus( CacheBlockStatus status ); Number getValidByteStart(); void setValidByteStart( Number validByteStart ); Number getValidByteEnd(); void setValidByteEnd( Number validByteEnd ); Number getByteStart(); void setByteStart( Number byteStart ); Number getByteEnd(); void setByteEnd( Number byteEnd ); int getCacheBlockNumber(); void setCacheBlockNumber( int cacheBlockNumber ); long getBufferWriteThreadId(); void setBufferWriteThreadId( long bufferWriteThreadId ); LogicVolume getVolume(); void setVolume( LogicVolume volume ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/CacheBlockStatus.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public enum CacheBlockStatus { Writing, Free, Full; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/LocalStripedTaskThread.java ================================================ package
com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.storage.volume.runtime.ArchStripedTaskThread; import com.pinecone.hydra.storage.volume.runtime.VolumeJob; import java.util.concurrent.Semaphore; public class LocalStripedTaskThread extends ArchStripedTaskThread { public LocalStripedTaskThread ( String szName, Processum parent, VolumeJob volumeJob ) { super( szName, parent, volumeJob ); volumeJob.applyThread( this ); } StripBufferStatus getJobStatus(){ return this.mVolumeJob.getStatus(); } void setJobStatus( StripBufferStatus status ){ this.mVolumeJob.setStatus( status ); } Semaphore getBlockerLatch(){ return this.mVolumeJob.getBlockerLatch(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/ReceiveBufferInStatus.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public enum ReceiveBufferInStatus implements StripBufferStatus{ Writing , Suspended , Exiting ; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/ReceiveBufferOutStatus.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public enum ReceiveBufferOutStatus implements StripBufferStatus{ Writing , Suspended , Exiting ; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripBufferInJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public interface StripBufferInJob extends StripExportJob { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripBufferOutJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public interface StripBufferOutJob extends StripExportJob { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripBufferStatus.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.system.prototype.Pinenut; public interface StripBufferStatus extends Pinenut { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripCacheBlock.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.hydra.storage.volume.entity.LogicVolume; public class StripCacheBlock implements CacheBlock{ protected CacheBlockStatus status; protected Number validByteStart; protected Number validByteEnd; protected int cacheBlockNumber; protected Number byteStart; protected Number byteEnd; protected long bufferWriteThreadId; protected LogicVolume volume; public StripCacheBlock( int cacheBlockNumber, Number byteStart, Number byteEnd ){ this.status = CacheBlockStatus.Free; this.byteStart = byteStart; this.byteEnd = byteEnd; this.cacheBlockNumber = cacheBlockNumber; } @Override public CacheBlockStatus getStatus() { return 
this.status; } @Override public void setStatus(CacheBlockStatus status) { this.status = status; } @Override public Number getValidByteStart() { return this.validByteStart; } @Override public void setValidByteStart(Number validByteStart) { this.validByteStart = validByteStart; } @Override public Number getValidByteEnd() { return this.validByteEnd; } @Override public void setValidByteEnd(Number validByteEnd) { this.validByteEnd = validByteEnd; } @Override public Number getByteStart() { return this.byteStart; } @Override public void setByteStart(Number byteStart) { this.byteStart = byteStart; } @Override public Number getByteEnd() { return this.byteEnd; } @Override public void setByteEnd(Number byteEnd) { this.byteEnd = byteEnd; } @Override public int getCacheBlockNumber() { return this.cacheBlockNumber; } @Override public void setCacheBlockNumber(int cacheBlockNumber) { this.cacheBlockNumber = cacheBlockNumber; } @Override public long getBufferWriteThreadId() { return this.bufferWriteThreadId; } @Override public void setBufferWriteThreadId(long bufferWriteThreadId) { this.bufferWriteThreadId = bufferWriteThreadId; } @Override public LogicVolume getVolume() { return this.volume; } @Override public void setVolume(LogicVolume volume) { this.volume = volume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripChannelReceiverJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public interface StripChannelReceiverJob extends StripReceiverJob{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripExportJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.hydra.storage.volume.runtime.VolumeJob; public interface StripExportJob extends VolumeJob { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripLockEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.system.prototype.Pinenut; import java.util.List; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; public interface StripLockEntity extends Pinenut { Object getLockObject(); void setLockObject( Object lockObject ); void unlockBufferToFileLock(); Semaphore getBufferToFileLock(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripReceiveBufferInJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public interface StripReceiveBufferInJob extends StripReceiverJob{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripReceiveBufferOutJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public interface StripReceiveBufferOutJob extends StripReceiverJob{ } ================================================ FILE: 
Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripReceiverJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.hydra.storage.volume.runtime.VolumeJob; public interface StripReceiverJob extends VolumeJob { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripTerminalStateRecord.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; public class StripTerminalStateRecord implements TerminalStateRecord { protected int sequentialNumbering; protected Number validByteStart; protected Number validByteEnd; @Override public int getSequentialNumbering() { return this.sequentialNumbering; } @Override public void setSequentialNumbering(int sequentialNumbering) { this.sequentialNumbering = sequentialNumbering; } @Override public Number getValidByteStart() { return this.validByteStart; } @Override public void setValidByteStart(Number validByteStart) { this.validByteStart = validByteStart; } @Override public Number getValidByteEnd() { return this.validByteEnd; } @Override public void setValidByteEnd(Number validByteEnd) { this.validByteEnd = validByteEnd; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TerminalStateRecord.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.system.prototype.Pinenut; public interface TerminalStateRecord extends Pinenut { int getSequentialNumbering(); void setSequentialNumbering( int sequentialNumbering ); Number getValidByteStart(); void setValidByteStart( Number validByteStart ); Number getValidByteEnd(); void setValidByteEnd( Number validByteEnd ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanLocalStripedVolume.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchLogicVolume; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume; import com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator; import java.io.IOException; import java.sql.SQLException; import java.util.List; public class TitanLocalStripedVolume extends ArchLogicVolume implements LocalStripedVolume { private StripedVolumeManipulator stripedVolumeManipulator; public TitanLocalStripedVolume(VolumeManager volumeManager, StripedVolumeManipulator stripedVolumeManipulator) { super(volumeManager); this.stripedVolumeManipulator = 
stripedVolumeManipulator; } public TitanLocalStripedVolume( VolumeManager volumeManager){ super(volumeManager); } public TitanLocalStripedVolume(){ } @Override public void extendLogicalVolume(GUID physicalGuid) { } @Override public List listPhysicalVolume() { return null; } public void setStripedVolumeManipulator(StripedVolumeManipulator stripedVolumeManipulator ){ this.stripedVolumeManipulator = stripedVolumeManipulator; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public void setVolumeTree(VolumeManager volumeManager) { this.volumeManager = volumeManager; } @Override public StorageIOResponse receive(ReceiveEntity entity) throws IOException { return entity.receive(); } @Override public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException { return entity.receive( offset, endSize ); } @Override public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) { return null; } @Override public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws IOException { return null; } @Override public StorageIOResponse export(ExporterEntity entity) throws IOException { return entity.export(); } @Override public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) { return null; } @Override public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException { return entity.export( cacheBlock, offset, endSize, buffer ); } @Override public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) { return null; } @Override public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) { return null; } @Override public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) { return null; } @Override public String toString() { return this.toJSONString(); } @Override public boolean existStorageObject(GUID storageObject) throws SQLException { return false; } @Override public void build() throws SQLException { VolumeConfig config = this.volumeManager.getConfig(); PhysicalVolume smallestCapacityPhysicalVolume = this.volumeManager.getSmallestCapacityPhysicalVolume(); String url = smallestCapacityPhysicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.guid + config.getSqliteFileExtension(); SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url); this.kenVolumeFileSystem.createStripMetaTable( sqLiteExecutor ); this.volumeManager.put( this ); this.kenVolumeFileSystem.insertSimpleTargetMappingTab( smallestCapacityPhysicalVolume.getGuid(), this.getGuid() ); } @Override public void storageExpansion(GUID volumeGuid) { this.volumeManager.storageExpansion( this.getGuid(), volumeGuid ); LogicVolume logicVolume = this.volumeManager.get(volumeGuid); this.stripedVolumeManipulator.updateDefinitionCapacity(this.guid, logicVolume.getVolumeCapacity().getDefinitionCapacity()); } @Override public void deductCapacity(long deductCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity ); this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } @Override public void increaseCapacity(long increaseCapacity) { this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity ); 
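// Note: these capacity methods are named from the free-space point of view: deductCapacity() above grows usedSize (consuming free space), while this increaseCapacity() shrinks usedSize (releasing space) before the new value is persisted below.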
this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity ); } @Override public boolean checkCapacity(long size) { long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize(); return freeSpace > size; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripBufferInJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.export.StripedExport; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException; import java.io.IOException; import java.util.List; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; public class TitanStripBufferInJob implements StripBufferInJob { protected VolumeManager volumeManager; protected StorageExportIORequest object; protected int jobCount; protected int jobCode; protected LogicVolume volume; protected Chanface channel; protected AtomicInteger currentCacheBlockNumber; protected final Semaphore blockerLatch; protected StripBufferStatus status; protected List< CacheBlock > cacheBlockGroup; protected LocalStripedTaskThread parentThread; protected byte[] buffer; protected Lock majorStatusIO; protected MasterVolumeGram masterVolumeGram; protected Number offset; protected Number endSize; protected UnifiedTransmitConstructor constructor; public TitanStripBufferInJob(MasterVolumeGram masterVolumeGram, StripedExport stripedExport, LogicVolume volume, StorageExportIORequest object, int jobCode ){ this.masterVolumeGram = masterVolumeGram; this.object = object; this.jobCount = this.masterVolumeGram.getJobCount(); this.jobCode = jobCode; this.volumeManager = stripedExport.getVolumeManager(); this.volume = volume; this.channel = stripedExport.getFileChannel(); this.currentCacheBlockNumber = new AtomicInteger( jobCode ); this.blockerLatch = new Semaphore(0); this.buffer = masterVolumeGram.getBuffer(); this.cacheBlockGroup = masterVolumeGram.getCacheGroup(); this.constructor = new UnifiedTransmitConstructor(); this.intoWritingStatus(); } @Override public void applyThread( LocalStripedTaskThread taskThread ) { this.parentThread = taskThread; this.masterVolumeGram = (MasterVolumeGram) this.parentThread.parentExecutum(); this.majorStatusIO = this.masterVolumeGram.getMajorStatusIO(); } @Override public StripBufferStatus getStatus() { return this.status; } protected void intoWritingStatus() { this.status = BufferWriteStatus.Writing; } protected void intoSuspendedStatus() { this.status = BufferWriteStatus.Suspended; } protected void intoSynchronizationStatus() { this.status = BufferWriteStatus.Synchronization; } protected void intoExitingStatus() { this.status = BufferWriteStatus.Exiting; } @Override public void execute() throws VolumeJobCompromiseException { long size = this.object.getSize().longValue(); long stripSize = 
this.volumeManager.getConfig().getDefaultStripSize().longValue(); long currentPosition = 0; MasterVolumeGram parentProcess = (MasterVolumeGram)this.parentThread.parentExecutum(); while ( true ){ if( this.cacheBlockGroup.get( currentCacheBlockNumber.get()).getStatus() == CacheBlockStatus.Free){ long bufferSize = stripSize; if( currentPosition >= size ){ this.intoExitingStatus(); this.wakeUpBufferToFileThread(); break; } this.cacheBlockGroup.get( currentCacheBlockNumber.get()).setStatus( CacheBlockStatus.Writing ); if( currentPosition + bufferSize > size ){ bufferSize = size - currentPosition; } try { // this.volume.channelExport( this.object, this.channel, this.cacheBlockGroup.get( currentCacheBlockNumber.get() ), currentPosition, bufferSize, this.buffer); //TitanSimpleExportEntity64 exportEntity = new TitanSimpleExportEntity64( this.volumeManager, this.object, this.channel ); ExporterEntity exportEntity = this.constructor.getExportEntity(this.volume.getClass(), this.volumeManager, this.object, this.channel, this.volume); this.volume.export( exportEntity, this.cacheBlockGroup.get( currentCacheBlockNumber.get() ), currentPosition, bufferSize, this.buffer ); currentPosition += bufferSize; this.wakeUpBufferToFileThread(); // Switch to the next cache block this.intoSynchronizationStatus(); this.currentCacheBlockNumber.addAndGet(this.jobCount); if( this.currentCacheBlockNumber.get() > cacheBlockGroup.size() - 1 ){ this.currentCacheBlockNumber.getAndSet( jobCode ); } if( this.cacheBlockGroup.get( this.currentCacheBlockNumber.get() ).getStatus() == CacheBlockStatus.Full ){ try { this.intoSuspendedStatus(); Debug.trace("Thread " + this.parentThread.getName() + ": idle, nothing to write"); this.blockerLatch.acquire(); } catch ( InterruptedException e ){ Thread.currentThread().interrupt(); e.printStackTrace(); } } this.intoWritingStatus(); } catch ( IOException e ) { throw new VolumeJobCompromiseException( e ); } } else { try { this.intoSuspendedStatus(); Debug.trace("Idle, nothing to write"); this.wakeUpBufferToFileThread(); this.blockerLatch.acquire(); } catch ( InterruptedException e ){ Thread.currentThread().interrupt(); e.printStackTrace(); } } } Debug.trace("Thread " + jobCode + " has finished its job"); } @Override public Semaphore getBlockerLatch() { return this.blockerLatch; } @Override public void setStatus(StripBufferStatus status) { this.status = status; } private void wakeUpBufferToFileThread(){ this.majorStatusIO.lock(); try { MasterVolumeGram masterVolumeGram = (MasterVolumeGram) this.parentThread.parentExecutum(); LocalStripedTaskThread bufferToFileThread = masterVolumeGram.getChildThread( this.masterVolumeGram.getBufferOutThreadId() ); if( bufferToFileThread.getJobStatus() == BufferOutStatus.Suspended ){ Debug.trace("Thread " + bufferToFileThread.getName() + " woken up"); bufferToFileThread.setJobStatus( BufferOutStatus.Writing ); this.masterVolumeGram.getBufferOutBlockerLatch().release(); } } finally { this.majorStatusIO.unlock(); } } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripBufferOutJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException; import java.io.IOException; import java.util.ArrayList;
import java.util.List; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicInteger; public class TitanStripBufferOutJob implements StripBufferOutJob { protected VolumeManager volumeManager; protected Chanface channel; protected int jobCount; protected StripBufferStatus status; protected List< CacheBlock > cacheBlocksGroup; protected AtomicInteger currentPosition; protected LocalStripedTaskThread parentThread; protected byte[] mBuffer; protected long totalSize; protected long exportSize; protected final Semaphore mBlockerLatch; protected MasterVolumeGram masterVolumeGram; public TitanStripBufferOutJob(MasterVolumeGram masterVolumeGram, VolumeManager volumeManager, Chanface channel, long totalSize, Semaphore blockerLatch){ this.masterVolumeGram = masterVolumeGram; this.volumeManager = volumeManager; this.channel = channel; this.jobCount = masterVolumeGram.getJobCount(); this.currentPosition = new AtomicInteger(0); this.cacheBlocksGroup = masterVolumeGram.getCacheGroup(); this.mBuffer = masterVolumeGram.getBuffer(); this.totalSize = totalSize; this.mBlockerLatch = blockerLatch; // this.masterVolumeGram.applyBufferOutBlockerLatch( this.mBlockerLatch ); } @Override public void applyThread(LocalStripedTaskThread thread) { this.parentThread = thread; } @Override public StripBufferStatus getStatus() { return this.status; } protected void setWritingStatus() { this.status = BufferOutStatus.Writing; } protected void setSuspendedStatus() { this.status = BufferOutStatus.Suspended; } protected void setExitingStatus() { this.status = BufferOutStatus.Exiting; } @Override public void execute() throws VolumeJobCompromiseException { while( true ){ if( this.exportSize >= this.totalSize ){ this.setExitingStatus(); this.masterVolumeGram.getMajorJobFuture().complete( true ); return; } if( !this.isAllExiting() ){ try{ Debug.trace("Idling"); this.setSuspendedStatus(); this.mBlockerLatch.acquire(); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); this.masterVolumeGram.getMajorJobFuture().completeExceptionally( e ); break; } } List<CacheBlock> writableCacheBlocks = this.getWritableCacheBlocks(); // Debug.trace("Ready to work"); if (!writableCacheBlocks.isEmpty()){ Debug.trace("Performing write"); //ByteBuffer buffer = this.mergeArrays( writableCacheBlocks ); //ByteBuffer writeBuffer = ByteBuffer.wrap(buffer, 0, buffer.length ); try { //this.channel.write(buffer); int write = this.channel.write(this.mBuffer, writableCacheBlocks); this.exportSize += write; } catch ( IOException e ) { this.masterVolumeGram.getMajorJobFuture().completeExceptionally( e ); break; } //Arrays.fill(this.mBuffer, (byte) 0); this.updateCurrentPosition( writableCacheBlocks.size() ); this.setSuspendedStatus(); // Wake up all buffer-in (cache) threads //this.lockEntity.unlockPipeStage(); for ( int i = 0; i < jobCount; ++i ){ CacheBlock cacheBlock = this.cacheBlocksGroup.get(i); MasterVolumeGram masterVolumeGram = (MasterVolumeGram) this.parentThread.parentExecutum(); LocalStripedTaskThread bufferWriteThread = masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId()); // For small files a single writer thread may finish in one pass and exit, so the looked-up thread can be null if( bufferWriteThread != null ){ StripBufferStatus jobStatus = bufferWriteThread.getJobStatus(); if( jobStatus == BufferWriteStatus.Suspended ){ bufferWriteThread.setJobStatus( BufferWriteStatus.Writing ); Semaphore jobLock = bufferWriteThread.getBlockerLatch(); Debug.trace("Thread " + bufferWriteThread.getName() + " woken up"); jobLock.release(); } } } } } this.masterVolumeGram.getMajorJobFuture().complete( false ); //Debug.warnSyn( "wangwang" ); }
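// Summary of execute() above: the buffer-out job drains the shared cache-block ring. It parks on mBlockerLatch until the producer (buffer-in) jobs mark blocks Full, flushes each contiguous run of Full blocks through the Chanface, wakes any Suspended producers, and completes the gram's major-job future (true on success, false or exceptionally on failure).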
@Override public Semaphore getBlockerLatch() { return this.mBlockerLatch; } @Override public void setStatus(StripBufferStatus status) { this.status = status; } // private int getCacheLength(){ // int rounds = 0; // int length = 0; // for( int i = this.currentPosition.get(); i < this.cacheBlocksGroup.size(); i++ ){ // if( i == currentPosition.get() && rounds == 1 ){ // break; // } // // CacheBlock cacheBlock = cacheBlocksGroup.get(i); // if( cacheBlock.getStatus() != CacheBlockStatus.Full){ // return length; // } // length++; // if( i == this.cacheBlocksGroup.size() - 1 ){ // rounds++; // i = -1; // } // } // return length; // } // // private ByteBuffer mergeArrays( List< CacheBlock > writableCacheBlocks ){ // // 计算所有缓存块的总长度 // int totalLength = 0; // for (CacheBlock cacheBlock : writableCacheBlocks) { // totalLength += cacheBlock.getValidByteEnd().intValue() - cacheBlock.getValidByteStart().intValue(); // } // // // 创建一个 ByteBuffer 来存储合并的数据 // ByteBuffer mergedBuffer = ByteBuffer.allocate(totalLength); // // // 将数据从 mBuffer 复制到 mergedBuffer // for (CacheBlock cacheBlock : writableCacheBlocks) { // int start = cacheBlock.getValidByteStart().intValue(); // int end = cacheBlock.getValidByteEnd().intValue(); // int bufferSize = end - start; // // // 将 mBuffer 中的数据复制到 mergedBuffer // mergedBuffer.put(mBuffer, start, bufferSize); // // // 将缓存块状态设置为 Free // cacheBlock.setStatus(CacheBlockStatus.Free); // } // this.exportSize += totalLength; // // 准备将 mergedBuffer 用于读取 // mergedBuffer.flip(); // return mergedBuffer; // } private List< CacheBlock > getWritableCacheBlocks(){ ArrayList cacheBlocks = new ArrayList<>(); int rounds = 0; for( int i = this.currentPosition.get(); i < this.cacheBlocksGroup.size(); i++ ){ if( i == currentPosition.get() && rounds == 1 ){ break; } CacheBlock cacheBlock = cacheBlocksGroup.get(i); if( cacheBlock.getStatus() != CacheBlockStatus.Full){ break; } cacheBlocks.add( cacheBlock ); if( i == this.cacheBlocksGroup.size() - 1 ){ rounds++; i = -1; } } return cacheBlocks; } private void updateCurrentPosition( int length ){ for( int i= 0; i < length; i++ ){ int incremented = this.currentPosition.incrementAndGet(); if( incremented == cacheBlocksGroup.size() ){ this.currentPosition.getAndSet( 0 ); } } } private boolean isAllExiting(){ for( int i = 0; i < jobCount; ++i ){ CacheBlock cacheBlock = this.cacheBlocksGroup.get(i); MasterVolumeGram masterVolumeGram = (MasterVolumeGram) this.parentThread.parentExecutum(); LocalStripedTaskThread bufferWriteThread = masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId()); if( bufferWriteThread == null ){ return false; } StripBufferStatus jobStatus = bufferWriteThread.getJobStatus(); if( jobStatus != BufferWriteStatus.Exiting ){ return false; } } return true; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripLockEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import java.util.concurrent.Semaphore; public class TitanStripLockEntity implements StripLockEntity{ private Semaphore bufferToFileLock; private Object lockObject; public TitanStripLockEntity(){} public TitanStripLockEntity( Object lockObject, Semaphore bufferToFileLock ){ this.lockObject = lockObject; this.bufferToFileLock = bufferToFileLock; } @Override public Object getLockObject() { return this.lockObject; } @Override public void setLockObject(Object lockObject) { 
this.lockObject = lockObject; } @Override public Semaphore getBufferToFileLock() { return this.bufferToFileLock; } @Override public void unlockBufferToFileLock() { this.bufferToFileLock.release(); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripReceiveBufferInJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException; import java.io.IOException; import java.util.concurrent.Semaphore; import java.util.concurrent.locks.Lock; public class TitanStripReceiveBufferInJob implements StripReceiveBufferInJob { protected MasterVolumeGram masterVolumeGram; protected byte[] buffer; protected int jobCode; protected CacheBlock cacheBlock; protected StripBufferStatus status; protected Chanface stream; protected final Semaphore blockerLatch; protected LocalStripedTaskThread parentThread; protected Lock majorStatusIO; public TitanStripReceiveBufferInJob(MasterVolumeGram masterVolumeGram, int jobCode, Chanface stream, LogicVolume volume){ this.masterVolumeGram = masterVolumeGram; this.buffer = this.masterVolumeGram.getBuffer(); this.jobCode = jobCode; this.cacheBlock = this.masterVolumeGram.getCacheGroup().get( jobCode ); this.status = ReceiveBufferInStatus.Suspended; this.stream = stream; this.blockerLatch = new Semaphore(0); this.cacheBlock.setVolume( volume ); } @Override public void execute() throws VolumeJobCompromiseException { while( true ){ try { if( this.status == ReceiveBufferInStatus.Exiting ){ this.masterVolumeGram.majorJobCountDown(); break; } if( this.masterVolumeGram.getCurrentBufferInJobCode() == this.jobCode ){ Debug.trace("Buffer-in thread starting work"); this.status = ReceiveBufferInStatus.Writing; this.cacheBlock.setStatus( CacheBlockStatus.Writing ); int start = this.cacheBlock.getByteStart().intValue(); int end = this.cacheBlock.getByteEnd().intValue(); int length = end - start; int read = this.stream.read(this.buffer, this.cacheBlock.getByteStart().intValue(), length); this.cacheBlock.setValidByteStart( start ); this.cacheBlock.setValidByteEnd( start + read ); this.status = ReceiveBufferInStatus.Suspended; this.cacheBlock.setStatus( CacheBlockStatus.Full ); LocalStripedTaskThread bufferOutThread = this.masterVolumeGram.getChildThread(this.masterVolumeGram.getBufferOutThreadId()); // If the buffer-out thread is suspended (idle), wake it up if( bufferOutThread.getJobStatus() == ReceiveBufferOutStatus.Suspended ){ this.masterVolumeGram.getBufferOutBlockerLatch().release(); } // If the next buffer-in thread is not working, wake it up int nextJobCode = this.jobCode+1; if( nextJobCode >= this.masterVolumeGram.getJobCount() ){ nextJobCode = 0; } CacheBlock nextCacheBlock = this.masterVolumeGram.getCacheGroup().get(nextJobCode); LocalStripedTaskThread nextThread = this.masterVolumeGram.getChildThread(nextCacheBlock.getBufferWriteThreadId()); if( nextThread.getJobStatus() == ReceiveBufferInStatus.Suspended && nextJobCode != this.jobCode ){ nextThread.getBlockerLatch().release(); } } Debug.trace("Going to sleep"); this.blockerLatch.acquire(); } catch (IOException | InterruptedException e) { throw new RuntimeException(e); } } } @Override public void applyThread(LocalStripedTaskThread thread) { this.parentThread = thread;
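// applyThread() is the gram's callback wiring this job to the LocalStripedTaskThread that executes it; the next statement caches the gram-wide status lock from the master gram.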
this.majorStatusIO = this.masterVolumeGram.getMajorStatusIO(); } @Override public StripBufferStatus getStatus() { return this.status; } @Override public Semaphore getBlockerLatch() { return this.blockerLatch; } @Override public void setStatus(StripBufferStatus status) { this.status = status; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripReceiveBufferOutJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.rdb.MappedExecutor; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException; import java.io.IOException; import java.sql.SQLException; import java.util.List; import java.util.concurrent.Semaphore; public class TitanStripReceiveBufferOutJob implements StripReceiveBufferOutJob { protected MasterVolumeGram masterVolumeGram; protected byte[] buffer; protected StripBufferStatus status; protected Chanface stream; protected final Semaphore blockerLatch; protected List< CacheBlock > cacheBlocksGroup; protected LocalStripedTaskThread parentThread; protected VolumeManager volumeManager; protected long totalSize; protected long exportSize; protected StorageReceiveIORequest request; protected OnVolumeFileSystem kenVolumeFileSystem; protected MappedExecutor executor; protected UnifiedTransmitConstructor constructor; public TitanStripReceiveBufferOutJob(MasterVolumeGram masterVolumeGram, VolumeManager volumeManager, Chanface stream, StorageReceiveIORequest request, MappedExecutor executor ){ this.masterVolumeGram = masterVolumeGram; this.stream = stream; this.totalSize = request.getSize().longValue(); this.volumeManager = volumeManager; this.blockerLatch = new Semaphore(0); this.masterVolumeGram.applyBufferOutBlockerLatch( this.blockerLatch ); this.exportSize = 0; this.cacheBlocksGroup = this.masterVolumeGram.getCacheGroup(); this.status = ReceiveBufferOutStatus.Suspended; this.request = request; this.buffer = masterVolumeGram.getBuffer(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); this.executor = executor; this.constructor = new UnifiedTransmitConstructor(); } @Override public void execute() throws VolumeJobCompromiseException { while( true ){ try { Debug.trace("Idling"); this.blockerLatch.acquire(); if( exportSize >= totalSize ){ this.status = ReceiveBufferOutStatus.Exiting; for( CacheBlock cacheBlock : cacheBlocksGroup ){ LocalStripedTaskThread bufferInThread = this.masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId()); bufferInThread.setJobStatus( ReceiveBufferInStatus.Exiting ); bufferInThread.getBlockerLatch().release(); } break; } Debug.trace("Starting work"); this.status = ReceiveBufferOutStatus.Writing; CacheBlock currentCacheBlock = this.cacheBlocksGroup.get(this.masterVolumeGram.getCurrentBufferInJobCode()); int start =
currentCacheBlock.getValidByteStart().intValue(); int end = currentCacheBlock.getValidByteEnd().intValue(); // TODO: should use an adapter; for now the underlying volume is assumed to be a SimpleVolume // TitanSimpleStreamReceiveEntity64 entity = new TitanSimpleStreamReceiveEntity64( this.volumeManager,this.request, this.stream, (SimpleVolume) currentCacheBlock.getVolume() ); ReceiveEntity entity = this.constructor.getReceiveEntity(currentCacheBlock.getVolume().getClass(), this.volumeManager, request, this.stream, currentCacheBlock.getVolume()); StorageIOResponse response = currentCacheBlock.getVolume().receive(entity, currentCacheBlock, this.buffer); this.status = ReceiveBufferOutStatus.Suspended; if( !this.isExist() ){ LogicVolume currentVolume = this.cacheBlocksGroup.get(this.masterVolumeGram.getCurrentBufferInJobCode()).getVolume(); this.kenVolumeFileSystem.insertStripMetaTable( this.executor, this.masterVolumeGram.getCurrentBufferInJobCode(), currentVolume.getGuid(), this.request.getStorageObjectGuid(), response.getSourceName() ); } this.exportSize += ( end - start ); this.masterVolumeGram.setCurrentBufferInJobCode( this.masterVolumeGram.getCurrentBufferInJobCode() + 1 ); if( this.masterVolumeGram.getCurrentBufferInJobCode() >= this.masterVolumeGram.getJobCount() ){ this.masterVolumeGram.setCurrentBufferInJobCode( 0 ); } // Wake up all buffer-in threads for( CacheBlock cacheBlock : cacheBlocksGroup ){ LocalStripedTaskThread bufferInThread = this.masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId()); if( bufferInThread.getJobStatus() == ReceiveBufferInStatus.Suspended ){ bufferInThread.getBlockerLatch().release(); } } } catch (SQLException | IOException | InterruptedException e) { throw new RuntimeException(e); } } } @Override public void applyThread(LocalStripedTaskThread thread) { this.parentThread = thread; } @Override public StripBufferStatus getStatus() { return this.status; } @Override public Semaphore getBlockerLatch() { return this.blockerLatch; } @Override public void setStatus(StripBufferStatus status) { this.status = status; } boolean isExist() throws SQLException { LogicVolume currentVolume = this.cacheBlocksGroup.get(this.masterVolumeGram.getCurrentBufferInJobCode()).getVolume(); return this.kenVolumeFileSystem.isExistStripMetaTable(this.executor, currentVolume.getGuid(), this.request.getStorageObjectGuid()); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripReceiverJob.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.rdb.MappedExecutor; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import java.io.IOException; import java.sql.SQLException; import java.util.concurrent.Semaphore; public class TitanStripReceiverJob implements StripChannelReceiverJob { private MasterVolumeGram masterVolumeGram; private
LogicVolume volume; private int jobCount; private int jobCode; private VolumeManager volumeManager; private StorageReceiveIORequest object; private Chanface chanface; private OnVolumeFileSystem kenVolumeFileSystem; private MappedExecutor executor; private StorageIOResponse storageIOResponse; private Number offset; private Number endSize; private UnifiedTransmitConstructor constructor; public TitanStripReceiverJob(MasterVolumeGram masterVolumeGram, ReceiveEntity entity, Chanface channel, int jobCount, int jobCode, LogicVolume volume, MappedExecutor executor, Number offset, Number endSize ){ this.masterVolumeGram = masterVolumeGram; this.volumeManager = entity.getVolumeManager(); this.object = entity.getReceiveStorageObject(); this.chanface = channel; this.jobCount = jobCount; this.jobCode = jobCode; this.volume = volume; this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); this.executor = executor; this.offset = offset; this.endSize = endSize; this.constructor = new UnifiedTransmitConstructor(); } @Override public void execute() { // Compute the slice to persist on each pass long size = this.endSize.longValue(); long stripSize = this.volumeManager.getConfig().getDefaultStripSize().longValue(); long currentPosition = jobCode * stripSize + this.offset.longValue(); while( true ){ long bufferSize = stripSize; if( currentPosition >= size ){ this.masterVolumeGram.majorJobCountDown(); break; } if( currentPosition + bufferSize > size ){ bufferSize = size - currentPosition; } try { // this.storageIOResponse = this.volume.channelReceive(this.object, this.fileChannel, currentPosition, bufferSize); // TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( this.volumeManager, this.object, this.fileChannel, (SimpleVolume) volume); ReceiveEntity receiveEntity = this.constructor.getReceiveEntity(this.volume.getClass(), this.volumeManager, this.object, this.chanface, volume); this.storageIOResponse = this.volume.receive( receiveEntity, currentPosition, bufferSize ); } catch (IOException e) { e.printStackTrace(); throw new RuntimeException(e); } currentPosition += bufferSize * jobCount; } try { if( this.storageIOResponse != null ){ this.kenVolumeFileSystem.insertStripMetaTable( executor, jobCode, volume.getGuid(), this.object.getStorageObjectGuid(), this.storageIOResponse.getSourceName() ); } //this.kenVolumeFileSystem.insertKVFSFileStripTable( executor, jobCode, volume.getGuid(), this.object.getStorageObjectGuid(), this.storageIOResponse.getSourceName() ); } catch (SQLException e) { throw new ProxyProvokeHandleException(e); } } @Override public void applyThread(LocalStripedTaskThread thread) { } @Override public StripBufferStatus getStatus() { return null; } @Override public Semaphore getBlockerLatch() { return null; } @Override public void setStatus(StripBufferStatus status) { } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExport.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.Exporter; import
com.pinecone.hydra.storage.volume.entity.StripedVolume; public interface StripedExport extends Exporter { StorageIOResponse export(Chanface chanface) throws UIOException; StorageIOResponse export( Chanface chanface,Number offset, Number endSize ) throws UIOException; StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException; StorageIOResponse export( RandomAccessChanface randomAccessChanface,Number offset, Number endSize ) throws UIOException; VolumeManager getVolumeManager(); StorageExportIORequest getStorageIORequest(); Chanface getFileChannel(); StripedVolume getStripedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export; public interface StripedExport64 extends StripedExport { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExportEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; public interface StripedExportEntity extends ExporterEntity { StripedVolume getStripedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export; public interface StripedExportEntity64 extends StripedExportEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/TitanStripedExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.TitanStorageExportIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripBufferInJob; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripBufferOutJob; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import com.pinecone.hydra.system.Hydrogen; import java.io.File; import java.sql.SQLException; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; public class 
public class TitanStripedExport64 implements StripedExport64 { protected VolumeManager volumeManager; protected StorageExportIORequest storageExportIORequest; protected Chanface channel; protected StripedVolume stripedVolume; protected OnVolumeFileSystem kenVolumeFileSystem; public TitanStripedExport64( StripedExportEntity64 entity ){ this.volumeManager = entity.getVolumeManager(); this.storageExportIORequest = entity.getStorageIORequest(); this.channel = entity.getChannel(); this.stripedVolume = entity.getStripedVolume(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } @Override public StorageIOResponse export(Chanface chanface) throws UIOException { /* Initialize parameters. */ List<LogicVolume> volumes = this.stripedVolume.queryChildren(); int jobCount = volumes.size(); int stripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio(); Processum supProc = null; MasterVolumeGram masterVolumeGram = null; try { SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor(); supProc = this.volumeManager.getSuperiorProcess(); masterVolumeGram = this.createMasterVolumeGram(supProc, jobCount, stripResidentCacheAllotRatio); /* Create the buffer-to-file writer thread. */ createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue()); /* Create one reader thread per child volume. */ createAndStartVolumeThreads(volumes, sqLiteExecutor, masterVolumeGram ); } catch (SQLException e) { throw new UIOException(e); } /* Wait synchronously for completion and surface any failure. */ this.waitForTaskCompletion(masterVolumeGram); /* masterVolumeGram.majorJobCountDownLatchWait(); */ supProc.getTaskManager().erase(masterVolumeGram); return null; }
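// For orientation between the two overloads: the export above reduces to a scatter/gather pattern in which
// reader k produces stripes k, k+jobCount, k+2*jobCount, ... and a single writer drains them strictly in
// stripe order, the roles split between TitanStripBufferInJob and TitanStripBufferOutJob. The sketch below is
// a minimal stand-in built only on java.util.concurrent; every name in it is a hypothetical illustration,
// not the Hydra wiring itself.
final class StripedExportFanOutSketch {
    public static void main( String[] args ) throws Exception {
        final int jobCount = 4, stripSize = 8, totalStrips = 16;
        // One single-slot queue per stripe keeps the readers loosely bounded and the output strictly ordered.
        java.util.concurrent.ArrayBlockingQueue<byte[]>[] lanes = new java.util.concurrent.ArrayBlockingQueue[ totalStrips ];
        for ( int i = 0; i < totalStrips; i++ ) { lanes[i] = new java.util.concurrent.ArrayBlockingQueue<>( 1 ); }
        java.util.concurrent.ExecutorService pool = java.util.concurrent.Executors.newFixedThreadPool( jobCount + 1 );
        for ( int k = 0; k < jobCount; k++ ) {         // buffer-in side: one reader per child volume
            final int code = k;
            pool.execute( () -> {
                for ( int i = code; i < totalStrips; i += jobCount ) {
                    byte[] strip = new byte[ stripSize ];   // stands in for reading strip i from volume `code`
                    try { lanes[i].put( strip ); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); return; }
                }
            } );
        }
        java.util.concurrent.Future<Boolean> majorJob = pool.submit( () -> {   // buffer-out side: the single writer
            for ( int i = 0; i < totalStrips; i++ ) {
                byte[] strip = lanes[i].take();             // stands in for Chanface.write( strip )
                System.out.println( "wrote strip " + i + " (" + strip.length + " bytes)" );
            }
            return Boolean.TRUE;
        } );
        if ( !majorJob.get() ) { throw new IllegalStateException( "writer failed" ); }
        pool.shutdown();
    }
}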
@Override public StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws UIOException { /* Initialize parameters; note that offset and endSize are not yet applied by this overload. */ List<LogicVolume> volumes = this.stripedVolume.queryChildren(); int jobCount = volumes.size(); int stripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio(); Hydrogen hydrogen = null; MasterVolumeGram masterVolumeGram = null; try { SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor(); hydrogen = this.volumeManager.getHydrogen(); masterVolumeGram = this.createMasterVolumeGram(hydrogen, jobCount, stripResidentCacheAllotRatio); /* Create the buffer-to-file writer thread. */ createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue()); /* Create one reader thread per child volume. */ createAndStartVolumeThreads(volumes, sqLiteExecutor, masterVolumeGram ); } catch (SQLException e) { throw new UIOException(e); } /* Wait synchronously for completion and surface any failure. */ this.waitForTaskCompletion(masterVolumeGram); /* masterVolumeGram.majorJobCountDownLatchWait(); */ hydrogen.getTaskManager().erase(masterVolumeGram); return null; } @Override public StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException { return null; } @Override public StorageIOResponse export(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException { return null; } private MasterVolumeGram createMasterVolumeGram(Processum supProcess, int jobCount, int stripResidentCacheAllotRatio ) { Number stripSize = this.volumeManager.getConfig().getDefaultStripSize(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram(this.stripedVolume.getGuid().toString(), supProcess, jobCount, stripResidentCacheAllotRatio, stripSize.intValue()); supProcess.getTaskManager().add(masterVolumeGram); return masterVolumeGram; } private void createBufferOutJob(MasterVolumeGram masterVolumeGram, long totalSize) { Semaphore bufferOutLock = new Semaphore(0); TitanStripBufferOutJob bufferOutJob = new TitanStripBufferOutJob(masterVolumeGram, this.volumeManager, this.channel, totalSize, bufferOutLock ); LocalStripedTaskThread bufferOutThread = new LocalStripedTaskThread("BufferOut", masterVolumeGram, bufferOutJob); masterVolumeGram.getTaskManager().add(bufferOutThread); bufferOutThread.start(); masterVolumeGram.applyBufferOutBlockerLatch( bufferOutLock ); masterVolumeGram.applyBufferOutThreadId( bufferOutThread.getExecutumId() ); } private void createAndStartVolumeThreads(List<LogicVolume> volumes, SQLiteExecutor sqLiteExecutor, MasterVolumeGram masterVolumeGram) throws SQLException { for ( LogicVolume volume : volumes ) { String sourceName = this.kenVolumeFileSystem.getStripMetaSourceName(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid()); if ( sourceName == null ){ continue; } int code = this.kenVolumeFileSystem.getStripMetaCode(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid()); File file = new File(sourceName); StorageExportIORequest titanStorageExportIORequest = new TitanStorageExportIORequest(); titanStorageExportIORequest.setStorageObjectGuid( this.storageExportIORequest.getStorageObjectGuid() ); titanStorageExportIORequest.setSourceName(sourceName); titanStorageExportIORequest.setSize(file.length()); TitanStripBufferInJob exportJob = new TitanStripBufferInJob(masterVolumeGram, this, volume, titanStorageExportIORequest, code); LocalStripedTaskThread taskThread = new LocalStripedTaskThread(this.stripedVolume.getName() + code, masterVolumeGram, exportJob); for( int i = code; i < masterVolumeGram.getCacheGroup().size(); i += masterVolumeGram.getJobCount() ){ masterVolumeGram.getCacheGroup().get( i ).setBufferWriteThreadId( taskThread.getExecutumId() ); } masterVolumeGram.getTaskManager().add(taskThread); taskThread.start(); } } private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException { /* try { masterVolumeGram.getTaskManager().syncWaitingTerminated(); } catch (Exception e) { throw new ProxyProvokeHandleException(e); } */ try{ Object ret = masterVolumeGram.getMajorJobFuture().get(); if ( ret instanceof Exception ) { throw new ProxyProvokeHandleException( (Exception) ret ); } if ( !(Boolean) ret ) { throw new IllegalStateException( "Buffer-to-file thread returned `false`, but `true` was expected."
); } } catch ( InterruptedException | ExecutionException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public VolumeManager getVolumeManager() { return this.volumeManager; } @Override public StorageExportIORequest getStorageIORequest() { return this.storageExportIORequest; } @Override public Chanface getFileChannel() { return this.channel; } @Override public StripedVolume getStripedVolume() { return this.stripedVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/TitanStripedExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchExportEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; public class TitanStripedExportEntity64 extends ArchExportEntity implements StripedExportEntity64 { protected StripedVolume stripedVolume; protected StripedExport64 stripedExport; public TitanStripedExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, StripedVolume stripedVolume) { super(volumeManager, storageExportIORequest, channel); this.stripedVolume = stripedVolume; this.stripedExport = new TitanStripedExport64( this ); } @Override public StorageIOResponse export() throws UIOException { return this.stripedExport.export(this.channel); } @Override public StorageIOResponse export(Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) { return null; } @Override public StripedVolume getStripedVolume() { return this.stripedVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExport.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.volume.entity.Exporter; import com.pinecone.hydra.storage.volume.entity.StripedVolume; public interface StripedChannelExport extends Exporter { StorageIOResponse export() throws UIOException; StorageIOResponse export( Number offset, Number endSize ) throws UIOException; VolumeManager getVolumeManager(); StorageExportIORequest getStorageIORequest(); Chanface getFileChannel(); StripedVolume getStripedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel; public interface StripedChannelExport64 
extends StripedChannelExport{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExportEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.volume.entity.ExporterEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; public interface StripedChannelExportEntity extends ExporterEntity { Chanface getChannel(); void setChannel( Chanface channel ); StripedVolume getStripedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel; public interface StripedChannelExportEntity64 extends StripedChannelExportEntity{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/TitanStripedChannelExport64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.TitanStorageExportIORequest; import com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripBufferOutJob; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import java.io.File; import java.sql.SQLException; import java.util.List; import java.util.concurrent.Semaphore; public class TitanStripedChannelExport64 implements StripedChannelExport64{ private VolumeManager volumeManager; private StorageExportIORequest storageExportIORequest; private Chanface channel; private StripedVolume stripedVolume; private OnVolumeFileSystem kenVolumeFileSystem; public TitanStripedChannelExport64(StripedChannelExportEntity entity){ this.volumeManager = entity.getVolumeManager(); this.storageExportIORequest = entity.getStorageIORequest(); this.channel = entity.getChannel(); this.stripedVolume = entity.getStripedVolume(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } @Override public StorageIOResponse export() throws UIOException { /* Initialize parameters. */ List<LogicVolume> volumes = this.stripedVolume.queryChildren(); int jobCount = volumes.size(); int stripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio(); MasterVolumeGram masterVolumeGram = null; try { SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor(); Hydrogen hydrogen = this.volumeManager.getHydrogen(); masterVolumeGram = this.createMasterVolumeGram(hydrogen, jobCount, stripResidentCacheAllotRatio); /* Create the buffer-to-file writer thread. */ createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue()); /* Create one reader thread per child volume. */ createAndStartVolumeThreads(volumes, sqLiteExecutor, masterVolumeGram ); } catch (SQLException e) { throw new UIOException(e); } /* Wait synchronously for completion and surface any failure. */ this.waitForTaskCompletion(masterVolumeGram); return null; }
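// Before the ranged overload below: once offset/endSize are honored, a global byte offset has to be mapped
// onto round-robin stripe coordinates. With jobCount child volumes and a fixed stripSize (compare
// getDefaultStripSize() above), the standard RAID-0-style arithmetic looks like this; StripeMath and its
// method names are hypothetical, only the formulas matter.
final class StripeMath {
    static long stripeIndex( long offset, int stripSize ) { return offset / stripSize; }             // which stripe the byte falls into
    static int volumeCode( long offset, int stripSize, int jobCount ) {
        return (int) ( stripeIndex( offset, stripSize ) % jobCount );                                // which child volume serves that stripe
    }
    static int intraStripeOffset( long offset, int stripSize ) { return (int) ( offset % stripSize ); } // position inside the stripe
    public static void main( String[] args ) {
        int stripSize = 4096, jobCount = 3; long offset = 10_000;
        // For these inputs: stripe 2, volume code 2, intra-stripe offset 1808.
        System.out.printf( "stripe=%d code=%d intra=%d%n", stripeIndex( offset, stripSize ),
                volumeCode( offset, stripSize, jobCount ), intraStripeOffset( offset, stripSize ) );
    }
}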
@Override public StorageIOResponse export(Number offset, Number endSize) throws UIOException { /* Initialize parameters; note that offset and endSize are not yet applied by this overload. */ List<LogicVolume> volumes = this.stripedVolume.queryChildren(); int jobCount = volumes.size(); int stripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio(); MasterVolumeGram masterVolumeGram = null; try { SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor(); Hydrogen hydrogen = this.volumeManager.getHydrogen(); masterVolumeGram = this.createMasterVolumeGram(hydrogen, jobCount, stripResidentCacheAllotRatio); /* Create the buffer-to-file writer thread. */ createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue()); /* Create one reader thread per child volume. */ createAndStartVolumeThreads(volumes, sqLiteExecutor, masterVolumeGram ); } catch (SQLException e) { throw new UIOException(e); } /* Wait synchronously for completion and surface any failure. */ this.waitForTaskCompletion(masterVolumeGram); return null; } private MasterVolumeGram createMasterVolumeGram(Hydrogen hydrogen, int jobCount, int stripResidentCacheAllotRatio ) { Number stripSize = this.volumeManager.getConfig().getDefaultStripSize(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram(this.stripedVolume.getGuid().toString(), hydrogen, jobCount, stripResidentCacheAllotRatio, stripSize.intValue()); hydrogen.getTaskManager().add(masterVolumeGram); return masterVolumeGram; } private void createBufferOutJob(MasterVolumeGram masterVolumeGram, long totalSize) { Semaphore bufferOutLock = new Semaphore(0); TitanStripBufferOutJob bufferOutJob = new TitanStripBufferOutJob(masterVolumeGram, this.volumeManager, this.channel, totalSize, bufferOutLock ); LocalStripedTaskThread bufferOutThread = new LocalStripedTaskThread("BufferOut", masterVolumeGram, bufferOutJob); masterVolumeGram.getTaskManager().add(bufferOutThread); bufferOutThread.start(); masterVolumeGram.applyBufferOutBlockerLatch( bufferOutLock ); masterVolumeGram.applyBufferOutThreadId( bufferOutThread.getExecutumId() ); }
private void createAndStartVolumeThreads(List<LogicVolume> volumes, SQLiteExecutor sqLiteExecutor, MasterVolumeGram masterVolumeGram) throws SQLException { for ( LogicVolume volume : volumes ) { String sourceName = this.kenVolumeFileSystem.getStripMetaSourceName(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid()); if ( sourceName == null ){ continue; } int code = this.kenVolumeFileSystem.getStripMetaCode(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid()); File file = new File(sourceName); StorageExportIORequest titanStorageExportIORequest = new TitanStorageExportIORequest(); titanStorageExportIORequest.setStorageObjectGuid( this.storageExportIORequest.getStorageObjectGuid() ); titanStorageExportIORequest.setSourceName(sourceName); titanStorageExportIORequest.setSize(file.length()); /* Thread wiring is currently disabled here; compare the live version in TitanStripedExport64: TitanStripBufferInJob exportJob = new TitanStripBufferInJob(masterVolumeGram, this, volume, titanStorageExportIORequest, code); LocalStripedTaskThread taskThread = new LocalStripedTaskThread(this.stripedVolume.getName() + code, masterVolumeGram, exportJob); for( int i = code; i < masterVolumeGram.getCacheGroup().size(); i += masterVolumeGram.getJobCount() ){ masterVolumeGram.getCacheGroup().get( i ).setBufferWriteThreadId( taskThread.getId() ); } masterVolumeGram.getTaskManager().add(taskThread); taskThread.start(); */ } } private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException { try { masterVolumeGram.getTaskManager().syncWaitingTerminated(); } catch (Exception e) { throw new ProxyProvokeHandleException(e); } } @Override public VolumeManager getVolumeManager() { return this.volumeManager; } @Override public StorageExportIORequest getStorageIORequest() { return this.storageExportIORequest; } @Override public Chanface getFileChannel() { return this.channel; } @Override public StripedVolume getStripedVolume() { return this.stripedVolume; } /* Currently unused sizing helper. */ private byte[] initializationBuffer(int jobCount, int bufferSize, int stripResidentCacheAllotRatio ){ return new byte[jobCount * bufferSize * stripResidentCacheAllotRatio]; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/TitanStripedChannelExportEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchExportEntity; import com.pinecone.hydra.storage.StorageExportIORequest; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; public class TitanStripedChannelExportEntity64 extends ArchExportEntity implements StripedChannelExportEntity64{ private Chanface channel; private StripedChannelExport64 stripedChannelExport64; private StripedVolume stripedVolume; public TitanStripedChannelExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, StripedVolume stripedVolume) { super(volumeManager, storageExportIORequest, null); this.channel = channel; this.stripedVolume = stripedVolume; this.stripedChannelExport64 = new TitanStripedChannelExport64( this ); } @Override public Chanface getChannel() { return this.channel; } @Override public void setChannel(Chanface channel) { this.channel = channel; } @Override public StorageIOResponse export() throws UIOException { return this.stripedChannelExport64.export(); } @Override public StorageIOResponse export(Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) { return null; } @Override public StripedVolume getStripedVolume() { return this.stripedVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceive.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; import com.pinecone.hydra.storage.volume.entity.Receiver; public interface StripedReceive extends Receiver { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceive64.java
================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; public interface StripedReceive64 extends StripedReceive { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; public interface StripedReceiveEntity extends ReceiveEntity { StripedVolume getStripedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; public interface StripedReceiveEntity64 extends StripedReceiveEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiver.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; import com.pinecone.hydra.storage.volume.entity.Receiver; public interface StripedReceiver extends Receiver { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiverEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; public interface StripedReceiverEntity extends ReceiveEntity { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/TitanStripedReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.rdb.MappedExecutor; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiveBufferInJob; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiveBufferOutJob; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiverJob; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; 
import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import com.pinecone.hydra.system.Hydrogen; import java.sql.SQLException; import java.util.List; public class TitanStripedReceive64 implements StripedReceive64{ protected VolumeManager volumeManager; protected StorageReceiveIORequest storageReceiveIORequest; protected StripedVolume stripedVolume; protected ReceiveEntity entity; protected OnVolumeFileSystem kenVolumeFileSystem; protected MappedExecutor mappedExecutor; public TitanStripedReceive64( StripedReceiveEntity64 entity ){ this.volumeManager = entity.getVolumeManager(); this.storageReceiveIORequest = entity.getReceiveStorageObject(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); this.stripedVolume = entity.getStripedVolume(); this.entity = entity; try { this.mappedExecutor = this.getExecutor(); } catch (SQLException e) { throw new RuntimeException(e); } } @Override public StorageIOResponse receive(Chanface chanface) throws UIOException { Hydrogen hydrogen = this.volumeManager.getHydrogen(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen); hydrogen.getTaskManager().add( masterVolumeGram ); List<LogicVolume> volumes = this.stripedVolume.queryChildren(); int index = 0; masterVolumeGram.setMajorJobCountDownNum( volumes.size() ); for( LogicVolume volume : volumes ){ TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, chanface, volumes.size(), index, volume, mappedExecutor, 0, this.entity.getReceiveStorageObject().getSize() ); LocalStripedTaskThread taskThread = new LocalStripedTaskThread( this.stripedVolume.getName() + index, masterVolumeGram, receiverJob ); masterVolumeGram.getTaskManager().add( taskThread ); taskThread.start(); index ++; } /* this.waitForTaskCompletion( masterVolumeGram ); masterVolumeGram.kill(); */ masterVolumeGram.majorJobCountDownLatchWait(); return null; }
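// The receive path above does not join its threads; it sizes a count-down on the master gram via
// setMajorJobCountDownNum(volumes.size()) and parks in majorJobCountDownLatchWait() until every per-volume
// job has counted down. A minimal model of that completion scheme with a plain CountDownLatch (all names
// below are illustrative, not the MasterVolumeGram API):
final class ReceiveLatchSketch {
    public static void main( String[] args ) throws InterruptedException {
        int volumeCount = 4;                               // stands in for stripedVolume.queryChildren().size()
        java.util.concurrent.CountDownLatch majorJob = new java.util.concurrent.CountDownLatch( volumeCount );
        for ( int index = 0; index < volumeCount; index++ ) {
            final int code = index;
            new Thread( () -> {
                // ... write this volume's strips (the work TitanStripReceiverJob does) ...
                majorJob.countDown();                      // signal that this strip job has finished
            }, "strip-" + code ).start();
        }
        majorJob.await();                                  // the majorJobCountDownLatchWait() equivalent
        System.out.println( "all " + volumeCount + " strip jobs finished" );
    }
}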
@Override public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws UIOException { Hydrogen hydrogen = this.volumeManager.getHydrogen(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen); hydrogen.getTaskManager().add( masterVolumeGram ); List<LogicVolume> volumes = this.stripedVolume.queryChildren(); int index = 0; masterVolumeGram.setMajorJobCountDownNum( volumes.size() ); for( LogicVolume volume : volumes ){ TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, chanface, volumes.size(), index, volume, this.mappedExecutor, offset, offset.longValue()+endSize.longValue() ); LocalStripedTaskThread taskThread = new LocalStripedTaskThread( this.stripedVolume.getName() + index, masterVolumeGram, receiverJob ); masterVolumeGram.getTaskManager().add( taskThread ); taskThread.start(); index ++; } /* this.waitForTaskCompletion( masterVolumeGram ); masterVolumeGram.kill(); */ masterVolumeGram.majorJobCountDownLatchWait(); return null; } @Override public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws UIOException { Hydrogen hydrogen = this.volumeManager.getHydrogen(); List<LogicVolume> volumes = this.stripedVolume.queryChildren(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen, volumes.size(), 1, this.volumeManager.getConfig().getDefaultStripSize().intValue() ); hydrogen.getTaskManager().add( masterVolumeGram ); MappedExecutor executor = null; try { executor = this.getExecutor(); } catch (SQLException e) { throw new UIOException(e); } TitanStripReceiveBufferOutJob bufferOutJob = new TitanStripReceiveBufferOutJob( masterVolumeGram, this.volumeManager, randomAccessChanface, this.storageReceiveIORequest, executor ); LocalStripedTaskThread taskThread = new LocalStripedTaskThread( "bufferOut", masterVolumeGram, bufferOutJob ); masterVolumeGram.getTaskManager().add( taskThread ); masterVolumeGram.applyBufferOutThreadId( taskThread.getExecutumId() ); taskThread.start(); int index = 0; masterVolumeGram.setMajorJobCountDownNum( volumes.size() ); for( LogicVolume volume : volumes ){ TitanStripReceiveBufferInJob bufferInJob = new TitanStripReceiveBufferInJob( masterVolumeGram, index, randomAccessChanface, volume ); LocalStripedTaskThread bufferInThread = new LocalStripedTaskThread(volume.getName(), masterVolumeGram, bufferInJob); masterVolumeGram.getTaskManager().add( bufferInThread ); CacheBlock cacheBlock = masterVolumeGram.getCacheGroup().get(index); cacheBlock.setBufferWriteThreadId( bufferInThread.getExecutumId() ); bufferInThread.start(); index++; } /* this.waitForTaskCompletion( masterVolumeGram ); */ masterVolumeGram.majorJobCountDownLatchWait(); return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException { return null; } private MappedExecutor getExecutor() throws SQLException { VolumeConfig config = this.volumeManager.getConfig(); GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.stripedVolume.getGuid()); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid); String url = physicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.stripedVolume.getGuid() + config.getSqliteFileExtension(); return this.volumeManager.getKenusPool().allot(url); } private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException { try { masterVolumeGram.getTaskManager().syncWaitingTerminated(); } catch (Exception e) { throw new ProxyProvokeHandleException(e); } } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/TitanStripedReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public class TitanStripedReceiveEntity64 extends ArchReceiveEntity implements StripedReceiveEntity64{
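// Usage, for orientation: this entity is the piece callers construct directly; it binds the manager, the IO
// request, a Chanface and the StripedVolume, and delegates the actual fan-out to TitanStripedReceive64. The
// glue below is a hypothetical caller-side sketch against the constructors and methods shown in this file and
// in TitanStripedExportEntity64 above; every argument is assumed to be supplied by the surrounding system.
final class StripedRoundTripSketch {
    static void receiveThenExport( com.pinecone.hydra.storage.volume.VolumeManager volumeManager,
            com.pinecone.hydra.storage.StorageReceiveIORequest receiveRequest,
            com.pinecone.hydra.storage.StorageExportIORequest exportRequest,
            com.pinecone.hydra.storage.io.Chanface in, com.pinecone.hydra.storage.io.Chanface out,
            com.pinecone.hydra.storage.volume.entity.StripedVolume stripedVolume )
            throws java.io.IOException, com.pinecone.hydra.storage.io.UIOException {
        new TitanStripedReceiveEntity64( volumeManager, receiveRequest, in, stripedVolume ).receive();   // scatter the source across the child volumes
        new com.pinecone.hydra.storage.volume.entity.local.striped.export.TitanStripedExportEntity64(
                volumeManager, exportRequest, out, stripedVolume ).export();                             // gather the strips back into one channel
    }
}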
protected StripedVolume stripedVolume; protected StripedReceive64 stripedReceive; protected Chanface chanface; public TitanStripedReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, StripedVolume stripedVolume) { super(volumeManager, storageReceiveIORequest, channel); this.stripedVolume = stripedVolume; this.stripedReceive = new TitanStripedReceive64( this ); this.chanface = channel; } @Override public StorageIOResponse receive() throws IOException { return this.stripedReceive.receive(this.chanface); } @Override public StorageIOResponse receive(Number offset, Number endSize) throws IOException { return this.stripedReceive.receive( this.chanface, offset, endSize ); } @Override public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException { return null; } @Override public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException { return null; } @Override public StripedVolume getStripedVolume() { return this.stripedVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiver.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiver; public interface StripedChannelReceiver extends StripedReceiver { StorageIOResponse channelReceive( ) throws UIOException; StorageIOResponse channelReceive(Number offset, Number endSize) throws UIOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiver64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel; public interface StripedChannelReceiver64 extends StripedChannelReceiver{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiverEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiverEntity; public interface StripedChannelReceiverEntity extends StripedReceiverEntity { Chanface getChannel(); void setChannel( Chanface channel ); StripedVolume getStripedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiverEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel; public interface StripedChannelReceiverEntity64 extends StripedChannelReceiverEntity{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/TitanStripedChannelReceiver64.java 
================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.rdb.MappedExecutor; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.ulf.rdb.sqlite.SQLiteHost; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiverJob; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import java.sql.SQLException; import java.util.List; public class TitanStripedChannelReceiver64 implements StripedChannelReceiver64{ private Chanface fileChannel; private VolumeManager volumeManager; private StorageReceiveIORequest storageReceiveIORequest; private StripedVolume stripedVolume; private ReceiveEntity entity; private OnVolumeFileSystem kenVolumeFileSystem; private SQLiteHost mSqLiteHost; public TitanStripedChannelReceiver64( StripedChannelReceiverEntity entity ){ this.entity = entity; this.fileChannel = entity.getChannel(); this.volumeManager = entity.getVolumeManager(); this.storageReceiveIORequest = entity.getReceiveStorageObject(); this.stripedVolume = entity.getStripedVolume(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); } @Override public StorageIOResponse channelReceive() throws UIOException { Hydrogen hydrogen = this.volumeManager.getHydrogen(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen); hydrogen.getTaskManager().add( masterVolumeGram ); List<LogicVolume> volumes = this.stripedVolume.queryChildren(); try { MappedExecutor sqLiteExecutor = this.getExecutor(); int index = 0; for( LogicVolume volume : volumes ){ TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, this.fileChannel, volumes.size(), index, volume, sqLiteExecutor, 0, this.entity.getReceiveStorageObject().getSize() ); LocalStripedTaskThread taskThread = new LocalStripedTaskThread( this.stripedVolume.getName() + index, masterVolumeGram, receiverJob ); masterVolumeGram.getTaskManager().add( taskThread ); taskThread.start(); index ++; } if ( this.mSqLiteHost != null ) { /* mSqLiteHost is never assigned in this class; guard the close to avoid a NullPointerException */ this.mSqLiteHost.close(); } } catch (SQLException e) { throw new UIOException(e); } this.waitForTaskCompletion( masterVolumeGram ); return null; } @Override public StorageIOResponse channelReceive(Number offset, Number endSize) throws UIOException { Hydrogen hydrogen = this.volumeManager.getHydrogen(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen); hydrogen.getTaskManager().add( masterVolumeGram ); List<LogicVolume> volumes =
this.stripedVolume.queryChildren(); MappedExecutor sqLiteExecutor = null; try { sqLiteExecutor = this.getExecutor(); } catch (SQLException e) { throw new UIOException(e); } int index = 0; for( LogicVolume volume : volumes ){ TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, this.fileChannel, volumes.size(), index, volume, sqLiteExecutor, offset, offset.longValue()+endSize.longValue() ); LocalStripedTaskThread taskThread = new LocalStripedTaskThread( this.stripedVolume.getName() + index, masterVolumeGram, receiverJob ); masterVolumeGram.getTaskManager().add( taskThread ); taskThread.start(); index ++; } this.waitForTaskCompletion( masterVolumeGram ); return null; } private MappedExecutor getExecutor() throws SQLException { VolumeConfig config = this.volumeManager.getConfig(); GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.stripedVolume.getGuid()); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid); String url = physicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.stripedVolume.getGuid() + config.getSqliteFileExtension(); return this.volumeManager.getKenusPool().allot(url); } private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException { try { masterVolumeGram.getTaskManager().syncWaitingTerminated(); } catch (Exception e) { throw new ProxyProvokeHandleException(e); } } /* @Override public StorageIOResponse receive() throws UIOException { return null; } @Override public StorageIOResponse receive(Number offset, Number endSize) throws UIOException { return null; } */ @Override public StorageIOResponse receive(Chanface chanface) throws UIOException { return null; } @Override public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws UIOException { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException { return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/TitanStripedChannelReceiverEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; public class TitanStripedChannelReceiverEntity64 extends ArchReceiveEntity implements StripedChannelReceiverEntity64{ private Chanface channel; private StripedVolume stripedVolume; private StripedChannelReceiver64 stripedChannelReceiver64; public TitanStripedChannelReceiverEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, StripedVolume
stripedVolume) { super(volumeManager, storageReceiveIORequest,null); this.channel = channel; this.stripedVolume = stripedVolume; this.stripedChannelReceiver64 = new TitanStripedChannelReceiver64( this ); } @Override public Chanface getChannel() { return this.channel; } @Override public void setChannel(Chanface channel) { this.channel = channel; } @Override public StripedVolume getStripedVolume() { return this.stripedVolume; } @Override public StorageIOResponse receive() throws UIOException { return this.stripedChannelReceiver64.channelReceive(); } @Override public StorageIOResponse receive(Number offset, Number endSize) throws UIOException { return this.stripedChannelReceiver64.channelReceive( offset, endSize ); } @Override public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException { return null; } @Override public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException { return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceive.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiver; public interface StripedStreamReceive extends StripedReceiver { StorageIOResponse streamReceive( ) throws UIOException; StorageIOResponse streamReceive( Number offset, Number endSize ) throws UIOException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream; public interface StripedStreamReceive64 extends StripedStreamReceive{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceiveEntity.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiverEntity; import java.io.InputStream; public interface StripedStreamReceiveEntity extends StripedReceiverEntity { InputStream getStream(); void setStream( InputStream stream ); StripedVolume getStripedVolume(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream; public interface StripedStreamReceiveEntity64 extends StripedStreamReceiveEntity{ } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/TitanStripedStreamReceive64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream; import 
com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.rdb.MappedExecutor; import com.pinecone.ulf.rdb.sqlite.SQLiteHost; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.RandomAccessChanface; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.ReceiveEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem; import com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram; import java.io.InputStream; import java.sql.SQLException; public class TitanStripedStreamReceive64 implements StripedStreamReceive64{ protected InputStream stream; protected VolumeManager volumeManager; protected StorageReceiveIORequest storageReceiveIORequest; protected StripedVolume stripedVolume; protected ReceiveEntity entity; protected OnVolumeFileSystem kenVolumeFileSystem; protected SQLiteHost mSqLiteHost; public TitanStripedStreamReceive64( StripedStreamReceiveEntity entity ){ this.entity = entity; this.stream = entity.getStream(); this.volumeManager = entity.getVolumeManager(); this.storageReceiveIORequest = entity.getReceiveStorageObject(); this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager ); this.stripedVolume = entity.getStripedVolume(); } @Override public StorageIOResponse streamReceive() throws UIOException { /* Not yet implemented: the buffer-out/buffer-in wiring below is disabled. Hydrogen hydrogen = this.volumeManager.getHydrogen(); List volumes = this.stripedVolume.queryChildren(); MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen, volumes.size(), 1, this.volumeManager.getConfig().getDefaultStripSize().intValue() ); hydrogen.getTaskManager().add( masterVolumeGram ); MappedExecutor executor = this.getExecutor(); TitanStripReceiveBufferOutJob bufferOutJob = new TitanStripReceiveBufferOutJob( masterVolumeGram, this.volumeManager, this.stream, this.storageReceiveIORequest, executor ); LocalStripedTaskThread taskThread = new LocalStripedTaskThread( "bufferOut", masterVolumeGram, bufferOutJob ); masterVolumeGram.getTaskManager().add( taskThread ); masterVolumeGram.applyBufferOutThreadId( taskThread.getId() ); taskThread.start(); int index = 0; for( LogicVolume volume : volumes ){ TitanStripReceiveBufferInJob bufferInJob = new TitanStripReceiveBufferInJob( masterVolumeGram, index, this.stream, volume ); LocalStripedTaskThread bufferInThread = new LocalStripedTaskThread(volume.getName(), masterVolumeGram, bufferInJob); masterVolumeGram.getTaskManager().add( bufferInThread ); CacheBlock cacheBlock = masterVolumeGram.getCacheGroup().get(index); cacheBlock.setBufferWriteThreadId( bufferInThread.getId() ); bufferInThread.start(); index++; } this.waitForTaskCompletion( masterVolumeGram ); */ return null; }
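// streamReceive() above is still disabled; for intuition, this is the round-robin pump a stream receive
// implies: read stripSize-byte chunks off the InputStream and hand chunk i to volume sink i % jobCount.
// Everything below is a self-contained sketch on standard java.io; StreamStripePumpSketch, StripSink and
// friends are hypothetical names, not Hydra types.
final class StreamStripePumpSketch {
    interface StripSink { void write( int stripeIndex, byte[] strip, int length ) throws java.io.IOException; }
    static void pump( java.io.InputStream in, StripSink[] sinks, int stripSize ) throws java.io.IOException {
        byte[] strip = new byte[ stripSize ];
        int stripeIndex = 0, n;
        while ( ( n = in.readNBytes( strip, 0, stripSize ) ) > 0 ) {      // one full stripe per pass, short tail at EOF
            sinks[ stripeIndex % sinks.length ].write( stripeIndex, strip, n );
            stripeIndex++;
        }
    }
    public static void main( String[] args ) throws java.io.IOException {
        StripSink[] sinks = new StripSink[ 3 ];
        for ( int k = 0; k < sinks.length; k++ ) {
            final int code = k;
            sinks[ k ] = ( i, s, len ) -> System.out.println( "volume " + code + " <- stripe " + i + " (" + len + " bytes)" );
        }
        pump( new java.io.ByteArrayInputStream( new byte[ 10 ] ), sinks, 4 );   // stripes of 4, 4 and a 2-byte tail
    }
}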
@Override public StorageIOResponse streamReceive(Number offset, Number endSize) throws UIOException { return null; } private MappedExecutor getExecutor() throws SQLException { VolumeConfig config = this.volumeManager.getConfig(); GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.stripedVolume.getGuid()); PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid); String url = physicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.stripedVolume.getGuid() + config.getSqliteFileExtension(); return this.volumeManager.getKenusPool().allot(url); } private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException { try { masterVolumeGram.getTaskManager().syncWaitingTerminated(); } catch (Exception e) { throw new ProxyProvokeHandleException(e); } } /* @Override public StorageIOResponse receive() throws UIOException { return null; } @Override public StorageIOResponse receive(Number offset, Number endSize) throws UIOException { return null; } */ @Override public StorageIOResponse receive(Chanface chanface) throws UIOException { return null; } @Override public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws UIOException { return null; } @Override public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws UIOException { return null; } @Override public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException { return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/TitanStripedStreamReceiveEntity64.java ================================================ package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream; import com.pinecone.hydra.storage.StorageIOResponse; import com.pinecone.hydra.storage.StorageReceiveIORequest; import com.pinecone.hydra.storage.io.UIOException; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.io.IOException; import java.io.InputStream; public class TitanStripedStreamReceiveEntity64 extends ArchReceiveEntity implements StripedStreamReceiveEntity64{ protected InputStream stream; protected StripedVolume stripedVolume; protected StripedStreamReceive64 streamReceive; public TitanStripedStreamReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, InputStream stream, StripedVolume stripedVolume) { super(volumeManager, storageReceiveIORequest, null); this.stream = stream; this.stripedVolume = stripedVolume; this.streamReceive = new TitanStripedStreamReceive64( this ); } @Override public StorageIOResponse receive() throws UIOException { return this.streamReceive.streamReceive(); } @Override public StorageIOResponse receive(Number offset, Number endSize) throws UIOException { return this.streamReceive.streamReceive( offset, endSize ); } @Override public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException { return null; } @Override public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException { return null; } @Override public InputStream getStream() { return this.stream; } @Override public void setStream(InputStream stream) { this.stream = stream; } @Override public StripedVolume getStripedVolume() { return
this.stripedVolume; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/ExecutorPool.java ================================================ package com.pinecone.hydra.storage.volume.kvfs; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.rdb.MappedExecutor; public interface ExecutorPool extends Pinenut { MappedExecutor allot( String name ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/KenVolumeFileSystem.java ================================================ package com.pinecone.hydra.storage.volume.kvfs; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.rdb.MappedExecutor; import com.pinecone.framework.util.rdb.ResultSession; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator; import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator; import com.pinecone.ulf.util.guid.GUIDs; import java.sql.ResultSet; import java.sql.SQLException; public class KenVolumeFileSystem implements OnVolumeFileSystem { private VolumeManager volumeManager; private VolumeMasterManipulator volumeMasterManipulator; private SQLiteVolumeManipulator sqLiteVolumeManipulator; public KenVolumeFileSystem( VolumeManager volumeManager ){ this.volumeManager = volumeManager; this.volumeMasterManipulator = volumeManager.getMasterManipulator(); this.sqLiteVolumeManipulator = this.volumeMasterManipulator.getSQLiteVolumeManipulator(); } @Override public GUID getKVFSPhysicsVolume(GUID volumeGuid) { return this.sqLiteVolumeManipulator.getPhysicsGuid(volumeGuid); } @Override public void insertSimpleTargetMappingTab( GUID physicsGuid, GUID volumeGuid ) { this.sqLiteVolumeManipulator.insert( physicsGuid, volumeGuid ); } @Override public void createSimpleTargetMappingTab( MappedExecutor mappedExecutor ) throws SQLException { mappedExecutor.execute( "CREATE TABLE `kvfs_simple_target_mapping`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `storage_object_guid` VARCHAR(36) , `storage_object_name` VARCHAR(36), `source_name` VARCHAR(330) );", false ); } @Override public void removeSimpleTargetMappingTab(GUID storageObjectGuid, MappedExecutor mappedExecutor) throws SQLException { mappedExecutor.execute("DELETE FROM `kvfs_simple_target_mapping` WHERE `storage_object_guid` = '" + storageObjectGuid + "'", false); } @Override public void insertSimpleTargetMappingSoloRecord( GUID storageObjectGuid, String storageObjectName, String sourceName, MappedExecutor mappedExecutor ) throws SQLException { mappedExecutor.execute( "INSERT INTO `kvfs_simple_target_mapping` ( `storage_object_guid` , `storage_object_name` , `source_name` ) VALUES ( '"+ storageObjectGuid+ "', '"+storageObjectName+"', '"+sourceName+"' )", false ); } @Override public String getSimpleStorageObjectSourceName(GUID storageObjectGuid, MappedExecutor mappedExecutor) throws SQLException { ResultSession query = mappedExecutor.query("SELECT `source_name` FROM `kvfs_simple_target_mapping` WHERE `storage_object_guid` = '" + storageObjectGuid + "' "); ResultSet resultSet = query.getResultSet(); if( resultSet.next() ){ return resultSet.getString("source_name"); } return null; } @Override public boolean existStorageObject(MappedExecutor mappedExecutor, GUID storageObjectGuid) throws SQLException { ResultSession query = mappedExecutor.query(" SELECT COUNT(*) FROM 
`kvfs_simple_target_mapping` WHERE `storage_object_guid` = '" + storageObjectGuid + "' "); ResultSet resultSet = query.getResultSet(); if( resultSet.next() ){ int count = resultSet.getInt(1); return count != 0; } return false; } @Override public int hashStorageObjectID( GUID keyGuid, int volumeNum ) { int hash = (keyGuid.hashCode() ^ 137) % volumeNum; /* TODO! CONST */ hash = (hash ^ (hash >> 31)) - (hash >> 31); /* branch-free absolute value, folds negative remainders into [0, volumeNum) */ return hash; } @Override public void createSpannedIndexTable(MappedExecutor mappedExecutor) throws SQLException { mappedExecutor.execute( "CREATE TABLE `kvfs_span_volume_index`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `hash_key` int , `target_volume_guid` VARCHAR(36));", false ); } @Override public void insertSpannedIndexTable(MappedExecutor mappedExecutor, int hashKey, GUID targetVolumeGuid) throws SQLException { mappedExecutor.execute( "INSERT INTO `kvfs_span_volume_index` ( `hash_key`, `target_volume_guid` ) VALUES ( "+hashKey+", '"+targetVolumeGuid+"' )", false ); } @Override public GUID getSpannedIndexTableTargetGuid(MappedExecutor mappedExecutor, int hashKey) throws SQLException { ResultSession query = mappedExecutor.query("SELECT `target_volume_guid` FROM `kvfs_span_volume_index` WHERE `hash_key` = " + hashKey + " "); ResultSet resultSet = query.getResultSet(); if ( resultSet.next() ){ String targetVolumeGuid = resultSet.getString("target_volume_guid"); return GUIDs.GUID128( targetVolumeGuid ); } return null; } @Override public void creatSpanLinkedVolumeTable(MappedExecutor mappedExecutor) throws SQLException { mappedExecutor.execute( "CREATE TABLE `kvfs_span_linked_volume`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `hash_key` int , `key_guid` VARCHAR(36), `target_volume_guid` VARCHAR(36)) ;", false ); } @Override public void insertSpanLinkedVolumeTable(MappedExecutor mappedExecutor, int hashKey, GUID keyGuid, GUID targetVolumeGuid) throws SQLException { mappedExecutor.execute( "INSERT INTO `kvfs_span_linked_volume` ( `hash_key`, `key_guid`, `target_volume_guid` ) VALUES ( "+hashKey+", '"+keyGuid+"', '"+targetVolumeGuid+"' )", false ); } @Override public GUID getSpanLinkedVolumeTableTargetGuid(MappedExecutor mappedExecutor, GUID keyGuid) throws SQLException { ResultSession query = mappedExecutor.query("SELECT `target_volume_guid` FROM `kvfs_span_linked_volume` WHERE `key_guid` = '" + keyGuid + "' "); ResultSet resultSet = query.getResultSet(); if ( resultSet.next() ){ String targetVolumeGuid = resultSet.getString("target_volume_guid"); return GUIDs.GUID128( targetVolumeGuid ); } return null; } @Override public void createStripMetaTable(MappedExecutor mappedExecutor) throws SQLException { mappedExecutor.execute( "CREATE TABLE `kvfs_strip_meta`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `code` int , `volume_guid` VARCHAR(36), `storage_object_guid` VARCHAR(36), `source_name` TEXT) ;", false ); } @Override public void insertStripMetaTable(MappedExecutor mappedExecutor, int code, GUID volumeGuid, GUID storageObjectGuid, String sourceName) throws SQLException { mappedExecutor.execute( "INSERT INTO `kvfs_strip_meta` ( `code`, `volume_guid`, `storage_object_guid`, `source_name` ) VALUES ( "+code+", '"+volumeGuid+"', '"+storageObjectGuid+"', '"+sourceName+"' )", false ); } @Override public void removeStripMetaTable(GUID storageGuid, MappedExecutor mappedExecutor) throws SQLException { mappedExecutor.execute( "DELETE FROM `kvfs_strip_meta` WHERE `storage_object_guid` = '" + storageGuid + "'", false ); }
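// The queries in this file splice GUIDs and source names straight into SQL text. The GUID values are
// system-generated, but `source_name` is a filesystem path and may contain quote characters, so a
// parameterized statement is the safer shape. Only part of MappedExecutor's API is visible here, so the
// sketch below uses plain JDBC (java.sql.PreparedStatement) to show what a parameterized counterpart of
// getStripMetaSourceName could look like; it is an illustration, not the project's API.
final class StripMetaQuerySketch {
    static String getStripMetaSourceName( java.sql.Connection conn, String volumeGuid, String storageObjectGuid ) throws java.sql.SQLException {
        String sql = "SELECT `source_name` FROM `kvfs_strip_meta` WHERE `volume_guid` = ? AND `storage_object_guid` = ?";
        try ( java.sql.PreparedStatement ps = conn.prepareStatement( sql ) ) {
            ps.setString( 1, volumeGuid );              // values travel as bound parameters, never as SQL text
            ps.setString( 2, storageObjectGuid );
            try ( java.sql.ResultSet rs = ps.executeQuery() ) {
                return rs.next() ? rs.getString( "source_name" ) : null;
            }
        }
    }
}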
volumeGuid, GUID storageObjectGuid) throws SQLException {
        ResultSession query = mappedExecutor.query("SELECT `source_name` FROM `kvfs_strip_meta` WHERE `volume_guid` = '" + volumeGuid + "' AND `storage_object_guid` = '" + storageObjectGuid + "' ");
        ResultSet resultSet = query.getResultSet();
        if ( resultSet.next() ){ return resultSet.getString("source_name"); }
        return null;
    }

    @Override
    public int getStripMetaCode(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid) throws SQLException {
        ResultSession query = mappedExecutor.query("SELECT `code` FROM `kvfs_strip_meta` WHERE `volume_guid` = '" + volumeGuid + "' AND `storage_object_guid` = '" + storageObjectGuid + "' ");
        ResultSet resultSet = query.getResultSet();
        if ( resultSet.next() ){ return resultSet.getInt("code"); }
        return 0;
    }

    @Override
    public boolean isExistStripMetaTable(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid) throws SQLException {
        ResultSession query = mappedExecutor.query("SELECT COUNT(*) FROM `kvfs_strip_meta` WHERE `volume_guid` = '" + volumeGuid + "' AND `storage_object_guid` = '" + storageObjectGuid + "' ");
        ResultSet resultSet = query.getResultSet();
        if( resultSet.next() ){ int count = resultSet.getInt(1); return count != 0; }
        return false;
    }
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/KenusPool.java
================================================
package com.pinecone.hydra.storage.volume.kvfs;

import com.pinecone.framework.util.rdb.MappedExecutor;
import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;
import com.pinecone.ulf.rdb.sqlite.SQLiteHost;

import java.sql.SQLException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class KenusPool implements ExecutorPool {
    protected Map<String, MappedExecutor> kenusInstances;

    public KenusPool(){
        this.kenusInstances = new ConcurrentHashMap<>();
    }

    @Override
    public MappedExecutor allot(String name) {
        // Lazily opens one SQLite executor per database name and caches it; returns null if the connection cannot be established.
        MappedExecutor mappedExecutor = this.kenusInstances.get(name);
        if( mappedExecutor == null ){
            try {
                SQLiteHost sqLiteHost = new SQLiteHost(name);
                SQLiteExecutor sqLiteExecutor = new SQLiteExecutor(sqLiteHost);
                this.kenusInstances.put(name, sqLiteExecutor);
                mappedExecutor = sqLiteExecutor;
            }
            catch (SQLException e) {
                e.printStackTrace();
            }
        }
        return mappedExecutor;
    }
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/OnVolumeFileSystem.java
================================================
package com.pinecone.hydra.storage.volume.kvfs;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.rdb.MappedExecutor;

import java.sql.SQLException;

public interface OnVolumeFileSystem extends Pinenut {
    GUID getKVFSPhysicsVolume(GUID volumeGuid);
    void insertSimpleTargetMappingTab(GUID physicsGuid, GUID volumeGuid);
    void createSimpleTargetMappingTab(MappedExecutor mappedExecutor ) throws SQLException;
    void removeSimpleTargetMappingTab( GUID storageObjectGuid, MappedExecutor mappedExecutor ) throws SQLException;
    void insertSimpleTargetMappingSoloRecord(GUID storageObjectGuid, String storageObjectName, String sourceName, MappedExecutor mappedExecutor ) throws SQLException;
    String getSimpleStorageObjectSourceName(GUID storageObjectGuid, MappedExecutor mappedExecutor ) throws SQLException;
    boolean existStorageObject( MappedExecutor mappedExecutor, GUID storageObjectGuid ) throws SQLException;
    int
hashStorageObjectID( GUID keyGuid, int volumeNum); void createSpannedIndexTable(MappedExecutor mappedExecutor ) throws SQLException; void insertSpannedIndexTable(MappedExecutor mappedExecutor, int hashKey, GUID targetVolumeGuid ) throws SQLException; GUID getSpannedIndexTableTargetGuid(MappedExecutor mappedExecutor, int hashKey ) throws SQLException; void creatSpanLinkedVolumeTable( MappedExecutor mappedExecutor ) throws SQLException; void insertSpanLinkedVolumeTable( MappedExecutor mappedExecutor, int hashKey, GUID keyGuid, GUID targetVolumeGuid ) throws SQLException; GUID getSpanLinkedVolumeTableTargetGuid( MappedExecutor mappedExecutor, GUID keyGuid ) throws SQLException; void createStripMetaTable(MappedExecutor mappedExecutor ) throws SQLException; void insertStripMetaTable(MappedExecutor mappedExecutor, int code, GUID volumeGuid, GUID storageObjectGuid, String sourceName ) throws SQLException; void removeStripMetaTable( GUID storageGuid, MappedExecutor mappedExecutor ) throws SQLException; String getStripMetaSourceName(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid ) throws SQLException; int getStripMetaCode(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid ) throws SQLException; boolean isExistStripMetaTable(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid ) throws SQLException; } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/ArchVolumeOperator.java ================================================ package com.pinecone.hydra.storage.volume.operator; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem; import com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator; import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator; import com.pinecone.hydra.system.ko.UOIUtils; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.time.LocalDateTime; public abstract class ArchVolumeOperator implements VolumeOperator{ protected VolumeManager volumeManager; protected VolumeOperatorFactory factory; protected ImperialTree imperialTree; protected VolumeMasterManipulator volumeMasterManipulator; protected VolumeCapacityManipulator volumeCapacityManipulator; protected KenVolumeFileSystem kenVolumeFileSystem; public ArchVolumeOperator( VolumeMasterManipulator masterManipulator, VolumeManager volumeManager ){ this.imperialTree = volumeManager.getMasterTrieTree(); this.volumeManager = volumeManager; this.volumeMasterManipulator = masterManipulator; this.volumeCapacityManipulator = masterManipulator.getVolumeCapacityManipulator(); this.kenVolumeFileSystem = this.volumeManager.getKVFSystem(); } protected ImperialTreeNode affirmPreinsertionInitialize(LogicVolume volume ){ GUID guid = volume.getGuid(); volume.setUpdateTime( LocalDateTime.now() ); ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode(); imperialTreeNode.setGuid( guid ); imperialTreeNode.setType( UOIUtils.createLocalJavaClass( volume.getClass().getName() ) ); return imperialTreeNode; } public VolumeOperatorFactory getVolumeOperatorFactory(){ return this.factory; } protected String 
getVolumeMetaType( TreeNode treeNode ){ return treeNode.className().replace("Titan",""); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/SimpleVolumeOperator.java ================================================ package com.pinecone.hydra.storage.volume.operator; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor; import com.pinecone.hydra.storage.volume.VolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64; import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume; import com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator; import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator; import com.pinecone.hydra.unit.imperium.ImperialTreeNode; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import java.io.File; import java.sql.SQLException; import java.util.HashMap; import java.util.List; import java.util.Map; public class SimpleVolumeOperator extends ArchVolumeOperator implements VolumeOperator{ protected Map cacheMap = new HashMap<>(); protected SimpleVolumeManipulator simpleVolumeManipulator; public SimpleVolumeOperator( VolumeOperatorFactory factory ){ this( factory.getMasterManipulator(), factory.getVolumeManager() ); this.factory = factory; } public SimpleVolumeOperator(VolumeMasterManipulator masterManipulator, VolumeManager volumeManager) { super(masterManipulator, volumeManager); this.simpleVolumeManipulator = masterManipulator.getSimpleVolumeManipulator(); } @Override public GUID insert(TreeNode treeNode) { LocalSimpleVolume simpleVolume = ( LocalSimpleVolume ) treeNode; ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(simpleVolume); GUID guid = simpleVolume.getGuid(); VolumeCapacity64 volumeCapacity = simpleVolume.getVolumeCapacity(); if ( volumeCapacity.getVolumeGuid() == null ){ volumeCapacity.setVolumeGuid( guid ); } this.imperialTree.insert(imperialTreeNode); this.simpleVolumeManipulator.insert( simpleVolume ); this.volumeCapacityManipulator.insert( volumeCapacity ); return guid; } @Override public void purge(GUID guid) { List children = this.imperialTree.getChildren(guid); for( GUIDImperialTrieNode node : children ){ TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class[]{this.getClass()}, this ); VolumeOperator operator = this.factory.getOperator(this.getVolumeMetaType(newInstance)); operator.purge( node.getGuid() ); } this.removeNode( guid ); } @Override public SimpleVolume get(GUID guid) { SimpleVolume simpleVolume = this.simpleVolumeManipulator.getSimpleVolume(guid); VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid); simpleVolume.setVolumeCapacity( volumeCapacity ); simpleVolume.setVolumeTree( this.volumeManager); simpleVolume.setKenVolumeFileSystem(); try { simpleVolume.assembleSQLiteExecutor(); } catch (SQLException e) { throw new ProxyProvokeHandleException(e); } return simpleVolume; } @Override public TreeNode get(GUID guid, int depth) { return null; } @Override public TreeNode getAsRootDepth(GUID guid) { return null; } @Override public void update(TreeNode treeNode) { 
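        // NOTE: update() is currently an unimplemented stub; the cast below only asserts the runtime type, nothing is persisted.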
SimpleVolume simpleVolume = (SimpleVolume) treeNode; }

    @Override
    public void removeStorageObject(GUID volumeGuid, GUID storageObjectGuid, long size) {
        SimpleVolume simpleVolume = (SimpleVolume) this.volumeManager.get(volumeGuid);
        try {
            SQLiteExecutor sqLiteExecutor = simpleVolume.getSQLiteExecutor();
            String sourceName = this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(storageObjectGuid, sqLiteExecutor);
            if( sourceName == null ){ return; }
            File file = new File(sourceName);
            simpleVolume.increaseCapacity( file.length() );
            List<GUID> guids = simpleVolume.listPhysicalVolume();
            PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));
            physicalVolume.increaseCapacity( file.length() );
            file.delete();
            this.kenVolumeFileSystem.removeSimpleTargetMappingTab( storageObjectGuid, sqLiteExecutor );
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public void updateName(GUID guid, String name) { }

    private void removeNode( GUID guid ){
        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.simpleVolumeManipulator.remove( guid );
    }
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/SpannedVolumeOperator.java
================================================
package com.pinecone.hydra.storage.volume.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.SpannedVolume;
import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;
import com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;
import com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator;
import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;
import com.pinecone.hydra.unit.imperium.ImperialTreeNode;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

import java.util.Collection;
import java.util.List;

public class SpannedVolumeOperator extends ArchVolumeOperator implements VolumeOperator {
    protected SpannedVolumeManipulator spannedVolumeManipulator;

    public SpannedVolumeOperator( VolumeOperatorFactory factory ){
        this( factory.getMasterManipulator(), factory.getVolumeManager() );
        this.factory = factory;
    }

    public SpannedVolumeOperator(VolumeMasterManipulator masterManipulator, VolumeManager volumeManager) {
        super(masterManipulator, volumeManager);
        this.spannedVolumeManipulator = masterManipulator.getSpannedVolumeManipulator();
    }

    @Override
    public GUID insert(TreeNode treeNode) {
        LocalSpannedVolume spannedVolume = ( LocalSpannedVolume ) treeNode;
        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(spannedVolume);
        GUID guid = spannedVolume.getGuid();
        VolumeCapacity64 volumeCapacity = spannedVolume.getVolumeCapacity();
        if ( volumeCapacity.getVolumeGuid() == null ){ volumeCapacity.setVolumeGuid( guid ); }
        this.imperialTree.insert(imperialTreeNode);
        this.spannedVolumeManipulator.insert( spannedVolume );
        this.volumeCapacityManipulator.insert( volumeCapacity );
        return guid;
    }

    @Override
    public void purge(GUID guid) {
        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);
        for( GUIDImperialTrieNode node : children ){
            TreeNode newInstance = (TreeNode) node.getType().newInstance( new Class[]{this.getClass()}, this );
            VolumeOperator operator = this.factory.getOperator(this.getVolumeMetaType(newInstance));
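            // Purge runs bottom-up: each child is materialized from its stored type descriptor and delegated to the
            // operator registered for its meta-type, so nested volumes are torn down before this node itself is removed.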
            operator.purge( node.getGuid() );
        }
        this.removeNode( guid );
    }

    @Override
    public TreeNode get(GUID guid) {
        SpannedVolume spannedVolume = this.spannedVolumeManipulator.getSpannedVolume(guid);
        VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid);
        spannedVolume.setVolumeCapacity( volumeCapacity );
        spannedVolume.setVolumeTree( this.volumeManager );
        spannedVolume.setKenVolumeFileSystem();
        return spannedVolume;
    }

    @Override
    public TreeNode get(GUID guid, int depth) { return null; }

    @Override
    public TreeNode getAsRootDepth(GUID guid) { return null; }

    @Override
    public void update(TreeNode treeNode) { }

    @Override
    public void removeStorageObject(GUID volumeGuid, GUID storageObjectGuid, long size) {
        // Forward the removal to every child volume of the span.
        Collection<TreeNode> children = this.volumeManager.getChildren(volumeGuid);
        for( TreeNode treeNode : children ){
            this.volumeManager.removeStorageObject( treeNode.getGuid(), storageObjectGuid, size );
        }
    }

    @Override
    public void updateName(GUID guid, String name) { }

    private void removeNode( GUID guid ){
        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.spannedVolumeManipulator.remove( guid );
    }
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/StripedVolumeOperator.java
================================================
package com.pinecone.hydra.storage.volume.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.LogicVolume;
import com.pinecone.hydra.storage.volume.entity.StripedVolume;
import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;
import com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;
import com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator;
import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;
import com.pinecone.hydra.unit.imperium.ImperialTreeNode;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.entity.TreeNode;

import java.sql.SQLException;
import java.util.Collection;
import java.util.List;

public class StripedVolumeOperator extends ArchVolumeOperator implements VolumeOperator {
    protected StripedVolumeManipulator stripedVolumeManipulator;

    public StripedVolumeOperator( VolumeOperatorFactory factory ){
        this( factory.getMasterManipulator(), factory.getVolumeManager() );
        this.factory = factory;
    }

    public StripedVolumeOperator(VolumeMasterManipulator masterManipulator, VolumeManager volumeManager) {
        super(masterManipulator, volumeManager);
        this.stripedVolumeManipulator = masterManipulator.getStripedVolumeManipulator();
    }

    @Override
    public GUID insert(TreeNode treeNode) {
        LocalStripedVolume stripedVolume = ( LocalStripedVolume ) treeNode;
        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(stripedVolume);
        GUID guid = stripedVolume.getGuid();
        VolumeCapacity64 volumeCapacity = stripedVolume.getVolumeCapacity();
        if ( volumeCapacity.getVolumeGuid() == null ){ volumeCapacity.setVolumeGuid( guid ); }
        this.imperialTree.insert(imperialTreeNode);
        this.stripedVolumeManipulator.insert( stripedVolume );
        this.volumeCapacityManipulator.insert( volumeCapacity );
        return guid;
    }

    @Override
    public void purge(GUID guid) {
        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);
        for( GUIDImperialTrieNode node : children ){
            TreeNode newInstance = (TreeNode) node.getType().newInstance( new Class[]{this.getClass()}, this );
            VolumeOperator operator = this.factory.getOperator(this.getVolumeMetaType(newInstance));
            operator.purge( node.getGuid() );
        }
        this.removeNode( guid );
    }

    @Override
    public TreeNode get(GUID guid) {
        StripedVolume stripedVolume = this.stripedVolumeManipulator.getStripedVolume(guid);
        VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid);
        stripedVolume.setVolumeCapacity( volumeCapacity );
        stripedVolume.setVolumeTree( this.volumeManager );
        stripedVolume.setKenVolumeFileSystem();
        return stripedVolume;
    }

    @Override
    public TreeNode get(GUID guid, int depth) { return null; }

    @Override
    public TreeNode getAsRootDepth(GUID guid) { return null; }

    @Override
    public void update(TreeNode treeNode) { }

    @Override
    public void updateName(GUID guid, String name) { }

    @Override
    public void removeStorageObject(GUID volumeGuid, GUID storageObjectGuid, long size) {
        LogicVolume logicVolume = this.volumeManager.get(volumeGuid);
        try {
            SQLiteExecutor sqLiteExecutor = logicVolume.getSQLiteExecutor();
            this.kenVolumeFileSystem.removeStripMetaTable( storageObjectGuid, sqLiteExecutor );
            Collection<TreeNode> children = this.volumeManager.getChildren(volumeGuid);
            for( TreeNode treeNode : children ){
                this.volumeManager.removeStorageObject( treeNode.getGuid(), storageObjectGuid, size );
            }
            logicVolume.increaseCapacity( size );
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    private void removeNode(GUID guid ){
        this.imperialTree.purge( guid );
        this.imperialTree.removeCachePath( guid );
        this.stripedVolumeManipulator.remove( guid );
    }
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/TitanVolumeOperatorFactory.java
================================================
package com.pinecone.hydra.storage.volume.operator;

import com.pinecone.hydra.storage.file.entity.GenericFileNode;
import com.pinecone.hydra.storage.file.entity.GenericFolder;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class TitanVolumeOperatorFactory implements VolumeOperatorFactory {
    protected VolumeMasterManipulator volumeMasterManipulator;
    protected VolumeManager volumeManager;
    protected Map<String, TreeNodeOperator> registerer = new HashMap<>();
    protected Map<String, String> metaTypeMap = new TreeMap<>();

    protected void registerDefaultMetaType( Class<?> genericType ) {
        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace( this.volumeManager.getConfig().getVersionSignature(), "" ) );
    }

    protected void registerDefaultMetaTypes() {
        this.registerDefaultMetaType( GenericFolder.class );
        this.registerDefaultMetaType( GenericFileNode.class );
    }

    public TitanVolumeOperatorFactory(VolumeManager volumeManager, VolumeMasterManipulator volumeMasterManipulator ){
        this.volumeManager = volumeManager;
        this.volumeMasterManipulator = volumeMasterManipulator;
        this.registerer.put( DefaultSimpleVolume, new SimpleVolumeOperator( this ) );
        this.registerer.put( DefaultStripedVolume, new StripedVolumeOperator( this ) );
        this.registerer.put( DefaultSpannedVolume, new SpannedVolumeOperator( this ) );
        this.registerDefaultMetaTypes();
    }

    @Override
    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {
        this.registerer.put( typeName, functionalNodeOperation );
    }
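    // Operators are keyed by the volume class's simple name (the Default* constants in VolumeOperatorFactory);
    // getOperator below resolves dispatch targets from this registry.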
    @Override
    public void registerMetaType( Class<?> clazz, String metaType ){
        this.registerMetaType( clazz.getName(), metaType );
    }

    @Override
    public void registerMetaType( String classFullName, String metaType ){
        this.metaTypeMap.put( classFullName, metaType );
    }

    @Override
    public String getMetaType( String classFullName ) {
        return this.metaTypeMap.get( classFullName );
    }

    @Override
    public VolumeOperator getOperator(String typeName ) {
        //Debug.trace( this.registerer.toString() );
        //Debug.trace( typeName );
        return (VolumeOperator) this.registerer.get( typeName );
    }

    @Override
    public VolumeManager getVolumeManager() { return this.volumeManager; }

    @Override
    public VolumeMasterManipulator getMasterManipulator() { return this.volumeMasterManipulator; }
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/VolumeOperator.java
================================================
package com.pinecone.hydra.storage.volume.operator;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public interface VolumeOperator extends TreeNodeOperator {
    void removeStorageObject( GUID volumeGuid, GUID storageObjectGuid, long size );
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/VolumeOperatorFactory.java
================================================
package com.pinecone.hydra.storage.volume.operator;

import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;
import com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;
import com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;
import com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;
import com.pinecone.hydra.unit.imperium.operator.OperatorFactory;
import com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;

public interface VolumeOperatorFactory extends OperatorFactory {
    String DefaultSimpleVolume  = LocalSimpleVolume.class.getSimpleName();
    String DefaultStripedVolume = LocalStripedVolume.class.getSimpleName();
    String DefaultSpannedVolume = LocalSpannedVolume.class.getSimpleName();

    void register( String typeName, TreeNodeOperator functionalNodeOperation );
    void registerMetaType( Class<?> clazz, String metaType );
    void registerMetaType( String classFullName, String metaType );
    String getMetaType( String classFullName );
    VolumeOperator getOperator( String typeName );
    VolumeManager getVolumeManager();
    VolumeMasterManipulator getMasterManipulator();
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/Dummy.java
================================================
package com.pinecone.hydra.storage.volume.policy;

public class Dummy {
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/ArchSizingMatcher.java
================================================
package com.pinecone.hydra.storage.volume.policy.strip;

public abstract class ArchSizingMatcher implements SizingMatcher {
    protected Number mnLevelSize;
    protected DynamicStripSizingPolicy mSizingPolicy;

    public ArchSizingMatcher( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) {
        this.mSizingPolicy = sizingPolicy;
        this.mnLevelSize   = levelSize; // keep the configured level size so getLevelSize() can report it
    }

    public DynamicStripSizingPolicy
getSizingPolicy() { return this.mSizingPolicy; } @Override public Number getLevelSize() { return this.mnLevelSize; } @Override public int getLevel() { return this.getSizingPolicy().getLevelByMatcher( this ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/DynamicStripSizingPolicy.java ================================================ package com.pinecone.hydra.storage.volume.policy.strip; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; public interface DynamicStripSizingPolicy extends Pinenut { Number evalStripSize( Number integritySize ); List getMatchers(); int getLevelByMatcher( SizingMatcher that ); Number getDefaultStripSize(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/GenericDynamicStripSizingPolicy.java ================================================ package com.pinecone.hydra.storage.volume.policy.strip; import java.util.List; public class GenericDynamicStripSizingPolicy implements DynamicStripSizingPolicy { protected List mMatchers; protected Number mnDefaultStripSize; // Parent @Override public Number evalStripSize( Number integritySize ) { for( SizingMatcher matcher : this.mMatchers ) { Number ret = matcher.isMatched( integritySize ); if( ret != null ) { return ret; } } return this.mnDefaultStripSize; } @Override public List getMatchers() { return this.mMatchers; } @Override public int getLevelByMatcher( SizingMatcher that ) { return this.mMatchers.indexOf( that ); } @Override public Number getDefaultStripSize() { return this.mnDefaultStripSize; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/MegaFileSizingMatcher64.java ================================================ package com.pinecone.hydra.storage.volume.policy.strip; public class MegaFileSizingMatcher64 extends ArchSizingMatcher { public MegaFileSizingMatcher64( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) { super( sizingPolicy, levelSize ); } @Override public Number isMatched( Number integritySize ) { long i64IntegritySize = integritySize.longValue(); if( i64IntegritySize > 100L * 1024 * 1024 * 1024 ) { // (100GB, +∞] return this.getLevelSize(); } return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/SizingMatcher.java ================================================ package com.pinecone.hydra.storage.volume.policy.strip; import com.pinecone.framework.system.prototype.Pinenut; public interface SizingMatcher extends Pinenut { Number isMatched( Number integritySize ); Number getLevelSize(); int getLevel(); DynamicStripSizingPolicy getSizingPolicy(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/SmallFileSizingMatcher64.java ================================================ package com.pinecone.hydra.storage.volume.policy.strip; public class SmallFileSizingMatcher64 extends ArchSizingMatcher { public SmallFileSizingMatcher64( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) { super( sizingPolicy, levelSize ); } @Override public Number isMatched( Number integritySize ) { long i64IntegritySize = integritySize.longValue(); if( i64IntegritySize > 1024 * 1024 * 1024 
&& i64IntegritySize <= 100L * 1024 * 1024 * 1024 ) { // (1GB, 100GB] return this.getLevelSize(); } return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/TinyFileSizingMatcher64.java ================================================ package com.pinecone.hydra.storage.volume.policy.strip; public class TinyFileSizingMatcher64 extends ArchSizingMatcher { public TinyFileSizingMatcher64( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) { super( sizingPolicy, levelSize ); } @Override public Number isMatched( Number integritySize ) { long i64IntegritySize = integritySize.longValue(); if( i64IntegritySize <= 1024 * 1024 * 1024 ) { // [0, 1G] return this.getLevelSize(); } return null; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/ArchStripedTaskThread.java ================================================ package com.pinecone.hydra.storage.volume.runtime; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.Processum; public abstract class ArchStripedTaskThread extends ArchTaskThread implements Runnable { protected VolumeJob mVolumeJob; protected ArchStripedTaskThread ( String szName, Processum parent, VolumeJob volumeJob ) { super( szName, parent ); this.mVolumeJob = volumeJob; Thread affinityThread = new Thread( this ); affinityThread.setDaemon(false); this.setThreadAffinity( affinityThread ); this.getAffiliateThread().setName( szName ); this.setName( affinityThread.getName() ); } protected void executeSingleJob() throws VolumeJobCompromiseException { this.mVolumeJob.execute(); } @Override public void run() { //switch () try{ this.executeSingleJob(); } catch ( VolumeJobCompromiseException e ) { throw new ProxyProvokeHandleException( e ); } } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/ArchTaskThread.java ================================================ package com.pinecone.hydra.storage.volume.runtime; import com.pinecone.framework.system.executum.ArchThreadum; import com.pinecone.framework.system.executum.Processum; public abstract class ArchTaskThread extends ArchThreadum { protected ArchTaskThread ( String szName, Processum parent ) { super( szName, parent, null ); } @Override public void start() { if( this.getAffiliateThread() != null ) { this.getAffiliateThread().start(); } } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/MasterVolumeGram.java ================================================ package com.pinecone.hydra.storage.volume.runtime; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Semaphore; import java.util.concurrent.locks.Lock; import com.pinecone.framework.system.GenericMasterTaskManager; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.lock.SpinLock; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread; import 
com.pinecone.hydra.storage.volume.entity.local.striped.StripCacheBlock; public class MasterVolumeGram extends ArchProcessum implements VolumeGram { protected Lock mMajorStatusIO = new SpinLock(); protected int jobCount; protected long bufferOutThreadId; protected Semaphore bufferOutBlockerLatch; protected int currentBufferInJobCode; protected CountDownLatch countDownLatch; protected final CompletableFuture majorJobFuture; protected List cacheGroup; protected byte[] buffer; public MasterVolumeGram( String szName, Processum parent ) { super( szName, parent ); this.mTaskManager = new GenericMasterTaskManager( this ); this.majorJobFuture = new CompletableFuture<>(); } public MasterVolumeGram( String szName, Processum parent, int jobCount, int StripResidentCacheAllotRatio, int stripSize ){ this( szName, parent ); this.jobCount = jobCount; this.cacheGroup = this.initializeCacheGroup( jobCount, StripResidentCacheAllotRatio, stripSize ); this.buffer = new byte[ jobCount * stripSize * StripResidentCacheAllotRatio ]; this.currentBufferInJobCode = 0; } public Lock getMajorStatusIO() { return this.mMajorStatusIO; } public LocalStripedTaskThread getChildThread( long threadId ){ return (LocalStripedTaskThread) this.getTaskManager().getExecutumPool().get( threadId ); } @Override public int getJobCount() { return this.jobCount; } @Override public void setJobCount(int jobCount) { this.jobCount = jobCount; } @Override public List getCacheGroup() { return this.cacheGroup; } @Override public void setCacheGroup(List cacheGroup) { this.cacheGroup = cacheGroup; } @Override public byte[] getBuffer() { return this.buffer; } @Override public void setBuffer(byte[] buffer) { this.buffer = buffer; } @Override public long getBufferOutThreadId() { return this.bufferOutThreadId; } @Override public void applyBufferOutThreadId(long bufferOutThreadId) { this.bufferOutThreadId = bufferOutThreadId; } @Override public void applyBufferOutBlockerLatch(Semaphore bufferOutBlockerLatch) { this.bufferOutBlockerLatch = bufferOutBlockerLatch; } @Override public Semaphore getBufferOutBlockerLatch() { return this.bufferOutBlockerLatch; } @Override public int getCurrentBufferInJobCode() { return this.currentBufferInJobCode; } @Override public void setCurrentBufferInJobCode(int currentBufferInJobCode) { this.currentBufferInJobCode = currentBufferInJobCode; } @Override public CompletableFuture getMajorJobFuture() { return this.majorJobFuture; } @Override public void majorJobCountDown() { this.countDownLatch.countDown(); } @Override public void setMajorJobCountDownNum(int num) { this.countDownLatch = new CountDownLatch(num); } @Override public void majorJobCountDownLatchWait() { try { this.countDownLatch.await(); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); throw new ProxyProvokeHandleException(e); } } private List< CacheBlock > initializeCacheGroup(int jobCount, int StripResidentCacheAllotRatio, Number stripSize ){ ArrayList cacheGroup = new ArrayList<>(); Number currentPosition = 0; for( int i = 0; i < jobCount * StripResidentCacheAllotRatio; i++ ){ StripCacheBlock stripCacheBlock = new StripCacheBlock( i, currentPosition, currentPosition.intValue() + stripSize.intValue() ); cacheGroup.add( stripCacheBlock ); currentPosition = currentPosition.intValue() + stripSize.intValue(); } return cacheGroup; } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/TaskThread.java ================================================ package 
com.pinecone.hydra.storage.volume.runtime; import com.pinecone.framework.system.executum.Executum; public interface TaskThread extends Executum { } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/VolumeGram.java ================================================ package com.pinecone.hydra.storage.volume.runtime; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Semaphore; public interface VolumeGram extends Processum { int getJobCount(); void setJobCount( int jobCount ); List getCacheGroup(); void setCacheGroup( List cacheGroup ); byte[] getBuffer(); void setBuffer( byte[] buffer ); long getBufferOutThreadId(); void applyBufferOutThreadId(long bufferOutThreadId ); void applyBufferOutBlockerLatch( Semaphore bufferOutBlockerLatch); Semaphore getBufferOutBlockerLatch(); int getCurrentBufferInJobCode(); void setCurrentBufferInJobCode( int currentBufferInJobCode ); CompletableFuture getMajorJobFuture(); void majorJobCountDown(); void setMajorJobCountDownNum( int num ); void majorJobCountDownLatchWait(); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/VolumeJob.java ================================================ package com.pinecone.hydra.storage.volume.runtime; import com.pinecone.framework.system.functions.Executor; import com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread; import com.pinecone.hydra.storage.volume.entity.local.striped.StripBufferStatus; import java.util.concurrent.Semaphore; public interface VolumeJob extends Executor { void execute() throws VolumeJobCompromiseException; void applyThread( LocalStripedTaskThread thread ); StripBufferStatus getStatus(); Semaphore getBlockerLatch(); void setStatus( StripBufferStatus status ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/VolumeJobCompromiseException.java ================================================ package com.pinecone.hydra.storage.volume.runtime; import com.pinecone.framework.system.executum.JobCompromisedException; public class VolumeJobCompromiseException extends JobCompromisedException { public VolumeJobCompromiseException () { super(); } public VolumeJobCompromiseException ( String message ) { super(message); } public VolumeJobCompromiseException ( String message, Throwable cause ) { super(message, cause); } public VolumeJobCompromiseException ( Throwable cause ) { super(cause); } protected VolumeJobCompromiseException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/LineSegmentManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface LineSegmentManipulator extends Pinenut { void insert( int idMin, int idMax, GUID volumeGuid ); GUID getVolumeGuid( int id ); void delete( int idMin, int idMax ); } 
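LineSegmentManipulator maps closed integer id ranges [idMin, idMax] onto a volume GUID. For illustration, a minimal in-memory table with the same shape, ordered by lower bound and answered via TreeMap.floorEntry (the class below is a hypothetical test aid, not part of this repository; it assumes segments do not overlap):

import java.util.Map;
import java.util.TreeMap;

public final class InMemorySegmentTable<V> {
    private static final class Segment<T> {
        final int idMax; final T volume;
        Segment( int idMax, T volume ) { this.idMax = idMax; this.volume = volume; }
    }

    private final TreeMap<Integer, Segment<V>> segments = new TreeMap<>();

    public void insert( int idMin, int idMax, V volumeGuid ) {
        segments.put( idMin, new Segment<>( idMax, volumeGuid ) );
    }

    public V getVolumeGuid( int id ) {
        Map.Entry<Integer, Segment<V>> e = segments.floorEntry( id ); // nearest segment starting at or below id
        return ( e != null && id <= e.getValue().idMax ) ? e.getValue().volume : null;
    }

    public void delete( int idMin, int idMax ) {
        segments.remove( idMin ); // segments are keyed by their lower bound
    }
}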
================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/LogicVolumeManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import java.util.List; public interface LogicVolumeManipulator extends GUIDNameManipulator { void extendLogicalVolume( GUID logicGuid, GUID physicalGuid ); List< GUID > listPhysicalVolume( GUID logicGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/MirroredVolumeManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.MirroredVolume; import java.util.List; public interface MirroredVolumeManipulator extends LogicVolumeManipulator { void insert( MirroredVolume mirroredVolume ); void remove( GUID guid ); MirroredVolume getMirroredVolume(GUID guid); void extendLogicalVolume( GUID logicGuid, GUID physicalGuid ); List listPhysicalVolume(GUID logicGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/MountPointManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.MountPoint; public interface MountPointManipulator extends Pinenut { void insert( MountPoint mountPoint ); void remove( GUID guid ); void removeByVolumeGuid( GUID guid ); MountPoint getMountPoint(GUID guid); MountPoint getMountPointByVolumeGuid( GUID guid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/PhysicalVolumeManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import java.util.List; public interface PhysicalVolumeManipulator extends Pinenut { void insert( PhysicalVolume physicalVolume ); void remove( GUID guid ); PhysicalVolume getPhysicalVolume(GUID guid); PhysicalVolume getPhysicalVolumeByName( String name ); PhysicalVolume getSmallestCapacityPhysicalVolume(); GUID getParent( GUID guid ); List queryAllPhysicalVolumes(); void update( PhysicalVolume physicalVolume ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/SQLiteVolumeManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface SQLiteVolumeManipulator extends Pinenut { void insert( GUID physicsGuid, GUID volumeGuid ); GUID getPhysicsGuid( GUID volumeGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/SimpleVolumeManipulator.java 
================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import java.util.List; public interface SimpleVolumeManipulator extends LogicVolumeManipulator { void insert( SimpleVolume simpleVolume ); void remove( GUID guid ); void update( SimpleVolume simpleVolume ); SimpleVolume getSimpleVolume(GUID guid); void extendLogicalVolume( GUID logicGuid, GUID physicalGuid ); List listPhysicalVolume(GUID logicGuid ); List queryAllSimpleVolumes(); void updateDefinitionCapacity( GUID guid, long definitionCapacity ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/SpannedVolumeManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.SpannedVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import java.util.List; public interface SpannedVolumeManipulator extends LogicVolumeManipulator { void insert( SpannedVolume spannedVolume ); void remove( GUID guid ); void update( SpannedVolume spannedVolume ); SpannedVolume getSpannedVolume(GUID guid); void extendLogicalVolume( GUID logicGuid, GUID physicalGuid ); List listPhysicalVolume(GUID logicGuid ); List queryAllSpannedVolume(); void updateDefinitionCapacity( GUID guid, long definitionCapacity ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/StripedVolumeManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import java.util.List; public interface StripedVolumeManipulator extends LogicVolumeManipulator { void insert( StripedVolume stripedVolume ); void remove( GUID guid ); void update( StripedVolume stripedVolume ); StripedVolume getStripedVolume(GUID guid); void extendLogicalVolume( GUID logicGuid, GUID physicalGuid ); List listPhysicalVolume(GUID logicGuid ); List queryAllStripedVolume(); void updateDefinitionCapacity( GUID guid, long definitionCapacity ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeAllocateManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface VolumeAllocateManipulator extends Pinenut { void insert( GUID objectGuid, GUID childVolumeGuid, GUID parentVolumeGuid ); GUID get( GUID objectGuid, GUID parentGuid ); } ================================================ FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeCapacityManipulator.java ================================================ package com.pinecone.hydra.storage.volume.source; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64; public interface VolumeCapacityManipulator extends Pinenut { 
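    // One capacity record per volume: operators insert a VolumeCapacity64 row alongside each volume
    // and later adjust its used size through update().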
    void insert( VolumeCapacity64 volumeCapacity );
    void remove( GUID guid );
    VolumeCapacity64 getVolumeCapacity( GUID guid );
    void update( GUID guid, long usedSize );
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeMasterManipulator.java
================================================
package com.pinecone.hydra.storage.volume.source;

import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;

public interface VolumeMasterManipulator extends KOIMasterManipulator {
    MirroredVolumeManipulator getMirroredVolumeManipulator();
    MountPointManipulator getMountPointManipulator();
    SimpleVolumeManipulator getSimpleVolumeManipulator();
    SpannedVolumeManipulator getSpannedVolumeManipulator();
    StripedVolumeManipulator getStripedVolumeManipulator();
    VolumeCapacityManipulator getVolumeCapacityManipulator();
    PhysicalVolumeManipulator getPhysicalVolumeManipulator();
    VolumeAllocateManipulator getVolumeAllocateManipulator();
    SQLiteVolumeManipulator getSQLiteVolumeManipulator();
    LogicVolumeManipulator getPrimeLogicVolumeManipulator();
}
================================================
FILE: Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeTreeManipulator.java
================================================
package com.pinecone.hydra.storage.volume.source;

import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;

public interface VolumeTreeManipulator extends TrieTreeManipulator {
}
================================================
FILE: Hydra/hydra-kom-default-driver/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>9</source>
                    <target>9</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.pinecone.hydra.kom.driver.default</groupId>
    <artifactId>hydra-kom-default-driver</artifactId>
    <version>2.1.0</version>

    <dependencies>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-architecture</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-service</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-storage</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-config</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-device</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-service-control</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>org.mybatis</groupId><artifactId>mybatis</artifactId><version>3.5.9</version></dependency>
        <dependency><groupId>org.mybatis</groupId><artifactId>mybatis-spring</artifactId><version>2.0.6</version></dependency>
        <dependency><groupId>mysql</groupId><artifactId>mysql-connector-java</artifactId><version>8.0.26</version></dependency>
        <dependency><groupId>org.slf4j</groupId><artifactId>slf4j-api</artifactId><version>1.7.30</version></dependency>
        <dependency><groupId>com.pinecone.slime.jelly</groupId><artifactId>jelly</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/FileOwnerMapper.java
================================================
package com.pinecone.hydra;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;

import java.util.List;

public interface FileOwnerMapper extends TireOwnerManipulator {
    void insertRootNode( GUID guid, LinkedType linkedType );
    default void insertRootNode( GUID guid ) { this.insertRootNode( guid, LinkedType.Owned ); }
    void insert( GUID targetGuid, GUID parentGUID, LinkedType linkedType );
    default void insertOwnedNode( GUID targetGuid, GUID parentGUID ) { this.insert( targetGuid, parentGUID, LinkedType.Owned ); }
    default void insertHardLinkedNode( GUID targetGuid, GUID parentGUID ) { this.insert( targetGuid, parentGUID, LinkedType.Hard ); }
    void update( GUID targetGuid, GUID parentGUID, LinkedType linkedType );
    void updateParentGuid( GUID targetGuid, GUID parentGUID );
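    // The default methods in this interface pin the LinkedType argument (Owned vs. Hard),
    // so most call sites never need to pass the enum explicitly.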
    void updateLinkedType( GUID targetGuid, LinkedType linkedType );
    void remove( GUID subordinateGuid, GUID ownerGuid );
    void removeBySubordinate( GUID subordinateGuid );
    void removeByOwner( GUID ownerGuid );
    GUID getOwner( GUID subordinateGuid );
    List<GUID> getSubordinates( GUID guid );
    void setLinkedType( GUID sourceGuid, GUID targetGuid, LinkedType linkedType );
    default void setOwned( GUID sourceGuid, GUID targetGuid ) { this.setLinkedType( sourceGuid, targetGuid, LinkedType.Owned ); }
    default void setHardLink( GUID sourceGuid, GUID targetGuid ) { this.setLinkedType( sourceGuid, targetGuid, LinkedType.Hard ); }
    LinkedType getLinkedType( GUID childGuid, GUID parentGuid );
}
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/AuthorizationMapper.java
================================================
package com.pinecone.hydra.account.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.Authorization;
import com.pinecone.hydra.account.entity.GenericAuthorization;
import com.pinecone.hydra.account.source.AuthorizationManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@IbatisDataAccessObject
public interface AuthorizationMapper extends AuthorizationManipulator {
    @Insert("INSERT INTO `hydra_account_authorization` (`guid`, `user_name`, `user_guid`, `credential_guid`, `privilege_token`, `privilege_guid`, `create_time`, `update_time`) VALUES (#{guid},#{userName},#{userGuid},#{credentialGuid},#{privilegeToken},#{privilegeGuid},#{createTime},#{updateTime})")
    void insert(Authorization authorization);

    @Delete("DELETE FROM `hydra_account_authorization` WHERE `guid` = #{authorizationGuid}")
    void remove(GUID authorizationGuid);

    @Update("UPDATE `hydra_account_authorization` SET `privilege_token` = #{privilegeToken}, `privilege_guid` = #{privilegeGuid}, `update_time` = #{updateTime} WHERE guid = #{authorizationGuid}")
    void update(GUID authorizationGuid);

    @Select("SELECT `id`, `guid`, `user_name` AS userName, `user_guid` AS userGuid, `credential_guid` AS credentialGuid, `privilege_token` AS privilegeToken, `privilege_guid` AS privilegeGuid, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_account_authorization` WHERE guid = #{authorizationGuid}")
    Authorization queryCredential(GUID authorizationGuid );

    @Select("SELECT `id`, `guid`, `user_name` AS userName, `user_guid` AS userGuid, `credential_guid` AS credentialGuid , `privilege_token` AS privilegeToken, `privilege_guid` AS privilegeGuid, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_account_authorization` WHERE user_guid = #{userGuid}")
    List<GenericAuthorization> queryAuthorizationByUserGuid(GUID userGuid);

    @Delete("DELETE FROM `hydra_account_authorization` WHERE user_guid = #{userGuid}")
    void removeAuthorizationByUserGuid(GUID userGuid);

    @Select("SELECT `id`, `guid`, `user_name` AS userName, `user_guid` AS userGuid, `credential_guid` AS credentialGuid , `privilege_token` AS privilegeToken, `privilege_guid` AS privilegeGuid, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_account_authorization`")
    List<GenericAuthorization> queryAllAuthorization();
}
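In MyBatis, an annotated mapper like AuthorizationMapper is materialized from a SqlSession at runtime. A minimal usage sketch (the helper class and the SqlSessionFactory wiring are assumptions, not shown in this repository):

package com.pinecone.hydra.account.ibatis;

import java.util.List;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.GenericAuthorization;

final class AuthorizationMapperUsageSketch {
    // Hypothetical helper: resolve the mapper from a session and list a user's authorization grants.
    static List<GenericAuthorization> grantsOf( SqlSessionFactory factory, GUID userGuid ) {
        try ( SqlSession session = factory.openSession() ) {
            AuthorizationMapper mapper = session.getMapper( AuthorizationMapper.class );
            return mapper.queryAuthorizationByUserGuid( userGuid );
        }
    }
}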
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/CredentialMapper.java
================================================
package com.pinecone.hydra.account.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.Credential;
import com.pinecone.hydra.account.source.CredentialManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;

@IbatisDataAccessObject
public interface CredentialMapper extends CredentialManipulator {
    @Insert("INSERT INTO `hydra_account_credential` (`guid`, `name`, `credential`, `create_time`, `update_time`, `type`) VALUES (#{guid},#{name},#{credential},#{createTime},#{updateTime},#{type})")
    void insert(Credential credential);

    @Delete("DELETE FROM hydra_account_credential WHERE guid = #{guid}")
    void remove(GUID credentialGuid);

    @Select("SELECT `id`, `guid`, `name`, `credential`, `create_time` AS createTime, `update_time` AS updateTime, `type` FROM hydra_account_credential WHERE `guid` = #{guid}")
    Credential queryCredential(GUID credentialGuid );
}
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/DomainNodeMapper.java
================================================
package com.pinecone.hydra.account.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.Domain;
import com.pinecone.hydra.account.entity.GenericDomain;
import com.pinecone.hydra.account.source.DomainNodeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@IbatisDataAccessObject
public interface DomainNodeMapper extends DomainNodeManipulator {
    @Insert("INSERT INTO `hydra_account_domain_node` (`domain_name`, `domin_guid`, `name`) VALUES (#{domainName}, #{guid}, #{name})")
    void insert(Domain domain);

    @Delete("DELETE FROM `hydra_account_domain_node` WHERE `domin_guid` = #{domainGuid}")
    void remove(GUID domainGuid);

    @Select("SELECT `id`, `domain_name` AS domainName, `domin_guid` AS guid, `name` FROM `hydra_account_domain_node` WHERE `domin_guid` = #{domainGuid}")
    GenericDomain queryDomain0(GUID domainGuid );

    default GenericDomain queryDomain(GUID domainGuid ){
        GenericDomain domain = this.queryDomain0(domainGuid);
        domain.setDomainNodeManipulator( this );
        return domain;
    }

    @Select("SELECT `domin_guid` FROM hydra_account_domain_node WHERE `name` = #{name}")
    List<GUID> getGuidsByName(String name );

    @Select("SELECT `domin_guid` FROM hydra_account_domain_node WHERE `name` = #{name} AND domin_guid = #{guid}")
    List<GUID> getGuidsByNameID(@Param("name") String name, @Param("guid") GUID guid );

    @Select("SELECT `id`, `domain_name` AS domainName, `domin_guid` AS guid, `name` FROM `hydra_account_domain_node`")
    List<GenericDomain> queryAllDomain();

    @Select("SELECT `name` AS domainName FROM `hydra_account_domain_node` WHERE `domin_guid` = #{domainGuid}")
    String queryDomainNameByGuid(GUID domainGuid);

    @Update("UPDATE `hydra_account_domain_node` SET `domain_name` = #{domainName}, `name` = #{name} WHERE `domin_guid` = #{guid} ")
    void update(Domain domain);
}
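The queryDomain0/queryDomain pair above is a small post-load wiring hook: the raw @Select result comes back from queryDomain0, and the default method attaches the live manipulator before the entity escapes to callers. Restated generically (the interface below is a hypothetical illustration, not repository API):

// Hypothetical illustration of the default-method post-load hook used by DomainNodeMapper.
interface PostWiringDao<T> {
    T load0( String key );          // raw persistence lookup, e.g. an annotated @Select method
    void wire( T entity );          // attach runtime collaborators to the freshly loaded entity
    default T load( String key ) {  // the method callers actually use
        T entity = load0( key );
        wire( entity );
        return entity;
    }
}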
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/GroupNodeMapper.java
================================================
package com.pinecone.hydra.account.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.GenericGroup;
import com.pinecone.hydra.account.entity.Group;
import com.pinecone.hydra.account.source.GroupNodeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@IbatisDataAccessObject
public interface GroupNodeMapper extends GroupNodeManipulator {
    @Insert("INSERT INTO `hydra_account_group_node` (`default_privilege_policy_guid`, `guid`, `name`) VALUES (#{defaultPrivilegePolicyGuid},#{guid},#{name})")
    void insert(Group group);

    @Delete("DELETE FROM `hydra_account_group_node` WHERE `guid` = #{groupGuid}")
    void remove(GUID groupGuid);

    @Select("SELECT `id`, `default_privilege_policy_guid` AS defaultPrivilegePolicyGuid, `guid`, `name` FROM `hydra_account_group_node` WHERE `guid` = #{groupGuid}")
    GenericGroup queryGroup(GUID groupGuid );

    @Select("SELECT `guid` FROM hydra_account_group_node WHERE `name` = #{name}")
    List<GUID> getGuidsByName(String name );

    @Select("SELECT `guid` FROM hydra_account_group_node WHERE `name` = #{name} AND guid = #{guid}")
    List<GUID> getGuidsByNameID(@Param("name") String name, @Param("guid") GUID guid );

    @Update("UPDATE `hydra_account_group_node` SET `default_privilege_policy_guid` = #{defaultPrivilegePolicyGuid}, `guid` = #{guid}, `name` = #{name} WHERE `guid` = #{guid}")
    void update(Group group);
}
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/PrivilegeMapper.java
================================================
package com.pinecone.hydra.account.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.GenericPrivilege;
import com.pinecone.hydra.account.source.PrivilegeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@IbatisDataAccessObject
public interface PrivilegeMapper extends PrivilegeManipulator {
    @Insert("INSERT INTO `hydra_account_privilege` (`guid`, `name`, `privilege_code`, `create_time`, `update_time`, `type`, `parent_priv_guid`,`token`) VALUES (#{guid}, #{name}, #{privilegeCode}, #{createTime}, #{updateTime}, #{type}, #{parentPrivGuid},#{token})")
    void insert(GenericPrivilege privilege);

    @Update("UPDATE `hydra_account_privilege` SET `guid` = #{guid}, `name` = #{name}, `privilege_code` = #{privilegeCode}, `create_time` = #{createTime}, `update_time` = #{updateTime}, `type` = #{type}, `parent_priv_guid` = #{parentPrivGuid} WHERE `guid` = #{guid}")
    void update(GenericPrivilege privilege);

    @Delete("DELETE FROM `hydra_account_privilege` WHERE `guid` = #{privilegeGuid}")
    void remove(GUID privilegeGuid);

    @Select("SELECT * FROM `hydra_account_privilege`")
    List<GenericPrivilege> queryAllPrivileges();

    @Select("SELECT id, guid, token, name, privilege_code AS 'privilegeCode', create_time AS 'createTime', update_time AS 'updateTime' ,type, parent_priv_guid AS 'parentPrivGuid' FROM `hydra_account_privilege` WHERE `guid` = #{guid}")
    GenericPrivilege queryPrivilege(GUID guid);
}
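Privilege rows reference their parent through parent_priv_guid, so a full privilege chain can be resolved by walking queryPrivilege upward. A sketch of that walk (the helper class is hypothetical; it assumes GenericPrivilege exposes a getParentPrivGuid() accessor matching the mapped parentPrivGuid property):

package com.pinecone.hydra.account.ibatis;

import java.util.ArrayList;
import java.util.List;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.account.entity.GenericPrivilege;

final class PrivilegeChainSketch {
    // Walks parent_priv_guid links from a leaf privilege up to the root (or a dangling reference).
    static List<GenericPrivilege> chainOf( PrivilegeMapper mapper, GUID leaf ) {
        List<GenericPrivilege> chain = new ArrayList<>();
        GUID cursor = leaf;
        while ( cursor != null ) {
            GenericPrivilege p = mapper.queryPrivilege( cursor );
            if ( p == null ) { break; }
            chain.add( p );
            cursor = p.getParentPrivGuid(); // assumed accessor, see note above
        }
        return chain;
    }
}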
com.pinecone.hydra.account.source.RoleManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Select; import java.util.List; @IbatisDataAccessObject public interface RoleMapper extends RoleManipulator { @Insert("INSERT INTO `hydra_account_role` (`id`, `name`, `privilege_guids`, `create_time`, `update_time`, `type`) VALUES (#{id}, #{name}, #{privilegeGuids}, #{createTime}, #{updateTime}, #{type})") void insert(Role role); @Delete("DELETE FROM `hydra_account_role` WHERE `id` = #{id}") void remove(int id); @Insert("UPDATE `hydra_account_role` SET `create_time` = #{createTime}, `privilege_guids` = #{privilegeGuids}, `update_time` = #{updateTime}, `type`= #{type} WHERE `name` = #{name}") void updateRole(GenericRole role); @Select("SELECT * FROM `hydra_account_role` WHERE `name` = #{name}") GenericRole queryRolesByUserGuid(String userGuid); @Select("SELECT id , name, privilege_guids AS 'privilegeGuids', create_time AS 'createTime', update_time AS 'updateTime', type FROM `hydra_account_role`") List queryAllRoles(); @Delete("DELETE FROM `hydra_account_role` WHERE `id` = #{id}") void removeRoleById(int id); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserNodeMapper.java ================================================ package com.pinecone.hydra.account.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.account.entity.Account; import com.pinecone.hydra.account.entity.GenericAccount; import com.pinecone.hydra.account.source.UserNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import java.util.List; @IbatisDataAccessObject public interface UserNodeMapper extends UserNodeManipulator { @Insert("INSERT INTO `hydra_account_user_node` (`guid`, `user_name`, `nick_name`, `kernel_credential`, `credential_guid`, `kernel_group_type`, `create_time`, `update_time`) VALUES (#{guid},#{name},#{nickName},#{kernelCredential},#{credentialGuid},#{kernelGroupType},#{createTime},#{updateTime})") void insert(Account account); @Delete("DELETE FROM `hydra_account_user_node` WHERE `guid` = #{userGuid}") void remove(GUID userGuid); @Select("SELECT `id`, `guid`, `user_name` AS name, `nick_name` AS nickName, `kernel_credential` AS kernelCredential, `credential_guid` AS credentialGuid, `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, `update_time` AS updateTime FROM hydra_account_user_node WHERE `guid` = #{userGuid}") GenericAccount queryUser(GUID userGuid ); @Select("SELECT `guid` FROM hydra_account_user_node WHERE `user_name` = #{name}") List getGuidsByName(String name ); @Select("SELECT `guid` FROM hydra_account_user_node WHERE `user_name` = #{name} AND guid = #{guid}") List getGuidsByNameID(@Param("name") String name, @Param("guid") GUID guid ); @Select("SELECT `guid`, `user_name` AS name, `nick_name` AS nickName, `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, `update_time` AS updateTime ,`role` AS role FROM hydra_account_user_node") List queryAllAccount(); @Select("SELECT `guid`, `user_name` AS name, `nick_name` AS nickName, `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, 
`update_time` AS updateTime ,`role` AS role FROM hydra_account_user_node WHERE `user_name` = #{userName}") GenericAccount queryAccountByName(String userName); @Select("UPDATE `hydra_account_user_node` SET `user_name` = #{name}, `nick_name` = #{nickName}, `kernel_group_type` = #{kernelGroupType}, `update_time` = #{updateTime}, `role` = #{role} WHERE `guid` = #{guid}") void update(GenericAccount account); @Select("SELECT `guid`, `user_name` AS name, `nick_name` AS nickName, `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, `update_time` AS updateTime ,`role`,`credential_guid` AS credentialGuid FROM hydra_account_user_node WHERE `guid` = #{userGuid}") GenericAccount queryAccountByUserGuid(GUID userGuid); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserOwnerMapper.java ================================================ package com.pinecone.hydra.account.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Update; @IbatisDataAccessObject public interface UserOwnerMapper extends TireOwnerManipulator { @Insert("INSERT INTO `hydra_account_node_tree` (`guid`) VALUES ( #{guid} )") void insertRootNode(@Param("guid") GUID guid ); @Insert( "INSERT INTO `hydra_account_node_tree` (`guid`, `parent_guid`) VALUES (#{targetGuid}, #{parentGuid})" ) void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Update( "UPDATE `hydra_account_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Update( "UPDATE `hydra_account_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Delete( "DELETE FROM `hydra_account_node_tree` WHERE `guid`=#{subordinateGuid} " ) void remove( @Param("subordinateGuid") GUID subordinateGuid ); @Delete( "DELETE FROM `hydra_account_node_tree` WHERE `guid`=#{subordinateGuid} " ) void removeBySubordinate( GUID subordinateGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserPathCacheMapper.java ================================================ package com.pinecone.hydra.account.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @IbatisDataAccessObject public interface UserPathCacheMapper extends TriePathCacheManipulator { @Insert("INSERT INTO `hydra_account_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )") void insert(@Param("guid") GUID guid, @Param("path") String path ); @Delete("DELETE FROM `hydra_account_node_cache_path` WHERE `guid`=#{guid}") void remove( GUID guid ); @Select("SELECT `path` FROM `hydra_account_node_cache_path` WHERE 
`guid`=#{guid}") String getPath( GUID guid ); @Select("SELECT `guid` FROM `hydra_account_node_cache_path` WHERE `guid`=#{guid}") GUID getNode( String path ); @Select("SELECT `guid` FROM `hydra_account_node_cache_path` WHERE `path`=#{path}") GUID queryGUIDByPath( String path ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserTreeMapper.java ================================================ package com.pinecone.hydra.account.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @IbatisDataAccessObject public interface UserTreeMapper extends TrieTreeManipulator { @Insert("INSERT INTO `hydra_account_node_tree` (`guid`) VALUES ( #{guid} )") void insertRootNode(@Param("guid") GUID guid); @Override default void insert (TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){ this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() ); ownerManipulator.insertRootNode( node.getGuid() ); } @Insert("INSERT INTO `hydra_account_nodes` (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})") void insertTreeNode(@Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, @Param("nodeMetaGuid") GUID nodeMetaGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_account_nodes WHERE guid=#{guid}") GUIDImperialTrieNode getNodeExtendsFromMeta(GUID guid ); @Select("SELECT COUNT( `id` ) FROM hydra_account_nodes WHERE guid=#{guid}") boolean contains( GUID key ); @Override default GUIDImperialTrieNode getNode(GUID guid ) { GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid ); if( node == null ){ return node; } List parent = this.fetchParentGuids( guid ); node.setParentGUID( parent ); return node; } @Select("SELECT id, guid, parent_guid FROM hydra_account_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") GUIDImperialTrieNode getTreeNodeOnly(@Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT count( * ) FROM hydra_account_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") long countNode( GUID guid, GUID parentGuid ); @Override default void purge( GUID guid ) { this.removeNodeMeta( guid ); this.removeTreeNode( guid ); } @Delete("DELETE FROM `hydra_account_nodes` WHERE `guid`=#{guid}") void removeNodeMeta( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_account_node_tree` WHERE `guid` = #{guid}") void removeTreeNode( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_account_node_tree` WHERE `parent_guid` = #{parent_guid}") void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid ); @Delete("DELETE FROM `hydra_account_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}") void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID 
parentGuid ); @Delete("DELETE FROM `hydra_account_node_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}") void removeInheritance( @Param("chileGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_account_node_tree` WHERE `parent_guid`=#{guid}") List getChildren(GUID guid ); @Select("SELECT `guid` FROM `hydra_account_node_tree` WHERE `parent_guid` = #{parentGuid}") List fetchChildrenGuids( @Param("parentGuid") GUID parentGuid ); @Select("SELECT `parent_guid` FROM `hydra_account_node_tree` WHERE `guid`=#{guid}") List fetchParentGuids( GUID guid ); @Update("UPDATE `hydra_account_nodes` SET `type` = #{type} WHERE guid=#{guid}") void updateType( UOI type , GUID guid ); @Select( "SELECT guid FROM hydra_account_node_tree WHERE parent_guid IS NULL " ) List fetchRoot(); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_account_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}" ) boolean isRoot( GUID guid ); @Update("UPDATE hydra_account_node_tree SET parent_guid = #{parentGuid} WHERE guid = #{childGuid}") void addChild( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/hydranium/UserMappingDriver.java ================================================ package com.pinecone.hydra.account.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class UserMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public UserMappingDriver( Processum superiorProcess ) { super(superiorProcess); } public UserMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, UserMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new UserMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/hydranium/UserMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.account.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.account.ibatis.AuthorizationMapper; import com.pinecone.hydra.account.ibatis.CredentialMapper; import com.pinecone.hydra.account.ibatis.PrivilegeMapper; import com.pinecone.hydra.account.ibatis.RoleMapper; import com.pinecone.hydra.account.source.AuthorizationManipulator; import com.pinecone.hydra.account.source.CredentialManipulator; import com.pinecone.hydra.account.source.PrivilegeManipulator; import com.pinecone.hydra.account.source.RoleManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.account.ibatis.DomainNodeMapper; import 
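`UserMappingDriver` (like the bucket and deploy drivers later in this package layout) derives the mapper package it hands to the superclass from its own package name. A tiny standalone sketch of what that `replace` call actually produces; note the trailing dot, i.e. the result is a package *prefix*, not a package name:

// Standalone illustration, not repository code.
public class PackagePrefixSketch {
    public static void main( String[] args ) {
        String pkg = "com.pinecone.hydra.account.ibatis.hydranium";
        // Stripping the "hydranium" segment leaves the trailing dot intact.
        String prefix = pkg.replace( "hydranium", "" );
        System.out.println( prefix ); // com.pinecone.hydra.account.ibatis.
    }
}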
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/hydranium/UserMasterManipulatorImpl.java
================================================
package com.pinecone.hydra.account.ibatis.hydranium;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.account.ibatis.AuthorizationMapper;
import com.pinecone.hydra.account.ibatis.CredentialMapper;
import com.pinecone.hydra.account.ibatis.PrivilegeMapper;
import com.pinecone.hydra.account.ibatis.RoleMapper;
import com.pinecone.hydra.account.source.AuthorizationManipulator;
import com.pinecone.hydra.account.source.CredentialManipulator;
import com.pinecone.hydra.account.source.PrivilegeManipulator;
import com.pinecone.hydra.account.source.RoleManipulator;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;
import com.pinecone.hydra.account.ibatis.DomainNodeMapper;
import com.pinecone.hydra.account.ibatis.GroupNodeMapper;
import com.pinecone.hydra.account.ibatis.UserNodeMapper;
import com.pinecone.hydra.account.source.DomainNodeManipulator;
import com.pinecone.hydra.account.source.GroupNodeManipulator;
import com.pinecone.hydra.account.source.UserMasterManipulator;
import com.pinecone.hydra.account.source.UserNodeManipulator;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class UserMasterManipulatorImpl implements UserMasterManipulator {
    @Resource
    @Structure( type = DomainNodeMapper.class )
    protected DomainNodeManipulator domainNodeManipulator;

    @Resource
    @Structure( type = GroupNodeMapper.class )
    protected GroupNodeManipulator groupNodeManipulator;

    @Resource
    @Structure( type = UserNodeMapper.class )
    protected UserNodeManipulator userNodeManipulator;

    @Resource
    @Structure( type = AuthorizationMapper.class )
    protected AuthorizationManipulator authorizationManipulator;

    @Resource
    @Structure( type = UserMasterTreeManipulatorImpl.class )
    protected KOISkeletonMasterManipulator skeletonMasterManipulator;

    @Resource
    @Structure( type = CredentialMapper.class )
    protected CredentialManipulator credentialManipulator;

    @Resource
    @Structure( type = PrivilegeMapper.class )
    protected PrivilegeManipulator privilegeManipulator;

    @Resource
    @Structure( type = RoleMapper.class )
    protected RoleManipulator roleManipulator;

    public UserMasterManipulatorImpl() {
    }

    public UserMasterManipulatorImpl( KOIMappingDriver driver ) {
        driver.autoConstruct( UserMasterManipulatorImpl.class, Map.of(), this );
        this.skeletonMasterManipulator = new UserMasterTreeManipulatorImpl( driver );
    }

    @Override
    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {
        return this.skeletonMasterManipulator;
    }

    @Override
    public DomainNodeManipulator getDomainNodeManipulator() {
        return this.domainNodeManipulator;
    }

    @Override
    public GroupNodeManipulator getGroupNodeManipulator() {
        return this.groupNodeManipulator;
    }

    @Override
    public UserNodeManipulator getUserNodeManipulator() {
        return this.userNodeManipulator;
    }

    @Override
    public CredentialManipulator getCredentialManipulator() {
        return this.credentialManipulator;
    }

    @Override
    public AuthorizationManipulator getAuthorizationManipulator() {
        return this.authorizationManipulator;
    }

    @Override
    public PrivilegeManipulator getPrivilegeManipulator() {
        return this.privilegeManipulator;
    }

    @Override
    public RoleManipulator getRoleManipulator() {
        return this.roleManipulator;
    }
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/hydranium/UserMasterTreeManipulatorImpl.java
================================================
package com.pinecone.hydra.account.ibatis.hydranium;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;
import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;
import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;
import com.pinecone.hydra.account.ibatis.UserOwnerMapper;
import com.pinecone.hydra.account.ibatis.UserPathCacheMapper;
import com.pinecone.hydra.account.ibatis.UserTreeMapper;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class UserMasterTreeManipulatorImpl implements TreeMasterManipulator {
    @Resource
    @Structure( type = UserOwnerMapper.class )
    protected TireOwnerManipulator tireOwnerManipulator;

    @Resource
    @Structure( type = UserTreeMapper.class )
    protected TrieTreeManipulator trieTreeManipulator;

    @Resource
    @Structure( type = UserPathCacheMapper.class )
    protected TriePathCacheManipulator triePathCacheManipulator;

    public UserMasterTreeManipulatorImpl() {
    }

    public UserMasterTreeManipulatorImpl( KOIMappingDriver driver ) {
        driver.autoConstruct( UserMasterTreeManipulatorImpl.class, Map.of(), this );
    }

    @Override
    public TireOwnerManipulator getTireOwnerManipulator() {
        return this.tireOwnerManipulator;
    }

    @Override
    public TrieTreeManipulator getTrieTreeManipulator() {
        return this.trieTreeManipulator;
    }

    @Override
    public TriePathCacheManipulator getTriePathCacheManipulator() {
        return this.triePathCacheManipulator;
    }
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/BucketMapping.java
================================================
package com.pinecone.hydra.bucket.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.bucket.entity.Bucket;
import com.pinecone.hydra.storage.bucket.source.BucketManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

import java.util.List;

@IbatisDataAccessObject
public interface BucketMapping extends BucketManipulator {
    @Insert("INSERT INTO hydra_uos_bucket (`bucket_name`, `create_Time`, `bucket_guid`, `user_guid`, `mount_point_guid`) VALUES (#{bucketName},#{createTime},#{bucketGuid},#{userGuid},#{mountPoint})")
    void insert( Bucket bucket );

    @Delete("DELETE FROM hydra_uos_bucket WHERE `bucket_guid` = #{bucketGuid}")
    void remove( GUID bucketGuid );

    @Delete("DELETE FROM `hydra_uos_bucket` WHERE `user_guid` = #{accountGuid} AND `bucket_name` = #{bucketName}")
    void removeByAccountAndBucketName( @Param("accountGuid") GUID accountGuid, @Param("bucketName") String bucketName );

    @Select("SELECT `id`, `bucket_name` AS bucketName, `create_Time` AS createTime, `bucket_guid` AS bucketGuid, `user_guid` AS userGuid, `mount_point_guid` AS mountPoint FROM hydra_uos_bucket WHERE `bucket_guid` = #{bucketGuid}")
    Bucket queryBucketByBucketGuid( GUID bucketGuid );

    @Select("SELECT `id`, `bucket_name` AS bucketName, `create_Time` AS createTime, `bucket_guid` AS bucketGuid, `user_guid` AS userGuid, `mount_point_guid` AS mountPoint FROM hydra_uos_bucket WHERE `user_guid` = #{userGuid}")
    List<Bucket> queryBucketsByUserGuid( GUID userGuid );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/SiteMapping.java
================================================
package com.pinecone.hydra.bucket.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.bucket.entity.GenericSite;
import com.pinecone.hydra.storage.bucket.entity.Site;
import com.pinecone.hydra.storage.bucket.source.SiteManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;

import java.util.ArrayList;
import java.util.List;

@IbatisDataAccessObject
public interface SiteMapping extends SiteManipulator {
    @Insert("INSERT INTO `hydra_ucdn_sites` (`site_name`, `create_time`, `site_guid`, `mount_point_guid`) VALUES (#{siteName}, #{createTime}, #{siteGuid}, #{mountPointGuid})")
    void insert( Site site );

    @Delete("DELETE FROM `hydra_ucdn_sites` WHERE `site_guid` = #{siteGuid}")
    void remove( GUID siteGuid );

    @Delete("DELETE FROM `hydra_ucdn_sites` WHERE `site_name` = #{siteName}")
    void removeByName( String siteName );

    @Select("SELECT `id`, `site_name` AS siteName, `create_time` AS createTime, `site_guid` AS siteGuid, `mount_point_guid` AS mountPointGuid FROM `hydra_ucdn_sites` WHERE `site_guid` = #{siteGuid}")
    GenericSite querySite( GUID siteGuid );

    @Select("SELECT `id`, `site_name` AS siteName, `create_time` AS createTime, `site_guid` AS siteGuid, `mount_point_guid` AS mountPointGuid FROM `hydra_ucdn_sites` WHERE site_name = #{siteName}")
    GenericSite querySiteByName( String siteName );

    // Copies the concrete result list into a List of the Site interface type.
    default List<Site> listSite() {
        List<GenericSite> genericSites = this.listSite0();
        return new ArrayList<>( genericSites );
    }

    @Select("SELECT `id`, `site_name` AS siteName, `create_time` AS createTime, `site_guid` AS siteGuid, `mount_point_guid` AS mountPointGuid FROM `hydra_ucdn_sites`")
    List<GenericSite> listSite0();
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/SiteNodeMapper.java
================================================
package com.pinecone.hydra.bucket.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.bucket.entity.GenericSiteNode;
import com.pinecone.hydra.storage.bucket.entity.Site;
import com.pinecone.hydra.storage.bucket.entity.SiteNode;
import com.pinecone.hydra.storage.bucket.source.SiteNodeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.ArrayList;
import java.util.List;

@IbatisDataAccessObject
public interface SiteNodeMapper extends SiteNodeManipulator {
    @Insert("INSERT INTO `hydra_ucdn_site_node` (`node_name`, `node_guid`, `state`, `is_enabled`, `related_service`, `site_guid`) VALUES (#{nodeName},#{nodeGuid},#{state},#{isEnabled},#{relatedService},#{siteGuid})")
    void insert( SiteNode siteNode );

    @Delete("DELETE FROM `hydra_ucdn_site_node` WHERE `node_guid` = #{siteNodeGuid}")
    void remove( GUID siteNodeGuid );

    @Select("SELECT `node_name` AS nodeName, `node_guid` AS nodeGuid, `state`, `is_enabled` AS isEnabled, `related_service` AS relatedService, `id`, site_guid AS siteGuid FROM hydra_ucdn_site_node WHERE node_guid = #{siteNodeGuid}")
    GenericSiteNode querySiteNode( GUID siteNodeGuid );

    // Copies the concrete result list into a List of the SiteNode interface type.
    default List<SiteNode> querySiteNodeBySiteGuid( GUID siteGuid ) {
        List<GenericSiteNode> genericSiteNodes = this.querySiteNodeBySiteGuid0( siteGuid );
        return new ArrayList<>( genericSiteNodes );
    }

    @Select("SELECT `node_name` AS nodeName, `node_guid` AS nodeGuid, `state`, `is_enabled` AS isEnabled, `related_service` AS relatedService, `id`, site_guid AS siteGuid FROM hydra_ucdn_site_node WHERE site_guid = #{siteGuid}")
    List<GenericSiteNode> querySiteNodeBySiteGuid0( GUID siteGuid );

    @Update("UPDATE `hydra_ucdn_site_node` SET `node_name` = #{nodeName}, `state` = #{state}, `is_enabled` = #{isEnabled} WHERE `node_guid` = #{nodeGuid}")
    void update( SiteNode siteNode );
}
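`listSite()` and `querySiteNodeBySiteGuid()` both copy a `List<Generic...>` result into a fresh `ArrayList` instead of returning it directly. That is not incidental: assuming `GenericSite` implements `Site`, `List<GenericSite>` is still not assignable to `List<Site>`, and the copy performs the element-wise upcast that plain assignment cannot. A self-contained sketch with stand-in types (not repository classes):

import java.util.ArrayList;
import java.util.List;

// Illustrates the copy-based covariance bridge used by SiteMapping.listSite().
interface SiteLike {}
class GenericSiteLike implements SiteLike {}

public class CovarianceSketch {
    static List<GenericSiteLike> listSite0() {
        List<GenericSiteLike> rows = new ArrayList<>();
        rows.add( new GenericSiteLike() );
        return rows;
    }

    static List<SiteLike> listSite() {
        // ArrayList's copy constructor accepts Collection<? extends SiteLike>,
        // so the element-wise copy performs the upcast safely.
        return new ArrayList<>( listSite0() );
    }

    public static void main( String[] args ) {
        System.out.println( listSite().size() ); // 1
    }
}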
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/hydranium/BucketMappingDriver.java
================================================
package com.pinecone.hydra.bucket.ibatis.hydranium;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;

public class BucketMappingDriver extends ArchMappingDriver implements KOIMappingDriver {
    protected KOIMasterManipulator mKOIMasterManipulator;

    public BucketMappingDriver( Processum superiorProcess ) {
        super( superiorProcess );
    }

    // Temp, TODO
    public BucketMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {
        super( superiorProcess, ibatisClient, dispenserCenter, BucketMappingDriver.class.getPackageName().replace( "hydranium", "" ) );
        this.mKOIMasterManipulator = new BucketMasterManipulatorImpl( this );
    }

    @Override
    public KOIMasterManipulator getMasterManipulator() {
        return this.mKOIMasterManipulator;
    }
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/hydranium/BucketMasterManipulatorImpl.java
================================================
package com.pinecone.hydra.bucket.ibatis.hydranium;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.bucket.ibatis.BucketMapping;
import com.pinecone.hydra.bucket.ibatis.SiteMapping;
import com.pinecone.hydra.bucket.ibatis.SiteNodeMapper;
import com.pinecone.hydra.storage.bucket.source.BucketManipulator;
import com.pinecone.hydra.storage.bucket.source.BucketMasterManipulator;
import com.pinecone.hydra.storage.bucket.source.SiteManipulator;
import com.pinecone.hydra.storage.bucket.source.SiteNodeManipulator;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class BucketMasterManipulatorImpl implements BucketMasterManipulator {
    @Resource
    @Structure( type = BucketMapping.class )
    BucketManipulator bucketMapping;

    @Resource
    @Structure( type = SiteMapping.class )
    SiteManipulator siteManipulator;

    @Resource
    @Structure( type = SiteNodeMapper.class )
    SiteNodeManipulator siteNodeManipulator;

    public BucketMasterManipulatorImpl() {
    }

    public BucketMasterManipulatorImpl( KOIMappingDriver driver ) {
        driver.autoConstruct( BucketMasterManipulatorImpl.class, Map.of(), this );
    }

    @Override
    public BucketManipulator getBucketManipulator() {
        return this.bucketMapping;
    }

    @Override
    public SiteManipulator getSiteManipulator() {
        return this.siteManipulator;
    }

    @Override
    public SiteNodeManipulator getSiteNodeManipulator() {
        return this.siteNodeManipulator;
    }

    @Override
    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {
        // No skeleton master manipulator is wired for buckets.
        return null;
    }
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/ClusterNodeMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.ClusterElement;
import com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;
import com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface ClusterNodeMapper extends ClusterNodeManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_cluster_node` (`guid`, `name`, `type`, `create_time`, `update_time`) VALUES (#{guid}, #{name}, #{type}, #{createTime}, #{updateTime})")
    void insert( ClusterElement clusterElement );

    @Override
    @Delete("DELETE FROM `hydra_deploy_cluster_node` WHERE `guid` = #{guid}")
    void remove( @Param("guid") GUID guid );

    @Select("SELECT `id` AS `enumId`, `guid`, `name`, `type`, `create_time` AS `createTime`, `update_time` AS `updateTime` FROM `hydra_deploy_cluster_node` WHERE `guid` = #{guid}")
    GenericClusterElement getAppElement( @Param("guid") GUID guid );

    @Override
    default ClusterElement getClusterElement( GUID guid, DeployInstrument instrument ) {
        GenericClusterElement element = this.getAppElement( guid );
        element.apply( instrument );
        return element;
    }

    @Override
    @Update("UPDATE `hydra_deploy_cluster_node` SET `name` = #{name}, `type` = #{type}, `create_time` = #{createTime}, `update_time` = #{updateTime} WHERE `guid` = #{guid}")
    void update( ClusterElement clusterElement );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_cluster_node` WHERE `name` = #{name}")
    List<GUID> getGuidsByName( String name );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_cluster_node` WHERE `name` = #{name} AND `guid` = #{guid}")
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/ContainerElementMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.ContainerElement;
import com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;
import com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;
import com.pinecone.hydra.deploy.kom.entity.QuickElement;
import com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface ContainerElementMapper extends ContainerElementManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_container` (`guid`, `status`,`name`) VALUES (#{guid},#{status},#{name})")
    void insert( ContainerElement containerElement );

    @Override
    @Update("UPDATE `hydra_deploy_container` SET `status` = #{status}, `name` = #{name} WHERE `guid` = #{guid}")
    void update( ContainerElement serviceElement );

    @Override
    @Delete("DELETE FROM `hydra_deploy_container` WHERE `guid` = #{guid}")
    void remove( GUID guid );

    @Select("SELECT `guid`, `status` AS status FROM `hydra_deploy_container` WHERE `guid` = #{guid}")
    GenericContainerElement getContainerElement0( GUID guid );

    @Override
    default GenericContainerElement getContainerElement( GUID guid, DeployInstrument instrument ) {
        GenericContainerElement element = this.getContainerElement0( guid );
        element.apply( instrument );
        return element;
    }

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_container` WHERE `name` = #{name}")
    List<GUID> getGuidsByName( String name );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_container` WHERE `name` = #{name} AND `guid` = #{guid}")
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );

    @Select("SELECT `guid`, `status` AS status,`name` AS name FROM `hydra_deploy_container` WHERE `name` = #{name}")
    List<GenericContainerElement> fetchQuickElementByName( @Param("name") String name );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNamespaceMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.entity.GenericNamespace;
import com.pinecone.hydra.deploy.kom.entity.Namespace;
import com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface DeployNamespaceMapper extends DeployNamespaceManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_namespace_node` (`guid`, `name`) VALUES (#{guid},#{name})")
    void insert( Namespace ns );

    @Override
    @Delete("DELETE FROM `hydra_deploy_namespace_node` WHERE `guid`=#{guid}")
    void remove( @Param("guid") GUID guid );

    @Override
    @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_deploy_namespace_node` WHERE `guid`=#{guid}")
    GenericNamespace getNamespace( @Param("guid") GUID guid );

    @Override
    @Update("UPDATE `hydra_deploy_namespace_node` SET `name` = #{name} WHERE `guid` = #{guid}")
    void update( Namespace ns );

    @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_deploy_namespace_node` WHERE name=#{name}")
    List<GenericNamespace> fetchNamespaceNodeByName0( @Param("name") String name );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<Namespace> fetchNamespaceNodeByName( String name ) {
        return (List<Namespace>) (List<?>) this.fetchNamespaceNodeByName0( name );
    }

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_namespace_node` WHERE `name` = #{name}")
    List<GUID> getGuidsByName( String name );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_namespace_node` WHERE `name` = #{name} AND `guid` = #{guid}")
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );
}
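`fetchNamespaceNodeByName` takes the other route to the same covariance problem: instead of copying the list (as `SiteMapping.listSite()` does), it up-casts through a wildcard under `@SuppressWarnings("unchecked")`, trading a copy for an unchecked cast. A standalone sketch of that trade-off, with illustrative types only:

import java.util.ArrayList;
import java.util.List;

// Illustrates the cast-based covariance bridge used by fetchNamespaceNodeByName.
interface NsLike {}
class GenericNsLike implements NsLike {}

public class UncheckedCastSketch {
    static List<GenericNsLike> fetch0() {
        List<GenericNsLike> rows = new ArrayList<>();
        rows.add( new GenericNsLike() );
        return rows;
    }

    @SuppressWarnings("unchecked")
    static List<NsLike> fetch() {
        // Safe at runtime only as long as callers treat the list as read-only:
        // inserting a non-GenericNsLike element through List<NsLike> would
        // heap-pollute the underlying List<GenericNsLike>.
        return (List<NsLike>) (List<?>) fetch0();
    }

    public static void main( String[] args ) {
        System.out.println( fetch().size() ); // 1
    }
}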
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodeMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.entity.DeployElement;
import com.pinecone.hydra.deploy.kom.source.DeployNodeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface DeployNodeMapper extends DeployNodeManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_deploy_nodes` (`guid`,`enable`,`name`) VALUES (#{guid}, #{enable}, #{name})")
    void insert( DeployElement deployElement );

    @Override
    @Delete("DELETE FROM `hydra_deploy_deploy_nodes` WHERE `guid`=#{guid}")
    void remove( GUID guid );

    @Override
    @Update("UPDATE `hydra_deploy_deploy_nodes` SET `enable`=#{enable}, `name`=#{name} WHERE `guid`=#{guid}")
    void update( DeployElement deployElement );

    @Override
    @Select("SELECT `guid`, `enable` AS Enable FROM `hydra_deploy_deploy_nodes` WHERE `name`=#{name}")
    List<DeployElement> fetchDeployNodeByName( @Param("name") String name );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_deploy_nodes` WHERE `name`=#{name}")
    List<GUID> getGuidsByName( String name );

    // Finds nodes with the same name but a different guid (duplicate-name check).
    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_deploy_nodes` WHERE `name`=#{name} AND `guid`!=#{guid}")
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodeMetaMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.DeployFamilyNode;
import com.pinecone.hydra.deploy.kom.entity.GenericCommonMeta;
import com.pinecone.hydra.deploy.kom.entity.Namespace;
import com.pinecone.hydra.deploy.kom.source.NodeMetaManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

@Mapper
@IbatisDataAccessObject
public interface DeployNodeMetaMapper extends NodeMetaManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_node_meta` (`guid`,`description`,`extra_information`,`name`,`ip_address`) VALUES (#{guid}, #{description}, #{extraInformation},#{name},#{ipAddress})")
    void insert( DeployFamilyNode node );

    @Override
    @Insert("INSERT INTO `hydra_deploy_node_meta` (`guid`,`description`,`extra_information`,`name`,`ip_address`) VALUES (#{guid}, #{description}, #{extraInformation},#{name},#{ipAddress})")
    void insertNS( Namespace node );

    @Override
    @Delete("DELETE FROM `hydra_deploy_node_meta` WHERE `guid`=#{guid}")
    void remove( GUID guid );

    @Override
    @Select("SELECT `id` AS `enumId`, `guid`, `description` AS Description, `extra_information` AS ExtraInformation, `ip_address` AS ipAddress FROM `hydra_deploy_node_meta` WHERE `guid` = #{guid}")
    GenericCommonMeta getNodeCommonMeta( @Param("guid") GUID guid );

    @Override
    @Update("UPDATE `hydra_deploy_node_meta` SET `description` = #{description}, `extra_information` = #{extraInformation}, `ip_address` = #{ipAddress} WHERE guid = #{guid}")
    void update( DeployFamilyNode node );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodeOwnerMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@IbatisDataAccessObject
public interface DeployNodeOwnerMapper extends TireOwnerManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )")
    void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Insert("INSERT INTO `hydra_deploy_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})")
    void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );

    @Override
    @Update("UPDATE `hydra_deploy_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}")
    void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );

    @Override
    @Update("UPDATE `hydra_deploy_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}")
    void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID );

    @Override
    @Update("UPDATE `hydra_deploy_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}")
    void updateLinkedType( @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'")
    void remove( @Param("subordinateGuid") GUID subordinateGuid, @Param("ownerGuid") GUID ownerGuid );

    @Override
    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'")
    void removeBySubordinate( GUID subordinateGuid );

    // @Delete("DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}")
    // void removeByOwner(GUID ownerGuid);

    @Override
    @Select("SELECT `parent_guid` FROM `hydra_deploy_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'")
    GUID getOwner( GUID subordinateGuid );

    @Override
    @Select("SELECT guid FROM hydra_deploy_node_tree WHERE parent_guid=#{guid} AND linked_type = 'Owned'")
    List<GUID> getSubordinates( GUID guid );

    @Update("UPDATE `hydra_deploy_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}")
    void setLinkedType( @Param("sourceGuid") GUID sourceGuid, @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );

    @Select("SELECT `linked_type` FROM `hydra_deploy_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}")
    LinkedType getLinkedType( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );
}
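A note on `setLinkedType` above: wrapping a `#{}` placeholder in single quotes (as in `'#{linkedType}'`) breaks parameter binding, because MyBatis translates `#{...}` into a JDBC `?` marker, and a `?` inside a string literal is just literal text, so the intended value is never bound. Plain JDBC shows the same distinction; a minimal sketch (connection and table are assumptions for illustration):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class PlaceholderQuotingSketch {
    static void updateLinkedType( Connection conn, String guid, String linkedType ) throws SQLException {
        // CORRECT: `?` is a bound parameter, like an unquoted #{linkedType}.
        try ( PreparedStatement ps = conn.prepareStatement(
                "UPDATE hydra_deploy_node_tree SET linked_type = ? WHERE guid = ?" ) ) {
            ps.setString( 1, linkedType );
            ps.setString( 2, guid );
            ps.executeUpdate();
        }
        // WRONG (what the quoted form produces): "SET linked_type = '?'" makes the
        // question mark part of a string literal, so there is one parameter marker
        // fewer than expected and the value of linkedType is never applied.
    }
}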
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodePathCacheMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

@IbatisDataAccessObject
public interface DeployNodePathCacheMapper extends TriePathCacheManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_node_path`(`path`, `guid`) VALUES ( #{path}, #{guid} )")
    void insert( @Param("guid") GUID guid, @Param("path") String path );

    @Override
    @Insert("INSERT INTO `hydra_deploy_node_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )")
    void insertLongPath( @Param("guid") GUID guid, @Param("path") String path, @Param("longPath") String longPath );

    @Override
    @Delete("DELETE FROM `hydra_deploy_node_path` WHERE `guid`=#{guid}")
    void remove( GUID guid );

    // Returns the short path, appending the long-path suffix when one exists.
    default String getPath( GUID guid ) {
        String longPath = this.getLongPath( guid );
        if ( longPath != null ) {
            return this.getPath0( guid ) + longPath;
        }
        return this.getPath0( guid );
    }

    @Select("SELECT `long_path` FROM `hydra_deploy_node_path` WHERE `guid`=#{guid}")
    String getLongPath( GUID guid );

    @Select("SELECT `path` FROM `hydra_deploy_node_path` WHERE `guid`=#{guid}")
    String getPath0( GUID guid );

    // Resolves a cached path back to its node GUID.
    @Select("SELECT `guid` FROM `hydra_deploy_node_path` WHERE `path`=#{path}")
    GUID getNode( String path );

    @Select("SELECT `guid` FROM `hydra_deploy_node_path` WHERE `path`=#{path}")
    GUID queryGUIDByPath( String path );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployServiceInsMappingMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.entity.DeployInsMapping;
import com.pinecone.hydra.deploy.kom.entity.GenericDeployInsMapping;
import com.pinecone.hydra.deploy.kom.source.DeployServiceInsMappingManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

@Mapper
@IbatisDataAccessObject
public interface DeployServiceInsMappingMapper extends DeployServiceInsMappingManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_service_ins_mapping` (`deploy_guid`, `service_ins_guid`) VALUES (#{deployGuid}, #{serviceInsGuid})")
    void insert( DeployInsMapping deployInsMapping );

    @Override
    @Select("SELECT `id`, `deploy_guid`, `service_ins_guid`, `create_time`, `update_time` FROM `hydra_deploy_service_ins_mapping` WHERE `service_ins_guid` = #{insGuid}")
    GenericDeployInsMapping queryDeployInsMappingByInsGuid( @Param("insGuid") GUID insGuid );

    @Override
    @Select("SELECT `id`, `deploy_guid`, `service_ins_guid`, `create_time`, `update_time` FROM `hydra_deploy_service_ins_mapping` WHERE `deploy_guid` = #{deployGuid}")
    GenericDeployInsMapping queryDeployInsMappingByDeployGuid( GUID deployGuid );

    @Override
    @Delete("DELETE FROM `hydra_deploy_service_ins_mapping` WHERE `service_ins_guid` = #{insGuid}")
    void removeByInsGuid( GUID insGuid );

    @Override
    @Delete("DELETE FROM `hydra_deploy_service_ins_mapping` WHERE `deploy_guid` = #{deployGuid}")
    void removeByDeployGuid( GUID deployGuid );
}
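The signature of `insert(...)` above reflects a MyBatis binding rule worth keeping in mind: when a single bean argument carries `@Param("deployInsMapping")`, the bean is wrapped under that name and every placeholder must be qualified as `#{deployInsMapping.deployGuid}`; without `@Param`, bean properties resolve directly as `#{deployGuid}`. A compile-only sketch of the two consistent options (illustrative table and stand-in bean, not repository types):

import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;

interface MappingBindingSketch {
    // Option A: no @Param -- #{deployGuid} resolves against the bean's getter.
    @Insert("INSERT INTO t (`deploy_guid`) VALUES (#{deployGuid})")
    void insertA( DeployInsMappingLike m );

    // Option B: keep @Param, but qualify every placeholder with the param name.
    @Insert("INSERT INTO t (`deploy_guid`) VALUES (#{m.deployGuid})")
    void insertB( @Param("m") DeployInsMappingLike m );
}

class DeployInsMappingLike {
    private String deployGuid;
    public String getDeployGuid() { return this.deployGuid; }
}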
================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployTreeMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@IbatisDataAccessObject
public interface DeployTreeMapper extends TrieTreeManipulator {
    @Insert("INSERT INTO `hydra_deploy_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )")
    void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    // Persists the node's metadata row, then registers it as a root in the owner tree.
    @Override
    default void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ) {
        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );
        ownerManipulator.insertRootNode( node.getGuid() );
    }

    @Insert("INSERT INTO hydra_deploy_nodes (`guid`, `type`,`base_data_guid`,`node_metadata_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})")
    void insertTreeNode( @Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, @Param("nodeMetaGuid") GUID nodeMetaGuid );

    @Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_metadata_guid AS nodeMetadataGUID FROM hydra_deploy_nodes WHERE guid=#{guid}")
    GUIDImperialTrieNode getNodeExtendsFromMeta( GUID guid );

    @Override
    default GUIDImperialTrieNode getNode( GUID guid ) {
        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );
        if ( node == null ) {
            return null;
        }
        List<GUID> parent = this.fetchParentGuids( guid );
        node.setParentGUID( parent );
        return node;
    }

    @Select("SELECT COUNT( `id` ) FROM hydra_deploy_nodes WHERE guid=#{guid}")
    boolean contains( GUID key );

    @Select("SELECT id, guid, parent_guid, linked_type FROM hydra_deploy_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}")
    GUIDImperialTrieNode getTreeNodeOnly( @Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid );

    @Select("SELECT count( * ) FROM hydra_deploy_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}")
    long countNode( GUID guid, GUID parentGuid );

    @Override
    default void purge( GUID guid ) {
        this.removeNodeMeta( guid );
        this.removeTreeNode( guid );
        this.removeOwnedTreeNode( guid );
    }

    @Delete("DELETE FROM `hydra_deploy_nodes` WHERE `guid`=#{guid}")
    void removeNodeMeta( @Param("guid") GUID guid );

    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `guid` = #{guid}")
    void removeTreeNode( @Param("guid") GUID guid );

    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `parent_guid` = #{parent_guid}")
    void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid );

    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}")
    void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID parentGuid );

    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}")
    void removeTreeNodeWithLinkedType( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `guid`=#{childGuid} AND `parent_guid`=#{parentGuid}")
    void removeInheritance( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );

    @Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_deploy_node_tree` WHERE `parent_guid`=#{guid}")
    List<GUIDImperialTrieNode> getChildren( GUID guid );

    @Select("SELECT `guid` FROM `hydra_deploy_node_tree` WHERE `parent_guid` = #{parentGuid}")
    List<GUID> fetchChildrenGuids( @Param("parentGuid") GUID parentGuid );

    @Select("SELECT `parent_guid` FROM `hydra_deploy_node_tree` WHERE `guid`=#{guid}")
    List<GUID> fetchParentGuids( GUID guid );

    @Update("UPDATE `hydra_deploy_nodes` SET `type` = #{type} WHERE guid=#{guid}")
    void updateType( UOI type, GUID guid );

    @Select("SELECT guid FROM hydra_deploy_node_tree WHERE parent_guid IS NULL")
    List<GUID> fetchRoot();

    @Override
    @Select("SELECT COUNT( `guid` ) FROM hydra_deploy_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}")
    boolean isRoot( GUID guid );

    @Override
    @Select("SELECT COUNT( `guid` ) FROM hydra_deploy_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}")
    long queryLinkedCount( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Select("SELECT COUNT( `guid` ) FROM hydra_deploy_node_tree WHERE `guid` = #{guid}")
    long queryAllLinkedCount( @Param("guid") GUID guid );

    @Override
    @Insert("INSERT INTO `hydra_deploy_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})")
    void newLinkTag( @Param("originalGuid") GUID originalGuid, @Param("dirGuid") GUID dirGuid, @Param("tagName") String tagName, @Param("tagGuid") GUID tagGuid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Update("UPDATE hydra_deploy_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}")
    void updateLinkTagName( @Param("tagGuid") GUID tagGuid, @Param("tagName") String tagName );

    @Override
    @Select("SELECT `guid` FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}")
    GUID getOriginalGuid( @Param("tagName") String tagName, @Param("dirGuid") GUID dirGuid );

    @Override
    @Select("SELECT `guid` FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}")
    GUID getOriginalGuidByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );

    @Override
    @Select("SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}")
    TreeReparseLinkNode getReparseLinkNode( @Param("tagName") String tagName, @Param("parentDirGuid") GUID parentDirGuid );

    @Override
    @Select("SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}")
    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );

    @Override
    @Select("SELECT `guid` FROM hydra_deploy_node_tree WHERE `tag_name` = #{tagName}")
    List<GUID> fetchOriginalGuid( String tagName );

    @Override
    @Select("SELECT `guid` FROM hydra_deploy_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL")
    List<GUID> fetchOriginalGuidRoot( String tagName );

    @Override
    @Select("SELECT COUNT(*) FROM `hydra_deploy_node_tree` WHERE `tag_guid` = #{guid}")
    boolean isTagGuid( GUID guid );

    @Override
    @Delete("DELETE FROM `hydra_deploy_node_tree` WHERE `tag_guid` = #{guid}")
    void removeReparseLink( GUID guid );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_node_tree` WHERE `tag_guid` = #{tagGuid}")
    GUID getOriginalGuidByTagGuid( GUID tagGuid );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/PhysicalHostMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.entity.GenericPhysicalHost;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;
import com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;
import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;
import com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface PhysicalHostMapper extends PhysicalHostManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_physical_host` (`guid`, `name`, `hardware_specs`, `status`) VALUES (#{guid},#{name},#{hardwareSpecs},#{status})")
    void insert( PhysicalHostElement physicalHostElement );

    @Override
    @Update("UPDATE `hydra_deploy_physical_host` SET `name` = #{name}, `hardware_specs` = #{hardwareSpecs}, `status` = #{status} WHERE `guid` = #{guid}")
    void update( PhysicalHostElement serviceElement );

    @Override
    @Delete("DELETE FROM `hydra_deploy_physical_host` WHERE `guid` = #{guid}")
    void remove( GUID guid );

    @Select("SELECT `guid`, `name` AS ipAddress, `hardware_specs` AS hardwareSpecs, `status` FROM `hydra_deploy_physical_host` WHERE `guid` = #{guid}")
    GenericPhysicalHostElement getPhysicalHostElement0( GUID guid );

    @Override
    default GenericPhysicalHostElement getPhysicalHostElement( GUID guid, DeployInstrument instrument ) {
        GenericPhysicalHostElement element = this.getPhysicalHostElement0( guid );
        element.apply( instrument );
        return element;
    }

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_physical_host` WHERE `name`=#{name}")
    List<GUID> getGuidsByName( String name );

    // Finds hosts with the same name but a different guid (duplicate-name check).
    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_physical_host` WHERE `name`=#{name} AND `guid`!=#{guid}")
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/QuickElementMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;
import com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;
import com.pinecone.hydra.deploy.kom.entity.QuickElement;
import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;
import com.pinecone.hydra.deploy.kom.source.QuickElementManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface QuickElementMapper extends QuickElementManipulator {
    @Override
    @Insert("INSERT INTO `hydra_deploy_quick` (`guid`, `type_name`, `enable`,`name`) VALUES (#{guid},#{typeName},#{enable},#{name})")
    void insert( QuickElement quickElement );

    @Override
    @Update("UPDATE `hydra_deploy_quick` SET `enable` = #{enable}, `type_name` = #{typeName}, `name` = #{name} WHERE `guid` = #{guid}")
    void update( QuickElement serviceElement );

    @Override
    @Delete("DELETE FROM `hydra_deploy_quick` WHERE `guid` = #{guid}")
    void remove( GUID guid );

    @Select("SELECT `guid`, `type_name` AS typeName, `enable` AS Enable FROM `hydra_deploy_quick` WHERE `guid` = #{guid}")
    GenericQuickElement getQuickElement0( GUID guid );

    @Override
    default GenericQuickElement getQuickElement( GUID guid, DeployInstrument instrument ) {
        GenericQuickElement element = this.getQuickElement0( guid );
        element.apply( instrument );
        return element;
    }

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_quick` WHERE `name` = #{name}")
    List<GUID> getGuidsByName( String name );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_quick` WHERE `name` = #{name} AND `guid` = #{guid}")
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );

    @Select("SELECT `guid`, `type_name` AS typeName, `enable` AS enable, `name` AS Name FROM `hydra_deploy_quick` WHERE `name` = #{name}")
    List<GenericQuickElement> fetchQuickElementByName( @Param("name") String name );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/VirtualMachineMapper.java
================================================
package com.pinecone.hydra.deploy.ibatis;

import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.entity.GenericVirtualMachine;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;
import com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;
import com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.GenericTaskElement;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

@Mapper
@IbatisDataAccessObject
public interface VirtualMachineMapper extends VirtualMachineManipulator {
    @Insert("INSERT INTO `hydra_deploy_virtual_machine` (`guid`, `name`, `status`,`affiliate_host_guid`) VALUES (#{guid},#{name},#{status},#{affiliateHostGuid})")
    void insert( VirtualMachineElement virtualMachineElement );

    @Update("UPDATE `hydra_deploy_virtual_machine` SET `name` = #{name}, `status` = #{status}, `affiliate_host_guid` = #{affiliateHostGuid} WHERE `guid` = #{guid}")
    void update( VirtualMachineElement serviceElement );

    @Delete("DELETE FROM `hydra_deploy_virtual_machine` WHERE `guid` = #{guid}")
    void remove( GUID guid );

    @Select("SELECT `guid`, `name`, `status`, `affiliate_host_guid` FROM `hydra_deploy_virtual_machine` WHERE `guid` = #{guid}")
    GenericVirtualMachineElement getDeployNode0( GUID guid );

    @Override
    default VirtualMachineElement getDeployNode( GUID guid, DeployInstrument instrument ) {
        GenericVirtualMachineElement element = this.getDeployNode0( guid );
        element.apply( instrument );
        return element;
    }

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_virtual_machine` WHERE `name` = #{name}")
    List<GUID> getGuidsByName( String name );

    @Override
    @Select("SELECT `guid` FROM `hydra_deploy_virtual_machine` WHERE `name` = #{name} AND `guid` = #{guid}")
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/hydranium/DeployMappingDriver.java
================================================
package com.pinecone.hydra.deploy.ibatis.hydranium;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;

public class DeployMappingDriver extends ArchMappingDriver implements KOIMappingDriver {
    protected KOIMasterManipulator mKOIMasterManipulator;

    public DeployMappingDriver( Processum superiorProcess ) {
        super( superiorProcess );
    }

    // Temp, TODO
    public DeployMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {
        super( superiorProcess, ibatisClient, dispenserCenter, DeployMappingDriver.class.getPackageName().replace( "hydranium", "" ) );
        this.mKOIMasterManipulator = new DeployMasterManipulatorImpl( this );
    }

    @Override
    public KOIMasterManipulator getMasterManipulator() {
        return this.mKOIMasterManipulator;
    }
}

================================================
FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/hydranium/DeployMasterManipulatorImpl.java
================================================
package com.pinecone.hydra.deploy.ibatis.hydranium;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.deploy.ibatis.ContainerElementMapper;
import com.pinecone.hydra.deploy.ibatis.DeployNamespaceMapper;
import com.pinecone.hydra.deploy.ibatis.DeployNodeMapper;
import com.pinecone.hydra.deploy.ibatis.DeployNodeMetaMapper;
import com.pinecone.hydra.deploy.ibatis.DeployNodeOwnerMapper;
import com.pinecone.hydra.deploy.ibatis.DeployServiceInsMappingMapper;
import com.pinecone.hydra.deploy.ibatis.DeployTreeMapper;
import com.pinecone.hydra.deploy.ibatis.ClusterNodeMapper;
import com.pinecone.hydra.deploy.ibatis.PhysicalHostMapper;
import com.pinecone.hydra.deploy.ibatis.QuickElementMapper;
import com.pinecone.hydra.deploy.ibatis.VirtualMachineMapper;
import com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;
import com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;
import com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;
import com.pinecone.hydra.deploy.kom.source.DeployNodeManipulator;
import com.pinecone.hydra.deploy.kom.source.DeployServiceInsMappingManipulator;
import com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator;
import com.pinecone.hydra.deploy.kom.source.QuickElementManipulator;
import com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;
import com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator;
import com.pinecone.hydra.deploy.kom.source.NodeMetaManipulator;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class DeployMasterManipulatorImpl implements DeployMasterManipulator {
    @Resource
    @Structure( type = DeployNodeOwnerMapper.class )
    TireOwnerManipulator tireOwnerManipulator;

    @Resource
    @Structure( type = DeployTreeMapper.class )
    TrieTreeManipulator trieTreeManipulator;

    @Resource
    @Structure( type = ClusterNodeMapper.class )
    ClusterNodeManipulator jobNodeManipulator;

    @Resource
    @Structure( type = DeployNodeMetaMapper.class )
    NodeMetaManipulator nodeMetaManipulator;

    @Resource
    @Structure( type = DeployNodeMapper.class )
    DeployNodeManipulator deployNodeManipulator;

    @Resource
    @Structure( type = DeployNamespaceMapper.class )
    DeployNamespaceManipulator deployNamespaceManipulator;

    @Resource
    @Structure( type = PhysicalHostMapper.class )
    PhysicalHostManipulator physicalHostManipulator;

    @Resource
    @Structure( type = VirtualMachineMapper.class )
    VirtualMachineManipulator virtualMachineManipulator;

    @Resource
    @Structure( type = QuickElementMapper.class )
    QuickElementManipulator quickElementManipulator;

    @Resource
    @Structure( type = ContainerElementMapper.class )
    ContainerElementManipulator containerElementManipulator;

    @Resource
    @Structure( type = DeployServiceInsMappingMapper.class )
    DeployServiceInsMappingManipulator deployServiceInsMappingManipulator;

    @Resource( type = DeployMasterTreeManipulatorImpl.class )
    KOISkeletonMasterManipulator skeletonMasterManipulator;

    public DeployMasterManipulatorImpl() {
    }

    public DeployMasterManipulatorImpl( KOIMappingDriver driver ) {
        driver.autoConstruct( DeployMasterManipulatorImpl.class, Map.of(), this );
        this.skeletonMasterManipulator = new DeployMasterTreeManipulatorImpl( driver );
    }

    @Override
    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {
        return this.skeletonMasterManipulator;
    }

    @Override
    public TrieTreeManipulator getTrieTreeManipulator() {
        return this.trieTreeManipulator;
    }

    @Override
    public NodeMetaManipulator getNodeMetaManipulator() {
        return this.nodeMetaManipulator;
    }

    @Override
    public ClusterNodeManipulator getJobNodeManipulator() {
        return this.jobNodeManipulator;
    }

    @Override
    public DeployNodeManipulator getDeployNodeManipulator() {
        return this.deployNodeManipulator;
    }

    @Override
    public DeployNamespaceManipulator getNamespaceManipulator() {
        return this.deployNamespaceManipulator;
    }

    @Override
    public TireOwnerManipulator getTireOwnerManipulator() {
        return this.tireOwnerManipulator;
    }

    @Override
    public PhysicalHostManipulator getPhysicalHostManipulator() {
        return this.physicalHostManipulator;
    }

    @Override
    public VirtualMachineManipulator getVirtualMachineManipulator() {
        return this.virtualMachineManipulator;
    }

    @Override
    public QuickElementManipulator getQuickElementManipulator() {
        return this.quickElementManipulator;
    }

    @Override
    public ContainerElementManipulator getContainerElementManipulator() {
        return this.containerElementManipulator;
    }

    @Override
    public DeployServiceInsMappingManipulator getDeployServiceInsMappingManipulator() {
        return this.deployServiceInsMappingManipulator;
    }
}

================================================
FILE:
Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/hydranium/DeployMasterTreeManipulatorImpl.java ================================================
package com.pinecone.hydra.deploy.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.deploy.ibatis.DeployNodeOwnerMapper; import com.pinecone.hydra.deploy.ibatis.DeployNodePathCacheMapper; import com.pinecone.hydra.deploy.ibatis.DeployTreeMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class DeployMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = DeployNodePathCacheMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = DeployNodeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; @Resource @Structure( type = DeployTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; public DeployMasterTreeManipulatorImpl() { } public DeployMasterTreeManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( DeployMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/GUID128TypeHandler.java ================================================
package com.pinecone.hydra.entity.ibatis; import com.pinecone.ulf.util.guid.i128.UUID128; import org.apache.ibatis.type.BaseTypeHandler; import org.apache.ibatis.type.JdbcType; import org.apache.ibatis.type.MappedJdbcTypes; import org.apache.ibatis.type.MappedTypes; import java.sql.CallableStatement; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; /** MyBatis type handler persisting UUID128 values as VARCHAR strings. */ @MappedTypes(UUID128.class) @MappedJdbcTypes(JdbcType.VARCHAR) public class GUID128TypeHandler extends BaseTypeHandler<UUID128> { @Override public void setNonNullParameter(PreparedStatement ps, int i, UUID128 parameter, JdbcType jdbcType) throws SQLException { ps.setString(i, parameter.toString()); } @Override public UUID128 getNullableResult(ResultSet rs, String columnName) throws SQLException { String value = rs.getString(columnName); if (value == null) { return null; /* return null directly if the value is null */ } return new UUID128( value ); } @Override public UUID128 getNullableResult(ResultSet rs, int columnIndex) throws SQLException { String value = rs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new UUID128( value ); } @Override public UUID128 getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { String value = cs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new UUID128( value ); } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/GUID72TypeHandler.java ================================================
package com.pinecone.hydra.entity.ibatis; import com.pinecone.ulf.util.guid.i64.GUID72; import org.apache.ibatis.type.BaseTypeHandler; import org.apache.ibatis.type.JdbcType; import org.apache.ibatis.type.MappedJdbcTypes; import org.apache.ibatis.type.MappedTypes; import java.sql.CallableStatement; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; /** MyBatis type handler persisting GUID72 values as VARCHAR strings. */ @MappedTypes(GUID72.class) @MappedJdbcTypes(JdbcType.VARCHAR) public class GUID72TypeHandler extends BaseTypeHandler<GUID72> { @Override public void setNonNullParameter(PreparedStatement ps, int i, GUID72 parameter, JdbcType jdbcType) throws SQLException { ps.setString(i, parameter.toString()); } @Override public GUID72 getNullableResult(ResultSet rs, String columnName) throws SQLException { String value = rs.getString(columnName); if (value == null) { return null; /* return null directly if the value is null */ } return new GUID72( value ); } @Override public GUID72 getNullableResult(ResultSet rs, int columnIndex) throws SQLException { String value = rs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new GUID72( value ); } @Override public GUID72 getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { String value = cs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new GUID72( value ); } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/GUIDTypeHandler.java ================================================
package com.pinecone.hydra.entity.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.UUID128; import org.apache.ibatis.type.BaseTypeHandler; import org.apache.ibatis.type.JdbcType; import org.apache.ibatis.type.MappedJdbcTypes; import org.apache.ibatis.type.MappedTypes; import java.sql.CallableStatement; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; /** MyBatis type handler persisting GUID values as VARCHAR strings, materialized as UUID128. */ @MappedTypes(GUID.class) @MappedJdbcTypes(JdbcType.VARCHAR) public class GUIDTypeHandler extends BaseTypeHandler<GUID> { @Override public void setNonNullParameter(PreparedStatement ps, int i, GUID parameter, JdbcType jdbcType) throws SQLException { ps.setString(i, parameter.toString()); } @Override public GUID getNullableResult(ResultSet rs, String columnName) throws SQLException { String value = rs.getString(columnName); if (value == null) { return null; /* return null directly if the value is null */ } return new UUID128( value ); } @Override public GUID getNullableResult(ResultSet rs, int columnIndex) throws SQLException { String value = rs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new UUID128( value ); } @Override public GUID getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { String value = cs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new UUID128( value ); } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/UOITypeHandler.java ================================================
package com.pinecone.hydra.entity.ibatis; import java.sql.CallableStatement; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import org.apache.ibatis.type.BaseTypeHandler; import org.apache.ibatis.type.JdbcType; import org.apache.ibatis.type.MappedJdbcTypes; import org.apache.ibatis.type.MappedTypes; import com.pinecone.framework.util.uoi.UOI; /** MyBatis type handler persisting UOI values as VARCHAR strings. */ @MappedTypes(UOI.class) @MappedJdbcTypes(JdbcType.VARCHAR) public class UOITypeHandler extends BaseTypeHandler<UOI> { @Override public void setNonNullParameter( PreparedStatement ps, int i, UOI parameter, JdbcType jdbcType ) throws SQLException { ps.setString(i, parameter.toString()); } @Override public UOI getNullableResult( ResultSet rs, String columnName ) throws SQLException { String value = rs.getString(columnName); if (value == null) { return null; /* return null directly if the value is null */ } return new UOI( value ); } @Override public UOI getNullableResult( ResultSet rs, int columnIndex ) throws SQLException { String value = rs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new UOI( value ); } @Override public UOI getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { String value = cs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return new UOI( value ); } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/URITypeHandler.java ================================================
package com.pinecone.hydra.entity.ibatis; import java.net.URI; import java.sql.CallableStatement; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import org.apache.ibatis.type.BaseTypeHandler; import org.apache.ibatis.type.JdbcType; import org.apache.ibatis.type.MappedJdbcTypes; import org.apache.ibatis.type.MappedTypes; /** MyBatis type handler persisting java.net.URI values as VARCHAR strings. */ @MappedTypes(URI.class) @MappedJdbcTypes(JdbcType.VARCHAR) public class URITypeHandler extends BaseTypeHandler<URI> { @Override public void setNonNullParameter(PreparedStatement ps, int i, URI parameter, JdbcType jdbcType) throws SQLException { ps.setString(i, parameter.toString()); } @Override public URI getNullableResult(ResultSet rs, String columnName) throws SQLException { String value = rs.getString(columnName); if (value == null) { return null; /* return null directly if the value is null */ } return URI.create( value ); } @Override public URI getNullableResult(ResultSet rs, int columnIndex) throws SQLException { String value = rs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return URI.create( value ); } @Override public URI getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { String value = cs.getString(columnIndex); if (value == null) { return null; /* return null directly if the value is null */ } return URI.create( value ); } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/hydranium/ArchMappingDriver.java ================================================
package com.pinecone.hydra.entity.ibatis.hydranium; import java.util.List; import java.util.Map; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.construction.UnifyStructureInjector; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.homotype.StereotypicInjector; import com.pinecone.hydra.entity.ibatis.GUID128TypeHandler; import com.pinecone.hydra.entity.ibatis.GUID72TypeHandler; import com.pinecone.hydra.entity.ibatis.GUIDTypeHandler; import com.pinecone.hydra.entity.ibatis.UOITypeHandler; import com.pinecone.hydra.entity.ibatis.URITypeHandler;
import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.slime.jelly.source.ibatis.ProxySessionMapperPool; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public abstract class ArchMappingDriver implements KOIMappingDriver { protected Hydrogen mSystem; protected Processum mSuperiorProcess; protected IbatisClient mIbatisClient; /* protected SqlSession mSqlSession; */ protected List<Class<?>> mMapperCandidates; protected ResourceDispenserCenter mResourceDispenserCenter; public ArchMappingDriver( Processum superiorProcess ) { this.mSuperiorProcess = superiorProcess; if ( this.mSuperiorProcess instanceof Hydrogen ) { this.mSystem = (Hydrogen) this.mSuperiorProcess; } else { this.mSystem = (Hydrogen) superiorProcess.parentSystem(); } } /* Temp , TODO */ public ArchMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter, String szPackageName ) { this( superiorProcess ); this.mIbatisClient = ibatisClient; /* this.mSqlSession = ibatisClient.openSession( true ); // SqlSessionTemplate */ ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUID72TypeHandler.class ); ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUID128TypeHandler.class ); ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUIDTypeHandler.class ); ibatisClient.getConfiguration().getTypeHandlerRegistry().register( UOITypeHandler.class ); ibatisClient.getConfiguration().getTypeHandlerRegistry().register( URITypeHandler.class ); ibatisClient.addXMLObjectScope( "mapper.kernel.task" ); this.mMapperCandidates = ibatisClient.addDataAccessObjectScope( szPackageName ); for( Class<?> mapperClass : this.mMapperCandidates ) { dispenserCenter.getInstanceDispenser().register( mapperClass, /* new SoloSessionMapperPool( this.mSqlSession, mapperClass ) */ new ProxySessionMapperPool( ibatisClient, mapperClass ) ); } this.mResourceDispenserCenter = dispenserCenter; } @Override public StereotypicInjector autoConstruct( Class<?> stereotype, Map<String, Object> config, Object instance ) { UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, this.mResourceDispenserCenter.getInstanceDispenser() ); try { injector.inject( config, instance ); } catch ( Exception e ){ throw new ProxyProvokeHandleException( e ); } return injector; } @Override public String getVersionSignature() { return "HydraniumV2.1"; } @Override public Hydrogen getSystem() { return this.mSystem; } @Override public Processum getSuperiorProcess() { return this.mSuperiorProcess; } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/ExternalSymbolicMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ExternalSymbolic; import com.pinecone.hydra.storage.file.entity.GenericExternalSymbolic; import com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @IbatisDataAccessObject public interface ExternalSymbolicMapper extends ExternalSymbolicManipulator { @Insert("INSERT INTO hydra_uofs_directly_external_symbolic (`guid`, `create_time`, `update_time`, `name`, `reparsed_point`) VALUES (#{guid},#{createTime},#{updateTime},#{name},#{reparsedPoint})") void insert( ExternalSymbolic externalSymbolic ); @Delete("DELETE FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid}") void remove( GUID guid ); @Select("SELECT `id`, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `reparsed_point` AS reparsedPoint FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid}") GenericExternalSymbolic getSymbolicByGuid( GUID guid ); @Select("SELECT `id`, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `reparsed_point` AS reparsedPoint FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid} AND `name` = #{nodeName}") GenericExternalSymbolic getSymbolicByNameGuid( @Param("nodeName") String nodeName, @Param("guid") GUID guid ); @Select("SELECT COUNT(*) FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid} AND `name` = #{nodeName}") boolean isSymbolicMatchedByNameGuid( @Param("nodeName") String nodeName, @Param("guid") GUID guid ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.GenericFileNode; import com.pinecone.hydra.storage.file.source.FileManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface FileMapper extends FileManipulator { FileNode getFileNode(GUID guid, ElementNode element); @Insert("INSERT INTO `hydra_uofs_files` (`guid`, `create_time`, `update_time`, `deleted_at`, `name`, `checksum`, `parity_check`, `physical_size`,`logic_size`,`definition_size`,`crc32_xor`,`integrity_check_enable`,`disable_cluster`) VALUES (#{guid},#{createTime},#{updateTime},#{deletedTime},#{name},#{checksum},#{parityCheck},#{physicalSize},#{logicSize},#{definitionSize},#{crc32Xor},#{integrityCheckEnable},#{disableCluster})") void insert( FileNode fileNode ); @Delete("DELETE FROM `hydra_uofs_files` WHERE `guid` = #{guid}") void remove( GUID guid ); @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `deleted_at` AS deletedTime, `name`, `checksum`, `parity_check` AS parityCheck, `physical_size` AS physicalSize,`logic_size` AS logicSize,`definition_size` AS definitionSize,`crc32_xor` AS crc32Xor,`integrity_check_enable` AS integrityCheckEnable,`disable_cluster` AS disableCluster FROM hydra_uofs_files WHERE `guid` = #{guid}") GenericFileNode getFileNodeByGuid(GUID guid); @Select("SELECT `guid` FROM `hydra_uofs_files` WHERE `name` = #{name}") List<GUID> getGuidsByName( String name ); @Select("SELECT `guid` FROM `hydra_uofs_files` WHERE `name` = #{name} AND `guid` = #{guid}") List<GUID> getGuidsByNameID(@Param("name") String name, @Param("guid") GUID guid ); @Select("SELECT `guid` FROM hydra_uofs_files ") List<GUID> dumpGuid(); @Update("UPDATE hydra_uofs_files SET checksum = #{checksum}, parity_check = #{parityCheck}, physical_size = #{physicalSize}, logic_size = #{logicSize}, crc32_xor = #{crc32Xor}, definition_size = #{definitionSize} WHERE guid = #{guid}") void update( FileNode fileNode ); @Update("UPDATE hydra_uofs_files SET name = #{newName} WHERE guid = #{guid}") void rename( @Param("guid") GUID guid, @Param("newName") String newName ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileMetaMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FileMeta; import com.pinecone.hydra.storage.file.entity.GenericFileMeta; import com.pinecone.hydra.storage.file.source.FileMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface FileMetaMapper extends FileMetaManipulator { FileMeta getFileMeta(GUID guid, ElementNode element); @Insert("INSERT INTO `hydra_uofs_files_meta` (`guid`) VALUES (#{guid})") void insert( FileMeta fileMeta ); @Delete("DELETE FROM `hydra_uofs_files_meta` WHERE `guid` = #{guid}") void remove( GUID guid ); @Select("SELECT `id` AS enumId, `guid` FROM `hydra_uofs_files_meta` WHERE guid = #{guid}") GenericFileMeta getFileMetaByGuid(GUID guid); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileOwnerMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface FileOwnerMapper extends TireOwnerManipulator { @Insert("INSERT INTO `hydra_uofs_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )") void insertRootNode(@Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Insert( "INSERT INTO `hydra_uofs_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})" ) void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType ); @Update( "UPDATE `hydra_uofs_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType ); @Update( "UPDATE `hydra_uofs_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Update( "UPDATE `hydra_uofs_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void updateLinkedType( @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType ); @Delete( "DELETE FROM `hydra_uofs_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" ) void remove( @Param("subordinateGuid") GUID subordinateGuid, @Param("ownerGuid") GUID ownerGuid ); @Delete( "DELETE FROM `hydra_uofs_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" ) void removeBySubordinate( GUID subordinateGuid ); /* @Delete("DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}") void removeByOwner(GUID ownerGuid); */ @Select( "SELECT `parent_guid` FROM `hydra_uofs_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'" ) GUID getOwner( GUID subordinateGuid ); @Select( "SELECT guid FROM hydra_uofs_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'" ) List<GUID> getSubordinates( GUID guid ); @Update("UPDATE `hydra_uofs_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}") void setLinkedType( @Param("sourceGuid") GUID sourceGuid, @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType ); @Select("SELECT `linked_type` FROM `hydra_uofs_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` = #{parentGuid}") LinkedType getLinkedType( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FilePathCacheMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface FilePathCacheMapper extends TriePathCacheManipulator { @Insert("INSERT INTO `hydra_uofs_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )") void insert(@Param("guid") GUID guid, @Param("path") String path ); @Insert("INSERT INTO `hydra_uofs_node_cache_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )") void insertLongPath( @Param("guid") GUID guid, @Param("path") String path, @Param("longPath") String longPath ); @Delete("DELETE FROM `hydra_uofs_node_cache_path` WHERE `guid`=#{guid}") void remove( GUID guid ); default String getPath( GUID guid ){ String longPath = this.getLongPath( guid ); if( longPath != null ){ return this.getPath0( guid ) + longPath; } return this.getPath0( guid ); } @Select("SELECT `long_path` FROM `hydra_uofs_node_cache_path` WHERE `guid`=#{guid}") String getLongPath( GUID guid ); @Select("SELECT `path` FROM `hydra_uofs_node_cache_path` WHERE `guid`=#{guid}") String getPath0( GUID guid ); @Select("SELECT `guid` FROM `hydra_uofs_node_cache_path` WHERE `path`=#{path}") GUID getNode( String path ); @Select("SELECT `guid` FROM `hydra_uofs_node_cache_path` WHERE `path`=#{path}") GUID queryGUIDByPath( String path ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileSystemAttributeMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FileSystemAttributes; import com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import java.util.List; import java.util.Map; @Mapper @IbatisDataAccessObject public interface FileSystemAttributeMapper extends FileSystemAttributeManipulator { @Insert( "INSERT INTO `hydra_registry_node_attributes` (`guid`, `key`, `value`) VALUES (#{guid}, #{key}, #{value})" ) void insertAttribute( @Param("guid") GUID guid, @Param("key") String key, @Param("value") String value ); List<Map<String, String>> getAttributesByGuid( GUID guid ); void updateAttribute( GUID guid, String key, String value ); void remove( GUID guid ); default FileSystemAttributes getAttributes( GUID guid, ElementNode element ){ return null; } default void insert( FileSystemAttributes attributes ) { for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) { this.insertAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() ); } } default void update( FileSystemAttributes attributes ) { for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) { this.updateAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() ); } } boolean containsKey( GUID guid, String key ); void clearAttributes( GUID guid ); void removeAttributeWithValue( GUID guid, String key, String value ); void removeAttribute( GUID guid, String key ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileTreeMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface FileTreeMapper extends TrieTreeManipulator { @Insert("INSERT INTO `hydra_uofs_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )") void insertRootNode(@Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Override default void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){ this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() ); ownerManipulator.insertRootNode( node.getGuid() ); } @Insert("INSERT INTO hydra_uofs_nodes (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})") void insertTreeNode( @Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, @Param("nodeMetaGuid") GUID nodeMetaGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_uofs_nodes WHERE guid=#{guid}")
GUIDImperialTrieNode getNodeExtendsFromMeta( GUID guid ); @Select("SELECT COUNT( `id` ) FROM hydra_uofs_nodes WHERE guid=#{guid}") boolean contains( GUID key ); @Override default GUIDImperialTrieNode getNode( GUID guid ) { GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid ); List<GUID> parent = this.fetchParentGuids( guid ); node.setParentGUID( parent ); return node; } @Select("SELECT id, guid, parent_guid, linked_type FROM hydra_uofs_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") GUIDImperialTrieNode getTreeNodeOnly(@Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT count( * ) FROM hydra_uofs_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") long countNode( @Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid ); @Override default void purge( GUID guid ) { this.removeNodeMeta( guid ); this.removeTreeNode( guid ); this.removeOwnedTreeNode( guid ); } @Delete("DELETE FROM `hydra_uofs_nodes` WHERE `guid`=#{guid}") void removeNodeMeta( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_uofs_node_tree` WHERE `guid` = #{guid}") void removeTreeNode( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_uofs_node_tree` WHERE `parent_guid` = #{parent_guid}") void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid ); @Delete("DELETE FROM `hydra_uofs_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}") void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID parentGuid ); @Delete("DELETE FROM `hydra_uofs_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}") void removeTreeNodeWithLinkedType( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Delete("DELETE FROM `hydra_uofs_node_tree` WHERE `guid`=#{childGuid} AND `parent_guid`=#{parentGuid}") void removeInheritance( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_uofs_node_tree` WHERE `parent_guid`=#{guid}") List<GUIDImperialTrieNode> getChildren( GUID guid ); @Select("SELECT `guid` FROM `hydra_uofs_node_tree` WHERE `parent_guid` = #{parentGuid}") List<GUID> fetchChildrenGuids( @Param("parentGuid") GUID parentGuid ); @Select("SELECT `parent_guid` FROM `hydra_uofs_node_tree` WHERE `guid`=#{guid}") List<GUID> fetchParentGuids( GUID guid ); @Update("UPDATE `hydra_uofs_nodes` SET `type` = #{type} WHERE guid=#{guid}") void updateType( @Param("type") UOI type, @Param("guid") GUID guid ); @Select( "SELECT guid FROM hydra_uofs_node_tree WHERE parent_guid IS NULL " ) List<GUID> fetchRoot(); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_uofs_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}" ) boolean isRoot( GUID guid ); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_uofs_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}" ) long queryLinkedCount( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_uofs_node_tree WHERE `guid` = #{guid}" ) long queryAllLinkedCount( @Param("guid") GUID guid ); @Override @Insert( "INSERT INTO `hydra_uofs_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) " + "VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})" ) void newLinkTag( @Param("originalGuid") GUID originalGuid, @Param("dirGuid") GUID dirGuid, @Param("tagName") String tagName, @Param("tagGuid") GUID tagGuid, @Param("linkedType") LinkedType linkedType ); @Override @Update( "UPDATE hydra_uofs_node_tree SET tag_name = #{tagName} WHERE tag_guid = #{tagGuid}" ) void updateLinkTagName( @Param("tagGuid") GUID tagGuid, @Param("tagName") String tagName ); @Override @Select( "SELECT `guid` FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}" ) GUID getOriginalGuid( @Param("tagName") String tagName, @Param("dirGuid") GUID dirGuid ); @Override @Select( "SELECT `guid` FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" ) GUID getOriginalGuidByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID ); @Override @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}" ) TreeReparseLinkNode getReparseLinkNode(@Param("tagName") String tagName, @Param("parentDirGuid") GUID parentDirGuid ); @Override @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" ) TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID ); @Override @Select( "SELECT `guid` FROM hydra_uofs_node_tree WHERE `tag_name` = #{tagName}" ) List<GUID> fetchOriginalGuid( String tagName ); @Override @Select( "SELECT `guid` FROM hydra_uofs_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL" ) List<GUID> fetchOriginalGuidRoot( String tagName ); @Override @Select( "SELECT COUNT(*) FROM `hydra_uofs_node_tree` WHERE `tag_guid` = #{guid}" ) boolean isTagGuid(GUID guid); @Override @Delete( "DELETE FROM `hydra_uofs_node_tree` WHERE `tag_guid` = #{guid}" ) void removeReparseLink( GUID guid ); @Override @Select( "SELECT `guid` FROM `hydra_uofs_node_tree` WHERE `tag_guid` = #{tagGuid}" ) GUID getOriginalGuidByTagGuid(GUID tagGuid); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FolderMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.entity.GenericFolder; import com.pinecone.hydra.storage.file.source.FolderManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface FolderMapper extends FolderManipulator { Folder getFolder(GUID guid, ElementNode element); @Insert("INSERT INTO `hydra_uofs_folders` (`guid`, `create_time`, `update_time`, `name`) VALUES (#{guid},#{createTime},#{updateTime},#{name})") void insert( Folder folder ); @Delete("DELETE FROM `hydra_uofs_folders` WHERE `guid` = #{guid}") void remove( GUID guid ); @Update("UPDATE `hydra_uofs_folders` SET update_time = #{updateTime}, name = #{name} WHERE guid = #{guid}") void update( Folder folder ); @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name` FROM `hydra_uofs_folders` WHERE `guid` = #{guid}") GenericFolder getFolderByGuid(GUID guid); @Select("SELECT `guid` FROM `hydra_uofs_folders` WHERE `name` = #{name}") List<GUID> getGuidsByName( String name ); @Select("SELECT `guid` FROM `hydra_uofs_folders` WHERE `name` = #{name} AND `guid` = #{guid}") List<GUID> getGuidsByNameID(@Param("name") String name, @Param("guid") GUID guid ); @Select("SELECT `guid` FROM hydra_uofs_folders") List<GUID> dumpGuid(); @Select("SELECT COUNT(*) FROM hydra_uofs_folders WHERE guid = #{guid}") boolean isFolder(GUID guid); @Update("UPDATE hydra_uofs_folders SET name = #{newName} WHERE guid = #{fileGuid}") void rename( @Param("fileGuid") GUID fileGuid, @Param("newName") String newName ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FolderMetaMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FolderMeta; import com.pinecone.hydra.storage.file.entity.GenericFolderMeta; import com.pinecone.hydra.storage.file.source.FolderMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface FolderMetaMapper extends FolderMetaManipulator { FolderMeta getFolderMeta(GUID guid, ElementNode element); @Insert("INSERT INTO `hydra_uofs_folder_meta` (`guid`) VALUES (#{guid})") void insert( FolderMeta folderMeta ); @Delete("DELETE FROM `hydra_uofs_folder_meta` WHERE `guid` = #{guid}") void remove( GUID guid ); @Select("SELECT `id` AS enumId, `guid` FROM `hydra_uofs_folder_meta` WHERE `guid` = #{guid}") GenericFolderMeta getFolderMetaByGuid(GUID guid); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FolderVolumeMappingMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.source.FolderVolumeMappingManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface FolderVolumeMappingMapper extends FolderVolumeMappingManipulator { @Insert("INSERT INTO `hydra_uofs_file_volume_mapping` (`folder_guid`, `volume_guid`) VALUES (#{folderGuid}, #{volumeGuid})") void insert(@Param("folderGuid") GUID folderGuid, @Param("volumeGuid") GUID volumeGuid ); @Delete("DELETE FROM `hydra_uofs_file_volume_mapping` WHERE `volume_guid` = #{volumeGuid} AND `folder_guid` = #{folderGuid}") void remove( @Param("folderGuid") GUID folderGuid, @Param("volumeGuid") GUID volumeGuid ); @Select("SELECT `volume_guid` FROM `hydra_uofs_file_volume_mapping` WHERE `folder_guid` = #{folderGuid}") GUID getVolumeGuid( GUID folderGuid ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/LocalClusterMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.GenericLocalCluster; import com.pinecone.hydra.storage.file.entity.LocalCluster; import com.pinecone.hydra.storage.file.source.LocalClusterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.ArrayList; import java.util.List; @Mapper @IbatisDataAccessObject public interface LocalClusterMapper extends LocalClusterManipulator { LocalCluster getLocalCluster(GUID guid, ElementNode element); @Insert("INSERT INTO `hydra_uofs_local_cluster_fat` (`file_guid`, `seg_guid`, `seg_id`, `create_time`, `update_time`, `source_name`, `crc32`, `size`) VALUES (#{fileGuid},#{segGuid},#{segId},#{createTime},#{updateTime},#{sourceName},#{crc32},#{size})") void insert( LocalCluster localCluster ); @Delete("DELETE FROM `hydra_uofs_local_cluster_fat` WHERE `seg_guid` = #{guid}") void remove( GUID guid ); @Delete("DELETE FROM `hydra_uofs_local_cluster_fat` WHERE `file_guid` = #{fileGuid}") void removeClustersByFile( GUID fileGuid ); default GenericLocalCluster getLocalClusterByGuid( GUID guid ){ GenericLocalCluster localCluster = this.getLocalClusterByGuid0( guid ); if ( localCluster == null ) { return null; } localCluster.setLocalClusterManipulator( this ); return localCluster; } @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `seg_id` AS segId, `create_time` AS createTime, `update_time` AS updateTime, `source_name` AS sourceName, `crc32`, `size` FROM `hydra_uofs_local_cluster_fat` WHERE `seg_guid` = #{guid}") GenericLocalCluster getLocalClusterByGuid0(GUID guid); @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `seg_id` AS segId, `create_time` AS createTime, `update_time` AS updateTime, `source_name` AS sourceName, `crc32`, `size` FROM `hydra_uofs_local_cluster_fat` WHERE `file_guid` = #{guid}") List<GenericLocalCluster> getLocalClusterByFileGuid0( GUID guid ); @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `seg_id` AS segId, `create_time` AS createTime, `update_time` AS updateTime, `source_name` AS sourceName, `crc32`, `size` FROM `hydra_uofs_local_cluster_fat` WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}") GenericLocalCluster getClusterByFileWithId0( @Param("fileGuid") GUID fileGuid, @Param("segId") long segId ); @Update("UPDATE `hydra_uofs_local_cluster_fat` SET `size` = #{size} WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}") void update( LocalCluster localCluster ); @Delete("DELETE FROM `hydra_uofs_local_cluster_fat` WHERE file_guid = #{fileGuid} AND seg_id = #{segId}") void removeClusterByFileWithId( @Param("fileGuid") GUID fileGuid, @Param("segId") long segId ); default GenericLocalCluster getClusterByFileWithId( GUID fileGuid, long segId ){ GenericLocalCluster frame = this.getClusterByFileWithId0( fileGuid, segId ); if( frame == null ){ return null; } frame.setLocalClusterManipulator( this ); return frame; } default List<LocalCluster> getLocalClusterByFileGuid( GUID guid ){ List<LocalCluster> localClusters = new ArrayList<>(); List<GenericLocalCluster> frames = this.getLocalClusterByFileGuid0( guid ); for ( GenericLocalCluster frame : frames ){ frame.setLocalClusterManipulator( this ); localClusters.add( frame ); } return localClusters; } }
================================================ FILE: 
Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/RemoteClusterMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.GenericRemoteCluster; import com.pinecone.hydra.storage.file.entity.RemoteCluster; import com.pinecone.hydra.storage.file.source.RemoteClusterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import java.util.ArrayList; import java.util.List; @Mapper @IbatisDataAccessObject public interface RemoteClusterMapper extends RemoteClusterManipulator { RemoteCluster getRemoteCluster(GUID guid, ElementNode element); @Insert("INSERT INTO `hydra_uofs_files_cluster_mapping` (`file_guid`, `seg_guid`, `device_guid`, `seg_id`, `crc32`, `size`) VALUES (#{fileGuid},#{segGuid},#{deviceGuid},#{segId},#{crc32},#{size})") void insert( RemoteCluster remoteCluster ); @Delete("DELETE FROM `hydra_uofs_files_cluster_mapping` WHERE `seg_guid` = #{guid}") void remove( GUID guid ); @Delete("DELETE FROM `hydra_uofs_files_cluster_mapping` WHERE file_guid = #{fileGuid}") void removeClustersByFile( GUID fileGuid ); @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `seg_guid` = #{guid}") RemoteCluster fetchRemoteClustersByFileGuid(GUID guid); @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{guid}") List<GenericRemoteCluster> fetchRemoteClustersByFileGuid0( GUID guid ); @Override default List<RemoteCluster> fetchRemoteClusterByFileGuid( GUID guid ){ List<RemoteCluster> remoteClusters = new ArrayList<>(); List<GenericRemoteCluster> frames = this.fetchRemoteClustersByFileGuid0( guid ); for ( GenericRemoteCluster frame : frames ){ frame.setRemoteClusterManipulator( this ); remoteClusters.add( frame ); } return remoteClusters; } @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` " + "FROM `hydra_uofs_files_cluster_mapping` " + "WHERE `file_guid` = #{guid} " + "ORDER BY `seg_id`, `id` ASC " + "LIMIT #{offset}, #{pageSize}") List<GenericRemoteCluster> fetchRemoteClusterByFileGuidPart0( @Param("guid") GUID guid, @Param("offset") long offset, @Param("pageSize") int pageSize); @Override default List<RemoteCluster> fetchRemoteClusterByFileGuid( GUID guid, long offset, int pageSize ) { List<RemoteCluster> remoteClusters = new ArrayList<>(); List<GenericRemoteCluster> frames = this.fetchRemoteClusterByFileGuidPart0( guid, offset, pageSize ); for ( GenericRemoteCluster frame : frames ){ frame.setRemoteClusterManipulator( this ); remoteClusters.add( frame ); } return remoteClusters; } @Select("SELECT COUNT(*) FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{guid}") long countRemoteClustersByFileGuid( @Param("guid") GUID guid ); @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{guid} ORDER BY `seg_id` DESC LIMIT 1") RemoteCluster getLastCluster(GUID guid ); @Delete("DELETE FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}") void removeClusterByFileWithId( @Param("fileGuid") GUID fileGuid, @Param("segId") long segId ); @Select("SELECT COUNT(*) FROM `hydra_uofs_files_cluster_mapping` WHERE file_guid = #{fileGuid}") long countFileClusters( @Param("fileGuid") GUID fileGuid ); default RemoteCluster getClusterByFileWithId( GUID fileGuid, long segId ){ GenericRemoteCluster cluster = this.getClusterByFileWithId0( fileGuid, segId ); if( cluster == null ) { return null; } cluster.setRemoteClusterManipulator( this ); return cluster; } @Select("SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}") GenericRemoteCluster getClusterByFileWithId0( @Param("fileGuid") GUID fileGuid, @Param("segId") long segId ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/SymbolicMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.Symbolic; import com.pinecone.hydra.storage.file.source.SymbolicManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Mapper; @Mapper @IbatisDataAccessObject public interface SymbolicMapper extends SymbolicManipulator { /* Symbolic getSymbolic(GUID guid, ElementNode element); */ void insert( Symbolic symbolic ); void remove( GUID guid ); Symbolic getSymbolicByGuid(GUID guid); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/SymbolicMetaMapper.java ================================================
package com.pinecone.hydra.file.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.entity.SymbolicMeta; import com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Mapper; @Mapper @IbatisDataAccessObject public interface SymbolicMetaMapper extends SymbolicMetaManipulator { /* SymbolicMeta getSymbolicMeta(GUID guid, ElementNode element); */ void insert( SymbolicMeta symbolicMeta ); void remove( GUID guid ); SymbolicMeta getSymbolicMetaByGuid(GUID guid); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/hydranium/FileMappingDriver.java ================================================
package com.pinecone.hydra.file.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class FileMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public FileMappingDriver( Processum superiorProcess ) { super( superiorProcess ); } /* Temp , TODO */ public FileMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, FileMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new FileMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/hydranium/FileMasterManipulatorImpl.java ================================================
package com.pinecone.hydra.file.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.file.ibatis.ExternalSymbolicMapper; import com.pinecone.hydra.file.ibatis.FileMapper; import com.pinecone.hydra.file.ibatis.FileMetaMapper; import com.pinecone.hydra.file.ibatis.FileSystemAttributeMapper; import com.pinecone.hydra.file.ibatis.FolderMapper; import com.pinecone.hydra.file.ibatis.FolderMetaMapper; import com.pinecone.hydra.file.ibatis.FolderVolumeMappingMapper; import com.pinecone.hydra.file.ibatis.LocalClusterMapper; import com.pinecone.hydra.file.ibatis.RemoteClusterMapper; import com.pinecone.hydra.file.ibatis.SymbolicMapper; import com.pinecone.hydra.file.ibatis.SymbolicMetaMapper; import com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator; import com.pinecone.hydra.storage.file.source.FileManipulator; import com.pinecone.hydra.storage.file.source.FileMasterManipulator; import com.pinecone.hydra.storage.file.source.FileMetaManipulator; import com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator; import com.pinecone.hydra.storage.file.source.FolderManipulator; import com.pinecone.hydra.storage.file.source.FolderMetaManipulator; import com.pinecone.hydra.storage.file.source.FolderVolumeMappingManipulator; import com.pinecone.hydra.storage.file.source.LocalClusterManipulator; import com.pinecone.hydra.storage.file.source.RemoteClusterManipulator; import com.pinecone.hydra.storage.file.source.SymbolicManipulator; import com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class FileMasterManipulatorImpl implements FileMasterManipulator { @Resource @Structure( type = FileSystemAttributeMapper.class ) FileSystemAttributeManipulator fileSystemAttributeManipulator; @Resource @Structure( type = FileMapper.class ) FileManipulator fileManipulator; @Resource @Structure( type = FileMetaMapper.class ) FileMetaManipulator fileMetaManipulator; @Resource @Structure( type = FolderMapper.class ) FolderManipulator folderManipulator; @Resource @Structure( type = FolderMetaMapper.class ) FolderMetaManipulator folderMetaManipulator; @Resource @Structure( type = LocalClusterMapper.class ) LocalClusterManipulator localClusterManipulator; @Resource @Structure( type = RemoteClusterMapper.class ) RemoteClusterManipulator remoteClusterManipulator; @Resource @Structure( type = SymbolicMapper.class ) SymbolicManipulator symbolicManipulator; @Resource @Structure( type = SymbolicMetaMapper.class ) SymbolicMetaManipulator symbolicMetaManipulator; @Resource @Structure( type = ExternalSymbolicMapper.class ) ExternalSymbolicManipulator externalSymbolicManipulator; @Resource( type = FileMasterTreeManipulatorImpl.class ) KOISkeletonMasterManipulator skeletonMasterManipulator; @Resource @Structure( type = FolderVolumeMappingMapper.class )
FolderVolumeMappingMapper folderVolumeRelationMapper; public FileMasterManipulatorImpl() { } public FileMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( FileMasterManipulatorImpl.class, Map.of(), this ); this.skeletonMasterManipulator = new FileMasterTreeManipulatorImpl( driver ); } @Override public FileSystemAttributeManipulator getAttributeManipulator() { return this.fileSystemAttributeManipulator; } @Override public FileManipulator getFileManipulator() { return this.fileManipulator; } @Override public FileMetaManipulator getFileMetaManipulator() { return this.fileMetaManipulator; } @Override public FolderManipulator getFolderManipulator() { return this.folderManipulator; } @Override public FolderMetaManipulator getFolderMetaManipulator() { return this.folderMetaManipulator; } @Override public LocalClusterManipulator getLocalClusterManipulator() { return this.localClusterManipulator; } @Override public RemoteClusterManipulator getRemoteClusterManipulator() { return this.remoteClusterManipulator; } @Override public SymbolicManipulator getSymbolicManipulator() { return this.symbolicManipulator; } @Override public SymbolicMetaManipulator getSymbolicMetaManipulator() { return this.symbolicMetaManipulator; } @Override public ExternalSymbolicManipulator getExternalSymbolicManipulator() { return this.externalSymbolicManipulator; } @Override public FolderVolumeMappingManipulator getFolderVolumeRelationManipulator() { return this.folderVolumeRelationMapper; } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/hydranium/FileMasterTreeManipulatorImpl.java ================================================ package com.pinecone.hydra.file.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.file.ibatis.FileOwnerMapper; import com.pinecone.hydra.file.ibatis.FilePathCacheMapper; import com.pinecone.hydra.file.ibatis.FileTreeMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class FileMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = FilePathCacheMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = FileOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; @Resource @Structure( type = FileTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; public FileMasterTreeManipulatorImpl() { } public FileMasterTreeManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( FileMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } } ================================================ FILE: 
Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerCachePathMapper.java ================================================
package com.pinecone.hydra.layer.ibatis;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.vgraph.layer.source.LayerPathCacheManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
@IbatisDataAccessObject public interface LayerCachePathMapper extends LayerPathCacheManipulator {
@Insert("INSERT INTO `hydra_atlas_layer_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )") void insert(@Param("guid") GUID guid, @Param("path") String path );
@Delete("DELETE FROM `hydra_atlas_layer_node_cache_path` WHERE `guid`=#{guid}") void remove( GUID guid );
@Select("SELECT `path` FROM `hydra_atlas_layer_node_cache_path` WHERE `guid`=#{guid}") String getPath( GUID guid );
@Select("SELECT `guid` FROM `hydra_atlas_layer_node_cache_path` WHERE `path`=#{path}") GUID getNode( String path );
@Select("SELECT `guid` FROM `hydra_atlas_layer_node_cache_path` WHERE `path`=#{path}") GUID queryGUIDByPath( String path );
} ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerHandleMapper.java ================================================
package com.pinecone.hydra.layer.ibatis;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@IbatisDataAccessObject public interface LayerHandleMapper extends LayerHandleManipulator {
@Override @Insert("INSERT INTO `hydra_atlas_layer_handle` (`layer_guid`, `handle_guid`, `type`) VALUES (#{layerGuid},#{handleGuid},'source')") void insertSourceNode(GUID layerGuid, GUID handleGuid);
@Override @Insert("INSERT INTO `hydra_atlas_layer_handle` (`layer_guid`, `handle_guid`, `type`) VALUES (#{layerGuid},#{handleGuid},'sink')") void insertSinkNode(GUID layerGuid, GUID handleGuid);
@Override @Insert({ "" }) void batchInsertSourceNodes(@Param("layerGuid") GUID layerGuid, @Param("handleGuids") List<GUID> handleGuids);
@Override @Insert({ "" }) void batchInsertSinkNodes(@Param("layerGuid") GUID layerGuid, @Param("handleGuids") List<GUID> handleGuids);
@Override @Select("SELECT `handle_guid` FROM `hydra_atlas_layer_handle` WHERE `layer_guid` = #{layerGuid} AND `type` = 'source'") List<GUID> fetchSourceNodes( GUID layerGuid );
@Override @Select("SELECT `handle_guid` FROM `hydra_atlas_layer_handle` WHERE `layer_guid` = #{layerGuid} AND `type` = 'sink'") List<GUID> fetchSinkNodes( GUID layerGuid );
@Override @Select("SELECT COUNT(id) FROM `hydra_atlas_layer_handle` WHERE `layer_guid` = #{layerGuid} AND `type` = 'source'") long countSourceNode(GUID layerGuid);
@Override @Select("SELECT halh.handle_guid " + "FROM hydra_atlas_layer_handle halh " + "JOIN hydra_atlas_vgraph_task_mapping vatm ON halh.handle_guid = vatm.vgraph_node_guid " + "JOIN hydra_task_task_node httn ON vatm.task_guid = httn.guid " + "WHERE halh.layer_guid = #{layerGuid} " + "AND halh.type = 'source' " + "AND NOT EXISTS (" + " SELECT id FROM hydra_atlas_vgraph_adjacent hava WHERE hava.guid =
halh.handle_guid" + ") " + "ORDER BY httn.priority " + "LIMIT #{limit} OFFSET #{offset}") List fetchSourceGuidsByTaskPriority(GUID layerGuid, long offset, long limit); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerMapper.java ================================================ package com.pinecone.hydra.layer.ibatis; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.layer.AtlasLayer; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerGraphHandle; import com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.pinecone.slime.meta.TableIndex64Meta; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import java.util.List; @IbatisDataAccessObject public interface LayerMapper extends LayerManipulator { @Override @Insert( "INSERT INTO `hydra_atlas_layer_layers` " + "(`layer_guid`, `layer_name`, `update_time`, `create_time`) " + "VALUES (#{mGuid}, #{parentGuid}, #{mszName}, #{mUpdateTime}, #{mCreateTime})" ) void insertLayer( LayerGraphHandle layer ); @Override @Insert( "" ) void batchInsertLayer( @Param( "list" ) List list ); @Override @Delete( "DELETE FROM `hydra_atlas_layer_layers` " + "WHERE `layer_guid` = #{guid}" ) void remove( GUID guid ); @Select( "SELECT " + "l.`id` AS id, " + "l.`layer_guid` AS guid, " + "t.`parent_guid` AS parentGuid, " + "l.`layer_name` AS name, " + "l.`update_time` AS updateTime, " + "l.`create_time` AS createTime " + "FROM `hydra_atlas_layer_layers` l " + "LEFT JOIN `hydra_atlas_layer_tree` t ON t.`guid` = l.`layer_guid` " + "WHERE l.`layer_guid` = #{guid}" ) AtlasLayer queryLayer0( GUID guid ); @Override @SuppressWarnings( "unchecked" ) default Layer queryLayer( GUID guid ) { return this.queryLayer0( guid ); } @Override @Select( "SELECT `layer_guid` AS mGuid " + "FROM `hydra_atlas_layer_layers` " + "WHERE `layer_name` = #{name}" ) List getGuidsByName( String name ); @Override @Select( "SELECT `layer_guid` AS mGuid " + "FROM `hydra_atlas_layer_layers` " + "WHERE `layer_name` = #{name} " + "AND `layer_guid` = #{guid}" ) List getGuidsByNameID( String name, GUID guid ); @Override @SuppressWarnings( "unchecked" ) default List fetchLayer( List guids ) { return ( List ) this.fetchLayer0( guids ); } @Select( "" ) List fetchLayer0( List guids ); @Select( "" ) List fetchLayerPage0( @Param( "offset" ) long offset, @Param( "limit" ) long limit, @Param( "anyNode" ) boolean anyNode, @Param( "parentGuid" ) @Nullable GUID parentGuid ); @Override @SuppressWarnings( "unchecked" ) default List fetchLayerPage( long offset, long limit, boolean anyNode, @Nullable GUID parentGuid ) { return ( List ) this.fetchLayerPage0( offset, limit, anyNode, parentGuid ); } @Select( "" ) List fetchLayerPageById0( @Param( "idStart" ) long idStart, @Param( "idEnd" ) long idEnd, @Param( "anyNode" ) boolean anyNode, @Param( "parentGuid" ) @Nullable GUID parentGuid ); @Override @SuppressWarnings( "unchecked" ) default List fetchLayerPageById( long idStart, long idEnd, boolean anyNode, @Nullable GUID parentGuid ) { return ( List ) this.fetchLayerPageById0( idStart, idEnd, anyNode, parentGuid ); } @Override @Select( "" ) TableIndex64Meta selectLayerIndexMeta( @Param( "anyNode" ) 
boolean anyNode, @Param( "parentGuid" ) @Nullable GUID parentGuid );
@Override @Select( "" ) long countLayer( @Param( "anyNode" ) boolean anyNode, @Param( "parentGuid" ) @Nullable GUID parentGuid );
} ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerOwnerMapper.java ================================================
package com.pinecone.hydra.layer.ibatis;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.vgraph.layer.source.LayerOwnerManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;
@IbatisDataAccessObject public interface LayerOwnerMapper extends LayerOwnerManipulator {
@Insert("INSERT INTO `hydra_atlas_layer_tree` (`guid`) VALUES ( #{guid} )") void insertRootNode(@Param("guid") GUID guid );
@Insert( "INSERT INTO `hydra_atlas_layer_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})" ) void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );
@Update( "UPDATE `hydra_atlas_layer_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );
@Update( "UPDATE `hydra_atlas_layer_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID );
@Update( "UPDATE `hydra_atlas_layer_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void updateLinkedType( @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );
@Delete( "DELETE FROM `hydra_atlas_layer_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" ) void remove( @Param("subordinateGuid") GUID subordinateGuid, @Param("ownerGuid") GUID ownerGuid );
@Delete( "DELETE FROM `hydra_atlas_layer_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" ) void removeBySubordinate( GUID subordinateGuid );
// @Delete("DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}")
// void removeByOwner(GUID ownerGuid);
@Select( "SELECT `parent_guid` FROM `hydra_atlas_layer_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'" ) GUID getOwner( GUID subordinateGuid );
@Select( "SELECT guid FROM hydra_atlas_layer_tree where parent_guid=#{guid} AND linked_type = 'Owned'" ) List<GUID> getSubordinates( GUID guid );
@Update("UPDATE `hydra_atlas_layer_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}") void setLinkedType( @Param("sourceGuid") GUID sourceGuid, @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );
@Select("SELECT `linked_type` FROM `hydra_atlas_layer_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}") LinkedType getLinkedType( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );
} ================================================ FILE:
Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerTreeMapper.java ================================================
package com.pinecone.hydra.layer.ibatis;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.hydra.unit.vgraph.layer.source.LayerTreeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;
@IbatisDataAccessObject public interface LayerTreeMapper extends LayerTreeManipulator {
@Insert("INSERT INTO hydra_atlas_layer_tree_nodes (`guid`) VALUES ( #{guid} )") void insertRootNode(@Param("guid") GUID guid);
@Override default void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){ this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() ); ownerManipulator.insertRootNode( node.getGuid() ); }
@Insert("INSERT INTO `hydra_atlas_layer_tree_nodes` (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})") void insertTreeNode( @Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, @Param("nodeMetaGuid") GUID nodeMetaGuid );
@Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_atlas_layer_tree_nodes WHERE guid=#{guid}") GUIDImperialTrieNode getNodeExtendsFromMeta( GUID guid );
@Select("SELECT COUNT( `id` ) FROM hydra_atlas_layer_tree_nodes WHERE guid=#{guid}") boolean contains( GUID key );
@Override default GUIDImperialTrieNode getNode( GUID guid ) { GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid ); if( node == null ){ return node; } List<GUID> parent = this.fetchParentGuids( guid ); node.setParentGUID( parent ); return node; }
@Select("SELECT id, guid, parent_guid FROM hydra_atlas_layer_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") GUIDImperialTrieNode getTreeNodeOnly(@Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid );
@Select("SELECT count( * ) FROM hydra_atlas_layer_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") long countNode( GUID guid, GUID parentGuid );
@Override default void purge( GUID guid ) { this.removeNodeMeta( guid ); this.removeTreeNode( guid ); }
@Delete("DELETE FROM `hydra_atlas_layer_tree_nodes` WHERE `guid`=#{guid}") void removeNodeMeta( @Param("guid") GUID guid );
@Delete("DELETE FROM `hydra_atlas_layer_tree` WHERE `guid` = #{guid}") void removeTreeNode( @Param("guid") GUID guid );
@Delete("DELETE FROM `hydra_atlas_layer_tree` WHERE `parent_guid` = #{parent_guid}") void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid );
@Delete("DELETE FROM `hydra_atlas_layer_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}") void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID parentGuid );
@Delete("DELETE FROM `hydra_atlas_layer_tree` WHERE `guid`=#{childGuid} AND `parent_guid`=#{parentGuid}") void removeInheritance( @Param("childGuid") GUID
childGuid, @Param("parentGuid") GUID parentGuid );
@Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_atlas_layer_tree` WHERE `parent_guid`=#{guid}") List<GUIDImperialTrieNode> getChildren( GUID guid );
@Select("SELECT `guid` FROM `hydra_atlas_layer_tree` WHERE `parent_guid` = #{parentGuid}") List<GUID> fetchChildrenGuids( @Param("parentGuid") GUID parentGuid );
@Select("SELECT `parent_guid` FROM `hydra_atlas_layer_tree` WHERE `guid`=#{guid}") List<GUID> fetchParentGuids( GUID guid );
@Update("UPDATE `hydra_atlas_layer_tree_nodes` SET `type` = #{type} WHERE guid=#{guid}") void updateType( UOI type, GUID guid );
@Select( "SELECT guid FROM hydra_atlas_layer_tree WHERE parent_guid IS NULL " ) List<GUID> fetchRoot(@Param("tableName") String tableName);
@Override @Select( "SELECT COUNT( `guid` ) FROM hydra_atlas_layer_tree WHERE `parent_guid` IS NULL AND guid = #{guid}" ) boolean isRoot( GUID guid );
@Update("UPDATE hydra_atlas_layer_tree SET parent_guid = #{parentGuid} WHERE guid = #{childGuid}") void addChild( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );
@Override @Select( "SELECT COUNT( `guid` ) FROM hydra_atlas_layer_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}" ) long queryLinkedCount( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );
@Override @Select( "SELECT COUNT( `guid` ) FROM hydra_atlas_layer_tree WHERE `guid` = #{guid}" ) long queryAllLinkedCount( @Param("guid") GUID guid );
@Override @Insert( "INSERT INTO `hydra_atlas_layer_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) " + "VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})" ) void newLinkTag( @Param("originalGuid") GUID originalGuid, @Param("dirGuid") GUID dirGuid, @Param("tagName") String tagName, @Param("tagGuid") GUID tagGuid, @Param("linkedType") LinkedType linkedType );
@Override @Update( "UPDATE hydra_atlas_layer_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}" ) void updateLinkTagName( @Param("tagGuid") GUID tagGuid, @Param("tagName") String tagName );
@Override @Select( "SELECT `guid` FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}" ) GUID getOriginalGuid( @Param("tagName") String tagName, @Param("dirGuid") GUID dirGuid );
@Override @Select( "SELECT `guid` FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" ) GUID getOriginalGuidByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );
@Override @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}" ) TreeReparseLinkNode getReparseLinkNode( @Param("tagName") String tagName, @Param("parentDirGuid") GUID parentDirGuid );
@Override @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" ) TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );
@Override @Select( "SELECT `guid` FROM hydra_atlas_layer_tree WHERE `tag_name` = #{tagName}" ) List<GUID> fetchOriginalGuid( String tagName );
@Override @Select( "SELECT `guid` FROM hydra_atlas_layer_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL" ) List<GUID> fetchOriginalGuidRoot( String tagName );
@Override
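// Illustrative flow (hypothetical values): a reparse link written by newLinkTag
// above is resolved back through the tag columns that isTagGuid and
// removeReparseLink below operate on:
//
//   treeMapper.newLinkTag( originalGuid, dirGuid, "alias", tagGuid, linkedType );
//   GUID resolved = treeMapper.getOriginalGuid( "alias", dirGuid );   // == originalGuid
//   TreeReparseLinkNode link = treeMapper.getReparseLinkNode( "alias", dirGuid );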
@Select( "SELECT COUNT(*) FROM `hydra_atlas_layer_tree` WHERE `tag_guid` = #{guid}" ) boolean isTagGuid(GUID guid); @Override @Delete( "DELETE FROM `hydra_atlas_layer_tree` WHERE `tag_guid` = #{guid}" ) void removeReparseLink( GUID guid ); @Override @Select( "SELECT `guid` FROM `hydra_atlas_layer_tree` WHERE `tag_guid` = #{tagGuid}" ) GUID getOriginalGuidByTagGuid(GUID tagGuid); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/NamespaceMapper.java ================================================ package com.pinecone.hydra.layer.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.layer.LayerNamespace; import com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import java.util.List; @IbatisDataAccessObject public interface NamespaceMapper extends NamespaceManipulator { @Insert("INSERT INTO `hydra_atlas_layer_namespace` (`guid`, `name`, `update_time`, `create_time`) VALUES (#{guid},#{name},#{updateTime},#{createTime})") void insert( LayerNamespace layerNamespace ); @Delete("DELETE FROM `hydra_atlas_layer_namespace` WHERE guid = #{guid}") void remove( GUID guid ); @Select("SELECT `id` AS enumId, `guid`, `name`, `update_time` AS updateTime, `create_time` AS createTime FROM `hydra_atlas_layer_namespace` WHERE `guid` = #{guid}") LayerNamespace query( GUID guid ); @Select( "SELECT `guid` FROM `hydra_atlas_layer_namespace` WHERE `name` = #{name}" ) List getGuidsByName(String name ); @Select( "SELECT `guid` FROM `hydra_atlas_layer_namespace` WHERE `name` = #{name} AND `guid` = #{guid}" ) List getGuidsByNameID(@Param("name") String name, @Param("guid") GUID guid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/hydranium/LayerMappingDriver.java ================================================ package com.pinecone.hydra.layer.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class LayerMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public LayerMappingDriver(Processum superiorProcess ) { super(superiorProcess); } public LayerMappingDriver(Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, LayerMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new LayerMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/hydranium/LayerMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.layer.ibatis.hydranium; import 
com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.layer.ibatis.LayerHandleMapper; import com.pinecone.hydra.layer.ibatis.LayerMapper; import com.pinecone.hydra.layer.ibatis.NamespaceMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator; import com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class LayerMasterManipulatorImpl implements LayerMasterManipulator { @Resource @Structure( type = LayerMasterTreeManipulatorImpl.class ) KOISkeletonMasterManipulator skeletonMasterManipulator; @Resource @Structure( type = NamespaceMapper.class ) NamespaceManipulator namespaceManipulator; @Resource @Structure( type = LayerMapper.class ) LayerManipulator layerManipulator; @Resource @Structure( type = LayerHandleMapper.class ) LayerHandleManipulator layerHandleManipulator; public LayerMasterManipulatorImpl() { } public LayerMasterManipulatorImpl(KOIMappingDriver driver ) { driver.autoConstruct( LayerMasterManipulatorImpl.class, Map.of(), this ); this.skeletonMasterManipulator = new LayerMasterTreeManipulatorImpl( driver ); } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } @Override public LayerManipulator getLayerManipulator() { return this.layerManipulator; } @Override public NamespaceManipulator getNamespaceManipulator() { return this.namespaceManipulator; } @Override public LayerHandleManipulator getLayerHandleManipulator() { return this.layerHandleManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/hydranium/LayerMasterTreeManipulatorImpl.java ================================================ package com.pinecone.hydra.layer.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.layer.ibatis.LayerCachePathMapper; import com.pinecone.hydra.layer.ibatis.LayerOwnerMapper; import com.pinecone.hydra.layer.ibatis.LayerTreeMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class LayerMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = LayerTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; @Resource @Structure( type = LayerCachePathMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = LayerOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; public LayerMasterTreeManipulatorImpl() { } public LayerMasterTreeManipulatorImpl(KOIMappingDriver driver ) { driver.autoConstruct( LayerMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TireOwnerManipulator getTireOwnerManipulator() { return 
this.tireOwnerManipulator; }
@Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; }
@Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; }
} ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/PolicyFileMappingMapper.java ================================================
package com.pinecone.hydra.policy.ibatis;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.policy.source.PolicyFileMappingManipulator;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;
public interface PolicyFileMappingMapper extends PolicyFileMappingManipulator {
@Insert("INSERT INTO `hydra_uofs_file_policy_mapping` (`file_path`, `policy_guid`) VALUES (#{filePath}, #{policyGuid})") void insert(@Param("policyGuid") GUID policyGuid, @Param("filePath") String filePath);
@Delete("DELETE FROM hydra_uofs_file_policy_mapping WHERE policy_guid = #{policyGuid} AND file_path = #{filePath}") void remove(@Param("policyGuid") GUID policyGuid, @Param("filePath") String filePath);
@Select("SELECT policy_guid FROM hydra_uofs_file_policy_mapping WHERE file_path = #{filePath}") List<GUID> queryPolicy(@Param("filePath") String filePath );
} ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/PolicyMapper.java ================================================
package com.pinecone.hydra.policy.ibatis;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.policy.entity.Policy;
import com.pinecone.hydra.storage.policy.source.PolicyManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;
@IbatisDataAccessObject public interface PolicyMapper extends PolicyManipulator {
@Insert("INSERT INTO `hydra_uofs_policy` (`policy_name`, `policy_guid`, `policy_desc`) VALUES (#{policyName}, #{policyGuid}, #{policyDesc})") void insert(Policy policy);
@Delete("DELETE FROM `hydra_uofs_policy` WHERE `policy_guid` = #{policyGuid}") void remove(GUID policyGuid);
@Select("SELECT `id`, `policy_name` AS policyName, `policy_guid` AS policyGuid, `policy_desc` AS policyDesc FROM hydra_uofs_policy WHERE `policy_guid` = #{policyGuid}") Policy queryPolicy( GUID policyGuid );
} ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/hydranium/PolicyMappingDriver.java ================================================
package com.pinecone.hydra.policy.ibatis.hydranium;
import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
public class PolicyMappingDriver extends ArchMappingDriver implements KOIMappingDriver {
protected KOIMasterManipulator mKOIMasterManipulator;
public PolicyMappingDriver( Processum superiorProcess ) { super( superiorProcess ); }
// Temp , TODO
public PolicyMappingDriver( Processum
superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, PolicyMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new PolicyMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/hydranium/PolicyMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.policy.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.policy.ibatis.PolicyFileMappingMapper; import com.pinecone.hydra.policy.ibatis.PolicyMapper; import com.pinecone.hydra.storage.policy.source.PolicyFileMappingManipulator; import com.pinecone.hydra.storage.policy.source.PolicyManipulator; import com.pinecone.hydra.storage.policy.source.PolicyMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import javax.annotation.Resource; import java.util.Map; public class PolicyMasterManipulatorImpl implements PolicyMasterManipulator { @Resource @Structure( type = PolicyMapper.class ) PolicyManipulator policyMapping; @Resource @Structure( type = PolicyFileMappingMapper.class ) PolicyFileMappingMapper policyFileMappingMapper; public PolicyMasterManipulatorImpl() { } public PolicyMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( PolicyMasterManipulatorImpl.class, Map.of(), this ); } @Override public PolicyManipulator getPolicyManipulator() { return this.policyMapping; } @Override public PolicyFileMappingManipulator getPolicyFileMappingManipulator() { return this.policyFileMappingMapper; } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return null; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/AtlasExecuteQueueMapper.java ================================================ package com.pinecone.hydra.queue.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.iqueue.DPQueueManipulator; import com.pinecone.hydra.unit.iqueue.QueueMeta; import com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.ArrayList; import java.util.List; @Mapper @IbatisDataAccessObject public interface AtlasExecuteQueueMapper extends DPQueueManipulator { @Override @Insert({ "" }) void pushBack(@Param("element") QueueElement queueElement, @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta); @Override @Insert({ "" }) void pushFront(@Param("element") QueueElement queueElement, @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta); @Override @Update({ "" }) void incrementLinkedPriorities(@Param("element") QueueElement queueElement, 
@Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta); @Override @Select({ "" }) GenericQueueElement popFront ( @Param("currentPos") long currentPos, @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta ); @Override default List batchPopFront(long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta meta, long limit, long offset) { List queueElements = this.batchPopFront0(currentPos, sharedSegmentField, sharedSegmentName, meta, limit, offset); ArrayList arrayList = new ArrayList<>(queueElements); long i = 0; for( QueueElement element : arrayList ) { Long indexPriority = this.getIndexPriority(currentPos + i, sharedSegmentField, sharedSegmentName, meta); element.setIndexPriority( indexPriority ); i++; } return arrayList; } @Select({ "" }) List batchPopFront0( @Param("currentPos") long currentPos, @Param("sharedSegmentField") String sharedSegmentField, @Param("sharedSegmentName") String sharedSegmentName, @Param("meta") QueueMeta meta, @Param("limit") long limit, @Param("offset") long offset ); @Override default List batchPopBack(String sharedSegmentField, String sharedSegmentName, QueueMeta meta, long limit, long offset) { List queueElements = this.batchPopBack0(sharedSegmentField, sharedSegmentName, meta, limit, offset); return new ArrayList<>(queueElements); } @Select({ "" }) List batchPopBack0( @Param("sharedSegmentField") String sharedSegmentField, @Param("sharedSegmentName") String sharedSegmentName, @Param("meta") QueueMeta meta, @Param("limit") long limit, @Param("offset") long offset ); @Override @Select({ "" }) QueueElement popBack( @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta ); @Override @Select({ "" }) long queryQueueSize( @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta ); @Delete("DELETE FROM ${meta.QueueTable} WHERE id = #{currentPos} AND ${field} = ${segmentName}") QueueElement remove( @Param("currentPos") long currentPos, @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta ); @Override @Select("SELECT `id` AS enumId, `object_guid` AS objectGuid, `priority`, " + "`linked_priority` AS linkedPriority, `bias` " + "FROM ${meta.QueueTable} WHERE id = #{id} AND ${field} = ${segmentName}") GenericQueueElement query( @Param("id") long enumId, @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta ); @Override default List fetchElementByPriority(long priority, String sharedSegmentField, String sharedSegmentName, QueueMeta meta, long limit, long offset) { return new ArrayList<>(this.fetchElementByPriority0( priority, sharedSegmentField, sharedSegmentName,meta,limit,offset )); } @Select({ "" }) List fetchElementByPriority0( @Param("priority") long priority, @Param("sharedSegmentField") String sharedSegmentField, @Param("sharedSegmentName") String sharedSegmentName, @Param("meta") QueueMeta meta, @Param("limit") long limit, @Param("offset") long offset ); @Override default List fetchElement(String sharedSegmentField, String sharedSegmentName, QueueMeta meta, long limit,long offset) { List elements = this.fetchElement0(sharedSegmentField, sharedSegmentName, meta, limit, offset); return new ArrayList<>(elements); } @Select({ "" }) List 
fetchElement0(@Param("sharedSegmentField") String sharedSegmentField, @Param("sharedSegmentName") String sharedSegmentName, @Param("meta") QueueMeta meta, @Param("limit") long limit, @Param("offset") long offset); @Override @Select({ "" }) List fetchElementGuid(@Param("sharedSegmentField") String sharedSegmentField, @Param("sharedSegmentName") String sharedSegmentName, @Param("meta") QueueMeta meta, @Param("limit") long limit, @Param("offset") long offset); @Override @Select({ "" }) GenericQueueElement getByIndex( @Param("index") long index, @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta ); @Override @Select({ "" }) Long nextPos(@Param("currentPos") long currentPos, @Param("sharedSegmentField") String sharedSegmentField, @Param("sharedSegmentName") String sharedSegmentName, @Param("meta") QueueMeta meta); @Override @Select({ "" }) Long getIndexPriority(@Param("currentPos") long currentPos, @Param("sharedSegmentField") String sharedSegmentField, @Param("sharedSegmentName") String sharedSegmentName, @Param("meta") QueueMeta meta); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/AtlasStratumQueueMapper.java ================================================ package com.pinecone.hydra.queue.ibatis; import com.pinecone.hydra.unit.iqueue.DPStratumQueueManipulator; import com.pinecone.hydra.unit.iqueue.QueueMeta; import com.pinecone.hydra.unit.iqueue.entity.GenericStratumQueueElement; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface AtlasStratumQueueMapper extends DPStratumQueueManipulator { @Override @Insert({ "" }) void pushBack(@Param("element") QueueStratumElement queueElement, @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta meta); @Override @Select({ "" }) GenericStratumQueueElement popFront( @Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta queueMeta ); @Override @Delete({ "" }) void removeFront(@Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta queueMeta); @Override @Select({ "" }) long isEmpty(@Param("field") String sharedSegmentField, @Param("segmentName") String sharedSegmentName, @Param("meta") QueueMeta queueMeta); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/QueueExistMapper.java ================================================ package com.pinecone.hydra.queue.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.iqueue.QueueExistManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @Mapper @IbatisDataAccessObject public interface QueueExistMapper extends QueueExistManipulator { @Override 
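// Behavior sketch: the default isExist(...) below is self-initializing. The first
// query for an unknown layer inserts a 'not exist' row and reports false:
//
//   boolean present = queueExistMapper.isExist( layerGuid );  // inserts row if absent
//   queueExistMapper.setQueueExist( layerGuid );              // later flips it to 1
//
// (queueExistMapper and layerGuid are illustrative names.)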
@Insert("INSERT INTO `hydra_global_queue_exist` (`layer_guid`, `is_exist`) VALUES (#{layer_guid},1)") void insertQueueExist(GUID layerGuid ); @Override @Insert("INSERT INTO `hydra_global_queue_exist` (`layer_guid`, `is_exist`) VALUES (#{layer_guid},0)") void insertQueueNotExist(GUID layerGuid ); @Override @Update("UPDATE `hydra_global_queue_exist` SET `is_exist` = 1 WHERE `layer_guid` = #{layerGuid}") void setQueueExist(GUID layerGuid); @Override @Update("UPDATE `hydra_global_queue_exist` SET `is_exist` = 2 WHERE `layer_guid` = #{layerGuid}") void setQueueNotExist(GUID layerGuid); @Override default boolean isExist( GUID layerGuid ){ Integer isExist = this.isExist0(layerGuid); if( isExist == null ) { this.insertQueueNotExist( layerGuid ); return false; } return isExist == 1; } @Select("SELECT `is_exist` FROM `hydra_global_queue_exist` WHERE `layer_guid` = #{layerGuid}") Integer isExist0( GUID layerGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/hydranium/QueueMappingDriver.java ================================================ package com.pinecone.hydra.queue.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.registry.ibatis.hydranium.RegistryMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class QueueMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public QueueMappingDriver(Processum superiorProcess) { super(superiorProcess); } public QueueMappingDriver(Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, QueueMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new QueueMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/hydranium/QueueMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.queue.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.queue.ibatis.AtlasExecuteQueueMapper; import com.pinecone.hydra.queue.ibatis.AtlasStratumQueueMapper; import com.pinecone.hydra.queue.ibatis.QueueExistMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.unit.iqueue.DPQueueManipulator; import com.pinecone.hydra.unit.iqueue.DPStratumQueueManipulator; import com.pinecone.hydra.unit.iqueue.QueueExistManipulator; import com.pinecone.hydra.unit.iqueue.QueueMasterManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class QueueMasterManipulatorImpl implements QueueMasterManipulator { @Resource @Structure( type = AtlasExecuteQueueMapper.class ) protected DPQueueManipulator mDPQueueManipulator; @Resource @Structure( type = AtlasStratumQueueMapper.class ) 
protected DPStratumQueueManipulator mDPStratumQueueManipulator;
@Resource @Structure( type = QueueExistMapper.class ) protected QueueExistManipulator mQueueExistManipulator;
@Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return null; }
public QueueMasterManipulatorImpl() {}
public QueueMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( QueueMasterManipulatorImpl.class, Map.of(), this ); }
@Override public DPQueueManipulator getDPQueueManipulator() { return this.mDPQueueManipulator; }
@Override public DPStratumQueueManipulator getDPStratumQueueManipulator() { return this.mDPStratumQueueManipulator; }
@Override public QueueExistManipulator getQueueExistManipulator() { return this.mQueueExistManipulator; }
} ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryAttributesMapper.java ================================================
package com.pinecone.hydra.registry.ibatis;
import java.util.List;
import java.util.Map;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.registry.entity.ElementNode;
import com.pinecone.hydra.registry.entity.GenericAttributes;
import com.pinecone.hydra.registry.entity.Attributes;
import com.pinecone.hydra.registry.source.RegistryAttributesManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
@Mapper @IbatisDataAccessObject public interface RegistryAttributesMapper extends RegistryAttributesManipulator {
@Override @Delete( "DELETE FROM `hydra_registry_node_attributes` WHERE `guid`=#{guid}" ) void remove( GUID guid );
@Override @Insert( "INSERT INTO `hydra_registry_node_attributes` (`guid`, `key`, `value`) VALUES (#{guid}, #{key}, #{value})" ) void insertAttribute( @Param("guid") GUID guid, @Param("key") String key, @Param("value") String value );
@Override @Select( "SELECT `id` AS `enumId`, `guid`, `key`, `value` FROM `hydra_registry_node_attributes` WHERE `guid`=#{guid}" ) List<Map<String, Object>> getAttributesByGuid( GUID guid );
@Override @Update( "UPDATE `hydra_registry_node_attributes` SET `value`=#{value} WHERE `guid`=#{guid} AND `key`=#{key}") void updateAttribute( @Param("guid") GUID guid, @Param("key") String key, @Param("value") String value );
@Override default Attributes getAttributes( GUID guid, ElementNode element ) { List<Map<String, Object>> raws = this.getAttributesByGuid( guid ); Attributes attributes = new GenericAttributes( guid, element, this ); if ( raws.isEmpty() ) { return attributes; } for ( Map<String, Object> raw : raws ) { attributes.setAttribute( (String) raw.get( "key" ), (String) raw.get( "value" ) ); } return attributes; }
@Override @Select( "SELECT COUNT(*) FROM `hydra_registry_node_attributes` WHERE `guid` = #{guid} AND `key` = #{key}" ) boolean containsKey( @Param("guid") GUID guid, @Param("key") String key );
@Override @Delete( "DELETE FROM `hydra_registry_node_attributes` WHERE `guid` = #{guid}" ) void clearAttributes( @Param("guid") GUID guid );
@Override @Delete( "DELETE FROM `hydra_registry_node_attributes` WHERE `guid` = #{guid} AND `key` = #{key} AND `value` = #{value}" ) void removeAttributeWithValue( @Param("guid") GUID guid, @Param("key") String key, @Param("value") String value );
@Override @Delete( "DELETE FROM
`hydra_registry_node_attributes` WHERE `guid` = #{guid} AND `key` = #{key}" ) void removeAttribute( @Param("guid") GUID guid, @Param("key") String key ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryConfigNodeMapper.java ================================================ package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.registry.entity.ConfigNode; import com.pinecone.hydra.registry.entity.GenericProperties; import com.pinecone.hydra.registry.entity.GenericTextFile; import com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.time.LocalDateTime; import java.util.List; @Mapper @IbatisDataAccessObject public interface RegistryConfigNodeMapper extends RegistryConfigNodeManipulator { @Insert("INSERT INTO `hydra_registry_config_node` (`guid`, `data_affinity_guid`, `create_time`, `update_time`,`name`) VALUES (#{guid},#{dataAffinityGuid},#{createTime},#{updateTime},#{name})") void insert( ConfigNode configNode ); @Delete("DELETE FROM `hydra_registry_config_node` WHERE `guid`=#{guid}") void remove( @Param("guid") GUID guid ); @Override @Select( "SELECT COUNT(`id`) FROM `hydra_registry_config_node` WHERE guid = #{guid}" ) boolean isConfigNode( GUID guid ); @Select("SELECT `type` FROM `hydra_registry_nodes` WHERE `guid`=#{guid}") UOI getUOIByGUID( GUID guid ); @Select("SELECT `id` AS `enumId`, `guid`, `data_affinity_guid` AS dataAffinityGuid, `create_time` AS createTime, `update_time` updateTime, `name` FROM `hydra_registry_config_node` WHERE `guid` = #{guid}") GenericProperties getPropertiesNode( GUID guid ); @Select("SELECT `id` AS `enumId`, `guid`, `data_affinity_guid` AS dataAffinityGuid, `create_time` AS createTime, `update_time` updateTime, `name` FROM `hydra_registry_config_node` WHERE `guid`=#{guid}") GenericTextFile getTextConfigNode(GUID guid ); @Override default ConfigNode getConfigNode (GUID guid ) { String objectName = this.getUOIByGUID(guid).getObjectName(); if ( objectName.equals( GenericTextFile.class.getName()) ){ return this.getTextConfigNode(guid); } else if ( objectName.equals(GenericProperties.class.getName()) ){ return this.getPropertiesNode(guid); } return null; } @Override default void update( ConfigNode configNode ) { if (configNode.getUpdateTime() != null){ this.updateUpdateTime(configNode.getUpdateTime(),configNode.getGuid()); } if (configNode.getName() != null){ //updateName(configNode.getName(),configNode.getGuid()); } } @Override @Select( "SELECT `guid` FROM `hydra_registry_config_node` WHERE `name` = #{name}" ) List getGuidsByName( String name ); @Override @Select( "SELECT `guid` FROM `hydra_registry_config_node` WHERE `name` = #{name} AND `guid` = #{guid}" ) List getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid ); @Update( "UPDATE `hydra_registry_config_node` SET `update_time` = #{updateTime} WHERE `guid` = #{guid}" ) void updateUpdateTime(@Param("updateTime") LocalDateTime updateTime,@Param("guid") GUID guid); @Select( "SELECT `guid` FROM `hydra_registry_config_node`" ) List dumpGuid(); 
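// Dispatch sketch: the default getConfigNode(...) above routes on the UOI type
// name stored in hydra_registry_nodes, e.g.
//
//   ConfigNode node = mapper.getConfigNode( guid );
//   // yields a GenericTextFile or GenericProperties depending on the node type,
//   // or null for an unrecognized type. ('mapper' is an illustrative name.)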
@Update( "UPDATE `hydra_registry_config_node` SET `name` = #{name} WHERE `guid` = #{guid}" ) void updateName( @Param("guid") GUID guid ,@Param("name") String name ); @Select( "SELECT `data_affinity_guid` FROM `hydra_registry_config_node` WHERE `guid` = #{guid}" ) GUID getDataAffinityGuid ( GUID guid ); @Update( "UPDATE `hydra_registry_config_node` SET `data_affinity_guid` = #{affinityGuid} WHERE `guid` = #{guid}" ) void setDataAffinityGuid( @Param("guid") GUID guid, @Param("affinityGuid") GUID affinityGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNSNodeMapper.java ================================================ package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.GenericNamespace; import com.pinecone.hydra.registry.entity.Namespace; import com.pinecone.hydra.registry.source.RegistryNSNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface RegistryNSNodeMapper extends RegistryNSNodeManipulator { @Override @Insert("INSERT INTO `hydra_registry_namespace` (`guid`, `create_time`, `name`, `update_time`) VALUES (#{guid},#{createTime},#{name},#{updateTime})") void insert(Namespace namespace); @Override @Delete("DELETE FROM `hydra_registry_namespace` WHERE `guid`=#{guid}") void remove( GUID guid ); @Override @Select( "SELECT COUNT(`id`) FROM `hydra_registry_namespace` WHERE guid = #{guid}" ) boolean isNamespaceNode( GUID guid ); @Override @Select("SELECT `id` AS `enumId`, `guid`, `create_time` AS createTime, `name`, `update_time` AS updateTime FROM `hydra_registry_namespace` WHERE guid=#{guid}") GenericNamespace getNamespaceWithMeta( GUID guid ); @Override @Update("UPDATE `hydra_registry_namespace` SET `create_time`=#{createTime},`name`=#{name},`update_time`=#{updateTime} WHERE `guid`=#{guid}") void update(Namespace namespace); @Override @Select("SELECT `guid` FROM `hydra_registry_namespace` WHERE `name`=#{name}") List getGuidsByName(String name); @Override @Select( "SELECT `guid` FROM `hydra_registry_namespace` WHERE `name` = #{name} AND `guid` = #{guid}" ) List getGuidsByNameID( @Param( "name" ) String name, @Param( "guid" ) GUID guid ); @Override @Select("SELECT `guid` FROM `hydra_registry_namespace`") List dumpGuid(); @Override @Update( "UPDATE `hydra_registry_namespace` SET `name` = #{name} WHERE `guid` = #{guid}" ) void updateName( @Param("guid") GUID guid ,@Param("name") String name ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNSNodeMetaMapper.java ================================================ package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.GenericNamespaceMeta; import com.pinecone.hydra.registry.entity.NamespaceMeta; import com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import 
org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface RegistryNSNodeMetaMapper extends RegistryNSNodeMetaManipulator { @Insert("INSERT INTO `hydra_registry_ns_node_meta` (`guid`) VALUES (#{guid})") void insert( NamespaceMeta namespaceMeta); @Delete("DELETE FROM `hydra_registry_ns_node_meta` WHERE `guid`=#{guid}") void remove( GUID guid ); @Select("SELECT `id` AS `enumId`, `guid` FROM `hydra_registry_ns_node_meta` WHERE guid=#{guid}") GenericNamespaceMeta getNamespaceNodeMeta(GUID guid); void update( NamespaceMeta namespaceMeta); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNodeMetaMapper.java ================================================ package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.ConfigNodeMeta; import com.pinecone.hydra.registry.entity.GenericConfigNodeMeta; import com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface RegistryNodeMetaMapper extends RegistryNodeMetaManipulator { @Insert("INSERT INTO `hydra_registry_conf_node_meta` (`guid`) VALUES (#{guid})") void insert(ConfigNodeMeta configNodeMeta); @Delete("DELETE FROM `hydra_registry_conf_node_meta` WHERE `guid`=#{guid}") void remove(GUID guid); @Select("SELECT `id` AS `enumId`, `guid` FROM `hydra_registry_conf_node_meta` WHERE `guid`=#{guid}") GenericConfigNodeMeta getConfigNodeMeta(GUID guid); void update(ConfigNodeMeta configNodeMeta); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNodeOwnerMapper.java ================================================ package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @IbatisDataAccessObject public interface RegistryNodeOwnerMapper extends TireOwnerManipulator { @Insert("INSERT INTO `hydra_registry_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )") void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Insert( "INSERT INTO `hydra_registry_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})" ) void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType ); @Update( "UPDATE `hydra_registry_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType ); @Update( "UPDATE `hydra_registry_node_tree` SET 
`guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Update( "UPDATE `hydra_registry_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void updateLinkedType( @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType ); @Delete( "DELETE FROM `hydra_registry_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" ) void remove( @Param("subordinateGuid") GUID subordinateGuid, @Param("ownerGuid") GUID ownerGuid ); @Delete( "DELETE FROM `hydra_registry_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" ) void removeBySubordinate( GUID subordinateGuid ); // @Delete("DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}") // void removeByOwner(GUID ownerGuid); @Select( "SELECT `parent_guid` FROM `hydra_registry_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'" ) GUID getOwner( GUID subordinateGuid ); @Select( "SELECT guid FROM hydra_registry_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'" ) List<GUID> getSubordinates( GUID guid ); @Update("UPDATE `hydra_registry_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}") void setLinkedType( @Param("sourceGuid") GUID sourceGuid, @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType ); @Select("SELECT `linked_type` FROM `hydra_registry_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` = #{parentGuid}") LinkedType getLinkedType( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNodePathCacheMapper.java ================================================
package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @IbatisDataAccessObject public interface RegistryNodePathCacheMapper extends TriePathCacheManipulator { @Insert("INSERT INTO `hydra_registry_node_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )") void insert( @Param("guid") GUID guid, @Param("path") String path ); @Delete("DELETE FROM `hydra_registry_node_path` WHERE `guid`=#{guid}") void remove( GUID guid ); @Select("SELECT `path` FROM `hydra_registry_node_path` WHERE `guid`=#{guid}") String getPath( GUID guid ); @Select("SELECT `guid` FROM `hydra_registry_node_path` WHERE `path`=#{path}") GUID getNode( String path ); @Select("SELECT `guid` FROM `hydra_registry_node_path` WHERE `path`=#{path}") GUID queryGUIDByPath( String path ); }
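For orientation, a minimal usage sketch of RegistryNodePathCacheMapper follows. It is illustrative only: buildSqlSessionFactory() is a hypothetical stand-in for the project's real MyBatis wiring (which goes through IbatisClient and the hydranium mapping drivers shown further below), and the path literal is invented.

package com.pinecone.hydra.registry.ibatis;

import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

import com.pinecone.framework.util.id.GUID;

public final class RegistryPathCacheSketch {
    // Hypothetical bootstrap; the real project obtains its session factory elsewhere.
    static SqlSessionFactory buildSqlSessionFactory() {
        throw new UnsupportedOperationException( "configure MyBatis here" );
    }

    public static void main( String[] args ) {
        try ( SqlSession session = buildSqlSessionFactory().openSession( true ) ) { // auto-commit
            RegistryNodePathCacheMapper mapper = session.getMapper( RegistryNodePathCacheMapper.class );
            GUID guid = mapper.queryGUIDByPath( "kernel/registry/example" ); // path -> GUID
            if ( guid != null ) {
                System.out.println( mapper.getPath( guid ) );                // GUID -> path
            }
        }
    }
}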
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryPropertiesMapper.java ================================================
package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.registry.entity.GenericProperty; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.entity.Property; import com.pinecone.hydra.registry.source.RegistryPropertiesManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.pinecone.ulf.util.guid.GUIDs; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Result; import org.apache.ibatis.annotations.Results; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import java.util.Map; @Mapper @IbatisDataAccessObject public interface RegistryPropertiesMapper extends RegistryPropertiesManipulator { @Insert("INSERT INTO hydra_registry_conf_node_properties (`guid`, `key`, `type`, `create_time`, `update_time`, `value`) VALUES (#{guid},#{key},#{type},#{createTime},#{updateTime},#{rawValue})") void insert( Property property ); @Delete("DELETE FROM `hydra_registry_conf_node_properties` WHERE `guid`=#{guid} AND `key`=#{key}") void remove( GUID guid, String key ); @Select("SELECT `id` AS `enumId`, `guid`, `key`, `type`, `create_time` AS createTime, `update_time` AS updateTime, `value` AS rawValue FROM hydra_registry_conf_node_properties WHERE `guid`=#{guid}") @Results({ @Result(column = "enumId", property = "enumId", javaType = Long.class) }) List<Map<String, Object>> getProperties0( GUID guid ); @Override default List<Property> getProperties( GUID guid, Properties parent ) { List<Map<String, Object>> raws = this.getProperties0( guid ); List<Property> ps = new ArrayList<>( raws.size() ); for( Map<String, Object> raw : raws ) { Property property = new GenericProperty( parent ); property.setEnumId( ( (Number) raw.get( "enumId" ) ).longValue() ); property.setGuid ( GUIDs.GUID128( (String) raw.get( "guid" ) ) ); property.setType ( (String) raw.get( "type" ) ); property.setKey ( (String) raw.get( "key" ) ); property.setCreateTime ( ( (Timestamp) raw.get("createTime") ).toLocalDateTime() ); property.setUpdateTime ( ( (Timestamp) raw.get("updateTime") ).toLocalDateTime() ); property.setRawValue ( raw.get( "rawValue" ) ); ps.add( property ); } return ps; } @SuppressWarnings( "unchecked" ) default List<Property> getProperties( GUID guid ) { return (List) this.getProperties0( guid ); } @Update( "UPDATE `hydra_registry_conf_node_properties` SET `key`=#{key}, `type`=#{type}, update_time=#{updateTime}, value=#{rawValue} WHERE `guid`=#{guid} AND `key`=#{key}" ) void update( Property property ); @Delete("DELETE FROM `hydra_registry_conf_node_properties` WHERE `guid` = #{guid}") void removeAll( GUID guid ); @Insert( "INSERT INTO `hydra_registry_conf_node_properties` (`guid`, `key`, `type`, `create_time`, `update_time`, `value`) SELECT\n" + "\t#{destinationGuid},\n" + "\t`key`,\n" + "\t`type`,\n" + "\t`create_time`,\n" + "\t`update_time`,\n" + "\t`value` \n" + "FROM\n" + "\t`hydra_registry_conf_node_properties` AS src \n" + "WHERE\n" + "\t`guid` = #{sourceGuid} \n" + "\tAND NOT EXISTS ( \n" + "\tSELECT `guid` FROM `hydra_registry_conf_node_properties` AS dest WHERE dest.`guid` = #{destinationGuid} AND dest.`key` = src.`key` \n" + "\t)" ) void copyPropertiesTo( @Param("sourceGuid") GUID sourceGuid, @Param("destinationGuid") GUID destinationGuid ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryTextFileMapper.java ================================================
package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import
com.pinecone.hydra.registry.entity.GenericTextValue; import com.pinecone.hydra.registry.entity.TextValue; import com.pinecone.hydra.registry.source.RegistryTextFileManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @Mapper @IbatisDataAccessObject public interface RegistryTextFileMapper extends RegistryTextFileManipulator { @Insert("INSERT INTO `hydra_registry_conf_node_text_value` (`guid`, `value`, `create_time`, `update_time`, `type`) VALUES (#{guid},#{value},#{createTime},#{updateTime},#{type})") void insert(TextValue textValue); @Delete("DELETE FROM `hydra_registry_conf_node_text_value` WHERE `guid`=#{guid}") void remove(GUID guid); @Select("SELECT `id` AS `enumId`, `guid`, `value`, `create_time` AS createTime, `update_time` AS updateTime, `type` FROM `hydra_registry_conf_node_text_value` WHERE guid=#{guid}") GenericTextValue getTextValue(GUID guid); @Update("UPDATE `hydra_registry_conf_node_text_value` SET `value`=#{value}, `update_time`=#{updateTime}, `type`=#{type} WHERE guid=#{guid}") void update(TextValue textValue); @Insert("INSERT INTO `hydra_registry_conf_node_text_value` (`guid`, `type`, `create_time`, `update_time`, `value`) SELECT #{destinationGuid}, `type`, `create_time`, `update_time`, `value` " + "FROM `hydra_registry_conf_node_text_value` WHERE `guid` = #{sourceGuid} AND `guid` != #{destinationGuid}") void copyTextValueTo( @Param("sourceGuid") GUID sourceGuid, @Param("destinationGuid") GUID destinationGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryTreeMapper.java ================================================ package com.pinecone.hydra.registry.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @IbatisDataAccessObject public interface RegistryTreeMapper extends TrieTreeManipulator { @Insert("INSERT INTO `hydra_registry_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )") void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Override default void insert ( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){ this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() ); ownerManipulator.insertRootNode( node.getGuid() ); } @Insert("INSERT INTO hydra_registry_nodes (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})") void insertTreeNode( @Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, 
@Param("nodeMetaGuid") GUID nodeMetaGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_registry_nodes WHERE guid=#{guid}") GUIDImperialTrieNode getNodeExtendsFromMeta(GUID guid ); @Override default GUIDImperialTrieNode getNode( GUID guid ) { GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid ); if ( node == null ) { return null; } List parent = this.fetchParentGuids( guid ); node.setParentGUID( parent ); return node; } @Select("SELECT COUNT( `id` ) FROM hydra_registry_nodes WHERE guid=#{guid}") boolean contains( GUID key ); @Select("SELECT id, guid, parent_guid, linked_type FROM hydra_registry_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") GUIDImperialTrieNode getTreeNodeOnly(@Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT count( * ) FROM hydra_registry_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") long countNode( GUID guid, GUID parentGuid ); @Override default void purge( GUID guid ) { this.removeNodeMeta( guid ); this.removeTreeNode( guid ); this.removeOwnedTreeNode( guid ); } @Delete("DELETE FROM `hydra_registry_nodes` WHERE `guid`=#{guid}") void removeNodeMeta( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_registry_node_tree` WHERE `guid` = #{guid}") void removeTreeNode( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_registry_node_tree` WHERE `parent_guid` = #{parent_guid}") void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid ); @Delete("DELETE FROM `hydra_registry_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}") void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID parentGuid ); @Delete("DELETE FROM `hydra_registry_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}") void removeTreeNodeWithLinkedType( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Delete("DELETE FROM `hydra_registry_node_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}") void removeInheritance( @Param("chileGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_registry_node_tree` WHERE `parent_guid`=#{guid}") List getChildren(GUID guid ); @Select("SELECT `guid` FROM `hydra_registry_node_tree` WHERE `parent_guid` = #{parentGuid}") List fetchChildrenGuids( @Param("parentGuid") GUID parentGuid ); @Select("SELECT `parent_guid` FROM `hydra_registry_node_tree` WHERE `guid`=#{guid}") List fetchParentGuids( GUID guid ); @Update("UPDATE `hydra_registry_nodes` SET `type` = #{type} WHERE guid=#{guid}") void updateType( UOI type , GUID guid ); @Select( "SELECT guid FROM hydra_registry_node_tree WHERE parent_guid IS NULL " ) List fetchRoot(); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_registry_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}" ) boolean isRoot( GUID guid ); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_registry_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}" ) long queryLinkedCount( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_registry_node_tree WHERE `guid` = #{guid}" ) long queryAllLinkedCount( @Param("guid") GUID guid ); @Override @Insert( "INSERT INTO `hydra_registry_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) " + "VALUES (#{originalGuid}, #{linkedType}, #{tagName}, 
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/hydranium/RegistryMappingDriver.java ================================================
package com.pinecone.hydra.registry.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class RegistryMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public RegistryMappingDriver( Processum superiorProcess ) { super( superiorProcess ); } // Temp, TODO public RegistryMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, RegistryMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new RegistryMasterManipulatorImpl( this ); }
@Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/hydranium/RegistryMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.registry.ibatis.hydranium; import java.util.Map; import javax.annotation.Resource; import org.springframework.stereotype.Component; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.registry.ibatis.RegistryAttributesMapper; import com.pinecone.hydra.registry.ibatis.RegistryNSNodeMapper; import com.pinecone.hydra.registry.ibatis.RegistryNSNodeMetaMapper; import com.pinecone.hydra.registry.ibatis.RegistryConfigNodeMapper; import com.pinecone.hydra.registry.ibatis.RegistryNodeMetaMapper; import com.pinecone.hydra.registry.ibatis.RegistryPropertiesMapper; import com.pinecone.hydra.registry.ibatis.RegistryTextFileMapper; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.registry.source.RegistryNSNodeManipulator; import com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator; import com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator; import com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator; import com.pinecone.hydra.registry.source.RegistryAttributesManipulator; import com.pinecone.hydra.registry.source.RegistryPropertiesManipulator; import com.pinecone.hydra.registry.source.RegistryTextFileManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; @Component public class RegistryMasterManipulatorImpl implements RegistryMasterManipulator { @Resource @Structure( type = RegistryConfigNodeMapper.class ) RegistryConfigNodeManipulator configNodeManipulator; @Resource @Structure( type = RegistryNSNodeMapper.class ) RegistryNSNodeManipulator namespaceNodeManipulator; @Resource @Structure( type = RegistryPropertiesMapper.class ) RegistryPropertiesManipulator registryPropertiesManipulator; @Resource @Structure( type = RegistryTextFileMapper.class ) RegistryTextFileManipulator registryTextFileManipulator; @Resource @Structure( type = RegistryNodeMetaMapper.class ) RegistryNodeMetaManipulator configNodeMetaManipulator; @Resource @Structure( type = RegistryNSNodeMetaMapper.class ) RegistryNSNodeMetaManipulator namespaceNodeMetaManipulator; @Resource @Structure( type = RegistryAttributesMapper.class ) RegistryAttributesManipulator registryAttributesManipulator; @Resource( type = RegistryMasterTreeManipulatorImpl.class ) KOISkeletonMasterManipulator skeletonMasterManipulator; public RegistryMasterManipulatorImpl() { } public RegistryMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( RegistryMasterManipulatorImpl.class, Map.of(), this ); this.skeletonMasterManipulator = new RegistryMasterTreeManipulatorImpl( driver ); } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } @Override public RegistryConfigNodeManipulator getConfigNodeManipulator() { return this.configNodeManipulator; } @Override public RegistryNSNodeManipulator getNSNodeManipulator() { return this.namespaceNodeManipulator; } @Override public RegistryPropertiesManipulator getPropertiesManipulator() { return this.registryPropertiesManipulator; } @Override public RegistryTextFileManipulator 
getTextFileManipulator() { return this.registryTextFileManipulator; } @Override public RegistryNodeMetaManipulator getNodeMetaManipulator() { return this.configNodeMetaManipulator; } @Override public RegistryNSNodeMetaManipulator getNSNodeMetaManipulator() { return this.namespaceNodeMetaManipulator; } @Override public RegistryAttributesManipulator getAttributesManipulator() { return this.registryAttributesManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/hydranium/RegistryMasterTreeManipulatorImpl.java ================================================ package com.pinecone.hydra.registry.ibatis.hydranium; import java.util.Map; import javax.annotation.Resource; import org.springframework.stereotype.Component; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.registry.ibatis.RegistryNodeOwnerMapper; import com.pinecone.hydra.registry.ibatis.RegistryNodePathCacheMapper; import com.pinecone.hydra.registry.ibatis.RegistryTreeMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; @Component public class RegistryMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = RegistryNodePathCacheMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = RegistryNodeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; @Resource @Structure( type = RegistryTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; public RegistryMasterTreeManipulatorImpl() { } public RegistryMasterTreeManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( RegistryMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioCommonDataMapper.java ================================================ package com.pinecone.hydra.scenario.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.scenario.entity.GenericScenarioCommonData; import com.pinecone.hydra.scenario.entity.ScenarioCommonData; import com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface ScenarioCommonDataMapper extends ScenarioCommonDataManipulator { @Insert("INSERT INTO `hydra_scenario_commom_data` (`guid`, `create_time`, `update_time`) VALUES (#{guid},#{createTime},#{updateTime})") void insert(ScenarioCommonData scenarioCommonData); @Delete("DELETE FROM `hydra_scenario_commom_data` WHERE `guid`=#{guid}") void remove(GUID guid); @Select("SELECT `id` AS `enumId`, `guid`, 
`create_time`, `update_time` FROM `hydra_scenario_commom_data` WHERE `guid`=#{guid}") GenericScenarioCommonData getScenarioCommonData(GUID guid); void update(ScenarioCommonData scenarioCommonData); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNamespaceNodeMapper.java ================================================
package com.pinecone.hydra.scenario.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.scenario.entity.GenericNamespaceNode; import com.pinecone.hydra.scenario.entity.NamespaceNode; import com.pinecone.hydra.scenario.source.NamespaceNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; import java.util.List; @Mapper @IbatisDataAccessObject public interface ScenarioNamespaceNodeMapper extends NamespaceNodeManipulator { @Insert("INSERT INTO `hydra_scenario_namespace_node` (`guid`, `name`) VALUES (#{guid},#{name})") void insert(NamespaceNode namespaceNode); @Delete("DELETE FROM `hydra_scenario_namespace_node` WHERE `guid`=#{guid}") void remove(GUID guid); @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_scenario_namespace_node` WHERE guid=#{guid}") GenericNamespaceNode getNamespaceNode(GUID guid); void update(NamespaceNode namespaceNode); @Select("SELECT guid FROM hydra_scenario_namespace_node where name=#{name}") List<GUID> getGuidsByName(String name); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNamespaceNodeMetaMapper.java ================================================
package com.pinecone.hydra.scenario.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.scenario.entity.GenericNamespaceNodeMeta; import com.pinecone.hydra.scenario.entity.NamespaceNodeMeta; import com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface ScenarioNamespaceNodeMetaMapper extends NamespaceNodeMetaManipulator { @Insert("INSERT INTO `hydra_scenario_namespace_node_meta` (`guid`) VALUES (#{guid})") void insert(NamespaceNodeMeta namespaceNodeMeta); @Delete("DELETE FROM `hydra_scenario_namespace_node_meta` WHERE `guid`=#{guid}") void remove(GUID guid); @Select("SELECT `id` AS `enumId`, `guid` FROM `hydra_scenario_namespace_node_meta` WHERE `guid`=#{guid}") GenericNamespaceNodeMeta getNamespaceNodeMeta(GUID guid); void update(NamespaceNodeMeta namespaceNodeMeta); }
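The Scenario mappers mirror the Registry family one-to-one. A small illustrative lookup under the same hypothetical SqlSession assumption as the earlier sketches; getName() is assumed from the entity's #{name} mapping.

package com.pinecone.hydra.scenario.ibatis;

import java.util.List;

import org.apache.ibatis.session.SqlSession;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.scenario.entity.GenericNamespaceNode;

public final class ScenarioNamespaceLookupSketch {
    public static void dumpByName( SqlSession session, String name ) {
        ScenarioNamespaceNodeMapper mapper = session.getMapper( ScenarioNamespaceNodeMapper.class );
        // Names are not unique, hence the list of candidate GUIDs.
        List<GUID> guids = mapper.getGuidsByName( name );
        for ( GUID guid : guids ) {
            GenericNamespaceNode node = mapper.getNamespaceNode( guid );
            System.out.println( node.getName() + " @ " + guid ); // getName() assumed
        }
    }
}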
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNodeOwnerMapper.java ================================================
package com.pinecone.hydra.scenario.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Mapper; import java.util.List; @Mapper @IbatisDataAccessObject public interface ScenarioNodeOwnerMapper extends TireOwnerManipulator { void insert(GUID subordinateGuid, GUID ownerGuid); void remove(GUID subordinateGuid, GUID ownerGuid); void removeBySubordinate(GUID subordinateGuid); void removeByOwner(GUID ownerGuid); GUID getOwner(GUID subordinateGuid); List<GUID> getSubordinates(GUID guid); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNodePathCacheMapper.java ================================================
package com.pinecone.hydra.scenario.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @Mapper @IbatisDataAccessObject public interface ScenarioNodePathCacheMapper extends TriePathCacheManipulator { @Insert("INSERT INTO hydra_scenario_node_path (path, guid) VALUES (#{path},#{guid})") void insert(@Param("guid") GUID guid, @Param("path") String path); @Delete("DELETE FROM hydra_scenario_node_path WHERE guid=#{guid}") void remove(GUID guid); @Select("SELECT path FROM hydra_scenario_node_path WHERE guid=#{guid}") String getPath(GUID guid); @Select("SELECT guid FROM hydra_scenario_node_path WHERE path=#{path}") GUID getNode(String path); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioTreeMapper.java ================================================
package com.pinecone.hydra.scenario.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import java.util.ArrayList; import java.util.List; @Mapper @IbatisDataAccessObject public interface ScenarioTreeMapper extends TrieTreeManipulator { @Insert("INSERT INTO hydra_scenario_node_map (guid, type, base_data_guid, node_meta_guid) VALUES (#{guid},#{type},#{baseDataGUID},#{nodeMetadataGUID})") void insert( GUIDImperialTrieNode distributedConfTreeNode ); @Select("SELECT COUNT( `id` ) FROM hydra_scenario_node_map WHERE guid=#{guid}") boolean contains( GUID key ); default GUIDImperialTrieNode getNode(GUID guid){ GUIDImperialTrieNode metaNode = this.getMetaNode(guid); List<GUID> parentNodes = this.fetchParentGuids(guid); if (parentNodes != null){ metaNode.setParentGUID(parentNodes); } else { metaNode.setParentGUID(new ArrayList<>()); } return metaNode; } @Select("SELECT id, guid, type, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_scenario_node_map WHERE guid=#{guid}") GUIDImperialTrieNode getMetaNode(GUID guid); default void remove(GUID guid){ removeMeta(guid); removeParentNode(guid); } @Delete("DELETE FROM hydra_scenario_node_map WHERE guid=#{guid}") void removeMeta(GUID guid); @Delete("DELETE FROM hydra_scenario_node_tree WHERE guid=#{guid}") void removeParentNode(GUID guid); @Delete("DELETE FROM `hydra_scenario_node_tree` WHERE `guid`=#{childGuid} AND
`parent_guid`=#{parentGuid}") void removeInheritance(@Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid); @Select("SELECT `parent_guid` FROM `hydra_scenario_node_tree` WHERE `guid`=#{guid}") List fetchParentGuids(GUID guid); @Select("SELECT `path` FROM `hydra_scenario_node_path` WHERE `guid`=#{guid}") String getPath(GUID guid); void updatePath( GUID guid, String path); @Select("SELECT `guid` FROM `hydra_scenario_node_path` WHERE `path`=#{path}") GUID queryGUIDByPath( String path ); void insertOwnedNode(GUID nodeGUID,GUID parentGUID); @Select("SELECT guid FROM hydra_scenario_node_tree WHERE parent_guid=#{guid}") List getChild(GUID guid); @Delete("DELETE FROM `hydra_scenario_node_path` WHERE `guid`=#{guid}") void removePath(GUID guid); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/hydranium/ScenarioMappingDriver.java ================================================ package com.pinecone.hydra.scenario.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class ScenarioMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public ScenarioMappingDriver( Processum superiorProcess ) { super( superiorProcess ); } public ScenarioMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, ScenarioMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new ScenarioMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/hydranium/ScenarioMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.scenario.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.scenario.ibatis.ScenarioCommonDataMapper; import com.pinecone.hydra.scenario.ibatis.ScenarioNamespaceNodeMapper; import com.pinecone.hydra.scenario.ibatis.ScenarioNamespaceNodeMetaMapper; import com.pinecone.hydra.scenario.source.NamespaceNodeManipulator; import com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator; import com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator; import com.pinecone.hydra.scenario.source.ScenarioMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class ScenarioMasterManipulatorImpl implements ScenarioMasterManipulator { @Resource @Structure(type = ScenarioNamespaceNodeMapper.class) NamespaceNodeManipulator namespaceNodeManipulator; @Resource @Structure(type = ScenarioNamespaceNodeMetaMapper.class) NamespaceNodeMetaManipulator namespaceNodeMetaManipulator; @Resource @Structure(type = 
ScenarioCommonDataMapper.class) ScenarioCommonDataManipulator scenarioCommonDataManipulator; @Resource @Structure(type = ScenarioMasterTreeManipulatorImpl.class) KOISkeletonMasterManipulator skeletonMasterManipulator; public ScenarioMasterManipulatorImpl() { } public ScenarioMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( ScenarioMasterManipulatorImpl.class, Map.of(), this ); this.skeletonMasterManipulator = new ScenarioMasterTreeManipulatorImpl( driver ); } @Override public NamespaceNodeManipulator getNamespaceNodeManipulator() { return this.namespaceNodeManipulator; } @Override public NamespaceNodeMetaManipulator getNSNodeMetaManipulator() { return this.namespaceNodeMetaManipulator; } @Override public ScenarioCommonDataManipulator getScenarioCommonDataManipulator() { return this.scenarioCommonDataManipulator; } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/hydranium/ScenarioMasterTreeManipulatorImpl.java ================================================ package com.pinecone.hydra.scenario.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.scenario.ibatis.ScenarioNodeOwnerMapper; import com.pinecone.hydra.scenario.ibatis.ScenarioNodePathCacheMapper; import com.pinecone.hydra.scenario.ibatis.ScenarioTreeMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class ScenarioMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = ScenarioNodePathCacheMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = ScenarioNodeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; @Resource @Structure( type = ScenarioTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; public ScenarioMasterTreeManipulatorImpl() { } public ScenarioMasterTreeManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( ScenarioMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/AppNodeMetaMapper.java ================================================ package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericApplicationElement; import com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import 
org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.time.LocalDateTime; @Mapper @IbatisDataAccessObject public interface AppNodeMetaMapper extends ApplicationMetaManipulator { @Insert( "INSERT INTO `hydra_service_app_node_meta` (`guid`, `name`, `path`, `type`, `alias`, resource_type, deployment_method, create_time, update_time) VALUES (#{metaGuid},#{name},#{path},#{type},#{alias},#{resourceType},#{deploymentMethod},#{createTime},#{updateTime})" ) void insert( ApplicationElement applicationElement ); @Delete( "DELETE FROM `hydra_service_app_node_meta` WHERE `guid`=#{guid}" ) void remove( @Param("guid") GUID guid ); @Select( "SELECT `id` AS `enumId`, `guid`, `name`, `path`, `type`, `alias`, `resource_type` AS resourceType, `deployment_method` AS deploymentMethod, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_service_app_node_meta` WHERE `guid`=#{guid}" ) GenericApplicationElement getApplicationElement( @Param("guid") GUID guid ); default GenericApplicationElement getApplicationElement( GUID guid, ServiceInstrument serviceInstrument){ GenericApplicationElement element = this.getApplicationElement( guid ); element.apply(serviceInstrument); return element; } @Update("UPDATE `hydra_service_app_node_meta` SET `name` = #{name}, `path` = #{path}, `type` = #{type}, `alias` = #{alias}, `resource_type` = #{resourceType}, `deployment_method` = #{deploymentMethod}, `update_time` = #{updateTime} WHERE `guid` = #{guid}") void update( ApplicationElement applicationElement ); @Update("UPDATE hydra_service_app_node_meta SET name = #{name} WHERE guid = #{guid}") void updateName( String name, GUID guid ); @Update("UPDATE hydra_service_app_node_meta SET path = #{path} WHERE guid = #{guid}") void updatePath( String path, GUID guid ); @Update("UPDATE hydra_service_app_node_meta SET type = #{type} WHERE guid = #{guid}") void updateType( String type, GUID guid ); @Update("UPDATE hydra_service_app_node_meta SET alias = #{alias} WHERE guid = #{guid}") void updateAlias( String alias, GUID guid ); @Update("UPDATE hydra_service_app_node_meta SET resource_type = #{resourceType} WHERE guid = #{guid}") void updateResourceType( String resourceType, GUID guid ); @Update("UPDATE hydra_service_app_node_meta SET deployment_method= #{deploymentMethod} WHERE guid = #{guid}") void updateDeploymentMethod( String deploymentMethod, GUID guid ); @Update("UPDATE hydra_service_app_node_meta SET update_time = #{updateTime} WHERE guid = #{guid}") void updateUpdateTime(LocalDateTime updateTime, GUID guid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ApplicationNodeMapper.java ================================================ package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.GenericApplicationElement; import com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; 
import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface ApplicationNodeMapper extends ApplicationNodeManipulator { @Insert("INSERT INTO `hydra_service_application_node` (`guid`, `name`) VALUES (#{guid},#{name})") void insert( ApplicationElement applicationElement ); @Delete("DELETE FROM `hydra_service_application_node` WHERE `guid`=#{guid}") void remove( @Param("guid")GUID guid ); @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_service_application_node` WHERE `guid`=#{guid}") GenericApplicationElement getApplicationNode(@Param("guid")GUID guid); @Update("UPDATE `hydra_service_application_node` SET name = #{name} WHERE guid = #{guid}") void update( ApplicationElement applicationElement ); @Override @Select( "SELECT `guid` FROM `hydra_service_application_node` WHERE `name` = #{name}" ) List getGuidsByName( String name ); @Override @Select( "SELECT `guid` FROM `hydra_service_application_node` WHERE `name` = #{name} AND `guid` = #{guid}" ) List getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/NamespaceRulesMapper.java ================================================ package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.GenericNamespaceRules; import com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @Mapper @IbatisDataAccessObject public interface NamespaceRulesMapper extends NamespaceRulesManipulator { @Insert("INSERT INTO `hydra_service_namespace_rules` (`guid`, `scope`, `name`, `description`) VALUES (#{guid},#{scope},#{name},#{description})") void insert(GenericNamespaceRules classificationRules); @Delete("DELETE FROM `hydra_service_namespace_rules` WHERE `guid`=#{guid}") void remove(@Param("guid")GUID guid); @Select("SELECT `id` AS `enumId`, `guid`, `scope`, `name`, `description` FROM `hydra_service_namespace_rules` WHERE `guid`=#{guid}") GenericNamespaceRules getNamespaceRules(@Param("guid")GUID guid); @Update("UPDATE `hydra_service_namespace_rules` SET `scope` = #{scope}, `name` = #{name}, `description` = #{description} WHERE `guid` = #{guid}") void update(GenericNamespaceRules classificationRules); @Update("UPDATE `hydra_service_namespace_rules` SET `scope` = #{scope} WHERE `guid` = #{guid}") void updateScope( @Param("scope") String scope, GUID guid ); @Update("UPDATE `hydra_service_namespace_rules` SET `name` = #{name} WHERE `guid` = #{guid}") void updateName( @Param("name") String name, GUID guid ); @Update("UPDATE `hydra_service_namespace_rules` SET `description` = #{description} WHERE `guid` = #{guid}") void updateDescription( @Param("description") String description, GUID guid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceInstanceMapper.java ================================================ package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import 
com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry; import com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator; import com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @Mapper @IbatisDataAccessObject public interface ServiceInstanceMapper extends ServiceInstanceManipulator { @Override @Insert("INSERT INTO `hydra_service_instances` " + "(`service_guid`, `guid`, `status`, `latest_start_time`, `latest_end_time`, `error_cause`, `run_count`, `deploy_guid`, `ip`) VALUES " + "(#{serviceGuid}, #{guid}, #{status}, #{latestStartTime}, #{latestEndTime}, #{errorCause}, #{runCount}, #{deployGuid}, #{ip})") void initServiceInstance(ServiceInstanceEntry element); @Override @Select("SELECT `id`, `service_guid`, `guid`, `status`, `latest_start_time`, `latest_end_time`, `error_cause`, `run_count`, `deploy_guid`, `ip`" + " FROM `hydra_service_instances` WHERE guid = #{instanceId}") GenericServiceInstanceEntity queryServiceInstance( GUID instanceId ); @Override @Update("UPDATE `hydra_service_instances` SET status = #{status}, run_count = #{runCount} WHERE guid = #{guid}") void updateServiceInstance(ServiceInstanceEntry element); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceMetaMapper.java ================================================ package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.source.ServiceMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.time.LocalDateTime; @Mapper @IbatisDataAccessObject public interface ServiceMetaMapper extends ServiceMetaManipulator { @Insert("INSERT INTO `hydra_service_serv_node_meta` (`guid`, `name`, `path`, `type`, `alias`, resource_type, service_type, create_time, update_time) VALUES (#{metaGuid},#{name},#{path},#{type},#{alias},#{resourceType},#{serviceType},#{createTime},#{updateTime})") void insert( ServiceElement serviceElement ); @Delete("DELETE FROM `hydra_service_serv_node_meta` WHERE `guid`=#{guid}") void remove( @Param("guid") GUID guid ); @Select("SELECT `id` AS `enumId`, `guid`, `name`, `path`, `type`, `alias`, `resource_type` AS resourceType, `service_type` AS serviceType, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_service_serv_node_meta` WHERE `guid`=#{guid}") GenericServiceElement getServiceMeta( @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_serv_node_meta` SET `name` =#{name}, `path` = #{path}, `type` = #{type}, `alias` = #{alias}, `resource_type` = #{resourceType}, `service_type` = #{serviceType}, `update_time` = #{updateTime} WHERE `guid` = #{guid}") void update( ServiceElement serviceElement ); @Update("UPDATE `hydra_service_serv_node_meta` SET `name` = #{name} WHERE `guid` = #{guid}") void updateName( @Param("name") String 
name, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_serv_node_meta` SET `path` = #{path} WHERE `guid` = #{guid}") void updatePath( @Param("path") String path, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_serv_node_meta` SET `alias` = #{alias} WHERE `guid` = #{guid}") void updateAlias( @Param("alias") String alias, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_serv_node_meta` SET `resource_type` = #{resourceType} WHERE `guid` = #{guid}") void updateResourceType( @Param("resourceType") String resourceType, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_serv_node_meta` SET `service_type` = #{serviceType} WHERE `guid` = #{guid}") void updateServiceType( @Param("serviceType") String serviceType, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_serv_node_meta` SET `update_time` = #{updateTime} WHERE `guid` = #{guid}") void updateUpdateTime( @Param("updateTime") LocalDateTime updateTime, @Param("guid") GUID guid ); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNamespaceMapper.java ================================================
package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.GenericNamespace; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface ServiceNamespaceMapper extends ServiceNamespaceManipulator { @Insert("INSERT INTO `hydra_service_namespace_node` (`guid`, `name`, `rules_guid`) VALUES (#{guid},#{name},#{rulesGUID})") void insert( Namespace ns ); @Delete("DELETE FROM `hydra_service_namespace_node` WHERE `guid`=#{guid}") void remove( @Param("guid") GUID guid ); @Select("SELECT `id` AS `enumId`, `guid`, `name`, `rules_guid` AS rulesGUID FROM `hydra_service_namespace_node` WHERE `guid`=#{guid}") GenericNamespace getNamespace( @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_namespace_node` SET `name` = #{name} WHERE `guid` = #{guid}") void update( Namespace ns ); @Select("SELECT `id` AS `enumId`, `guid`, `name`, `rules_guid` AS rulesGUID FROM `hydra_service_namespace_node` WHERE name=#{name}") List<GenericNamespace> fetchNamespaceNodeByName0( @Param("name") String name ); @SuppressWarnings( "unchecked" ) default List<Namespace> fetchNamespaceNodeByName( String name ){ return (List) this.fetchNamespaceNodeByName0( name ); } @Override @Select( "SELECT `guid` FROM `hydra_service_namespace_node` WHERE `name` = #{name}" ) List<GUID> getGuidsByName(String name); @Override @Select( "SELECT `guid` FROM `hydra_service_namespace_node` WHERE `name` = #{name} AND `guid` = #{guid}" ) List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid ); }
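Since hydra_service_namespace_node carries a rules_guid reference into hydra_service_namespace_rules, the namespace and rules mappers are naturally used together. A hedged sketch (getRulesGUID() and getName() are assumed from the rulesGUID and name column mappings; SqlSession wiring as in the earlier sketches):

package com.pinecone.hydra.service.ibatis;

import org.apache.ibatis.session.SqlSession;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.service.kom.GenericNamespaceRules;
import com.pinecone.hydra.service.kom.entity.GenericNamespace;

public final class ServiceNamespaceRulesSketch {
    public static void printRules( SqlSession session, GUID nsGuid ) {
        ServiceNamespaceMapper namespaces = session.getMapper( ServiceNamespaceMapper.class );
        NamespaceRulesMapper rules = session.getMapper( NamespaceRulesMapper.class );
        GenericNamespace ns = namespaces.getNamespace( nsGuid );
        if ( ns != null && ns.getRulesGUID() != null ) { // accessors assumed
            GenericNamespaceRules r = rules.getNamespaceRules( ns.getRulesGUID() );
            System.out.println( ns.getName() + " -> " + ( r == null ? "(no rules)" : r.getName() ) );
        }
    }
}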
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNodeMapper.java ================================================
package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.source.ServiceNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.ArrayList; import java.util.List; @Mapper @IbatisDataAccessObject public interface ServiceNodeMapper extends ServiceNodeManipulator { @Insert("INSERT INTO `hydra_service_service_node` (`guid`, `name`) VALUES (#{guid},#{name})") void insert( GenericServiceElement serviceNode ); @Override @Delete("DELETE FROM `hydra_service_service_node` WHERE `guid`=#{guid}") void remove( @Param("guid") GUID guid ); @Override @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_service_service_node` WHERE `guid`=#{guid}") GenericServiceElement getServiceNode( @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_service_node` SET `name` = #{name} WHERE `guid` = #{guid}") void update( GenericServiceElement serviceNode ); @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_service_service_node` WHERE name=#{name}") List<GenericServiceElement> fetchServiceNodeByName0( @Param("name") String name ); @Override @SuppressWarnings("unchecked") default List<ServiceElement> fetchServiceNodeByName( String name ) { return (List) this.fetchServiceNodeByName0( name ); } @Override @Select( "SELECT `guid` FROM `hydra_service_service_node` WHERE `name` = #{name}" ) List<GUID> getGuidsByName( String name ); @Override @Select( "SELECT `guid` FROM `hydra_service_service_node` WHERE `name` = #{name} AND `guid` = #{guid}" ) List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid ); @Override @SuppressWarnings("unchecked") default List<ServiceElement> fetchAllService(){ return (List) this.fetchAllService0(); } @Select("SELECT `id`, `guid`, `name` FROM `hydra_service_service_node` ") List<GenericServiceElement> fetchAllService0(); }
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNodeMetaMapper.java ================================================
package com.pinecone.hydra.service.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceFamilyNode; import com.pinecone.hydra.service.kom.entity.GenericCommonMeta; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.source.NodeMetaManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @Mapper @IbatisDataAccessObject public interface ServiceNodeMetaMapper extends NodeMetaManipulator { @Override @Insert("INSERT INTO `hydra_service_node_meta` (`guid`, `scenario`, primary_impl_lang, extra_information, `level`, `description`) VALUES (#{guid}, #{scenario}, #{primaryImplLang}, #{extraInformation}, #{level}, #{description})") void insert( ServiceFamilyNode node ); @Override @Insert("INSERT INTO `hydra_service_node_meta` (`guid`, `scenario`, primary_impl_lang, extra_information, `level`, `description`) VALUES (#{metaGuid}, #{scenario},
#{primaryImplLang}, #{extraInformation}, #{level}, #{description})") void insertNS( Namespace node ); @Override @Delete("DELETE FROM `hydra_service_node_meta` WHERE `guid`=#{guid}") void remove( @Param("guid") GUID guid ); @Override @Select("SELECT `id` AS `enumId`, `guid`, `scenario`, `primary_impl_lang` AS primaryImplLang, `extra_information` AS extraInformation, `level`, `description` FROM `hydra_service_node_meta` WHERE `guid`=#{guid}") GenericCommonMeta getNodeCommonMeta( @Param("guid") GUID guid ); @Override @Update("UPDATE `hydra_service_node_meta` SET `scenario` = #{scenario}, `primary_impl_lang` = #{primaryImplLang}, `extra_information` = #{extraInformation}, `level` = #{level}, `description` = #{description} WHERE `guid` = #{guid}") void update( ServiceFamilyNode node ); @Update("UPDATE `hydra_service_node_meta` SET `scenario` = #{scenario} WHERE `guid` = #{guid}") void updateScenario( @Param("scenario") String scenario, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_node_meta` SET `primary_impl_lang` = #{primaryImplLang} WHERE `guid` = #{guid}") void updatePrimaryImplLang( @Param("primaryImplLang") String primaryImplLang, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_node_meta` SET `extra_information` = #{extraInformation} WHERE `guid` = #{guid}") void updateExtraInformation( @Param("extraInformation") String extraInformation, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_node_meta` SET `level` = #{level} WHERE `guid` = #{guid}") void updateLevel( @Param("level") String level, @Param("guid") GUID guid ); @Update("UPDATE `hydra_service_node_meta` SET `description` = #{description} WHERE `guid` = #{guid}") void updateDescription( @Param("description") String description, @Param("guid") GUID guid ); }
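The column-level updaters above allow read-modify-write flows that touch a single field instead of rewriting the whole meta row. A minimal hedged sketch (SqlSession wiring as in the earlier sketches):

package com.pinecone.hydra.service.ibatis;

import org.apache.ibatis.session.SqlSession;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.service.kom.entity.GenericCommonMeta;

public final class ServiceNodeMetaSketch {
    public static void redescribe( SqlSession session, GUID guid, String description ) {
        ServiceNodeMetaMapper meta = session.getMapper( ServiceNodeMetaMapper.class );
        GenericCommonMeta row = meta.getNodeCommonMeta( guid );
        if ( row != null ) {
            // Targeted single-column update keyed by guid.
            meta.updateDescription( description, guid );
        }
    }
}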
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNodeOwnerMapper.java ================================================
package com.pinecone.hydra.service.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface ServiceNodeOwnerMapper extends TireOwnerManipulator {
    @Insert("INSERT INTO `hydra_service_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )")
    void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Insert( "INSERT INTO `hydra_service_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})" )
    void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );

    @Update( "UPDATE `hydra_service_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" )
    void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );

    @Update( "UPDATE `hydra_service_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" )
    void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID );

    @Update( "UPDATE `hydra_service_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" )
    void updateLinkedType( @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );

    @Delete( "DELETE FROM `hydra_service_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" )
    void remove( @Param("subordinateGuid") GUID subordinateGuid, @Param("ownerGuid") GUID ownerGuid );

    @Delete( "DELETE FROM `hydra_service_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" )
    void removeBySubordinate( GUID subordinateGuid );

//    @Delete("DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}")
//    void removeByOwner(GUID ownerGuid);

    @Select( "SELECT `parent_guid` FROM `hydra_service_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'" )
    GUID getOwner( GUID subordinateGuid );

    @Select( "SELECT guid FROM hydra_service_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'" )
    List<GUID> getSubordinates( GUID guid );

    @Update("UPDATE `hydra_service_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}")
    void setLinkedType( @Param("sourceGuid") GUID sourceGuid, @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );

    @Select("SELECT `linked_type` FROM `hydra_service_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}")
    LinkedType getLinkedType( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );
}
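getOwner and getSubordinates encode ownership as 'Owned' edges in hydra_service_node_tree. A sketch of collecting an owned subtree breadth-first (assumes the owned edges form an acyclic tree; this helper is illustrative, not repository code):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper;

public final class OwnedSubtreeWalker {
    /** Collects root and every transitively 'Owned' subordinate, breadth-first. */
    public static List<GUID> collect( ServiceNodeOwnerMapper mapper, GUID root ) {
        List<GUID> out = new ArrayList<>();
        Deque<GUID> queue = new ArrayDeque<>();
        queue.add( root );
        while ( !queue.isEmpty() ) {
            GUID cur = queue.poll();
            out.add( cur );
            queue.addAll( mapper.getSubordinates( cur ) ); // 'Owned' children only
        }
        return out;
    }
}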
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServicePathCacheMapper.java ================================================
package com.pinecone.hydra.service.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

@Mapper
@IbatisDataAccessObject
public interface ServicePathCacheMapper extends TriePathCacheManipulator {
    @Insert("INSERT INTO `hydra_service_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )")
    void insert( @Param("guid") GUID guid, @Param("path") String path );

    @Insert("INSERT INTO `hydra_service_node_cache_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )")
    void insertLongPath( @Param("guid") GUID guid, @Param("path") String path, @Param("longPath") String longPath );

    @Delete("DELETE FROM `hydra_service_node_cache_path` WHERE `guid`=#{guid}")
    void remove( GUID guid );

    default String getPath( GUID guid ) {
        String longPath = this.getLongPath( guid );
        if ( longPath != null ) {
            return this.getPath0( guid ) + longPath;
        }
        return this.getPath0( guid );
    }

    @Select("SELECT `long_path` FROM `hydra_service_node_cache_path` WHERE `guid`=#{guid}")
    String getLongPath( GUID guid );

    @Select("SELECT `path` FROM `hydra_service_node_cache_path` WHERE `guid`=#{guid}")
    String getPath0( GUID guid );

    @Select("SELECT `guid` FROM `hydra_service_node_cache_path` WHERE `path`=#{path}")
    GUID getNode( String path );

    @Select("SELECT `guid` FROM `hydra_service_node_cache_path` WHERE `path`=#{path}")
    GUID queryGUIDByPath( String path );
}
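getPath reassembles a cached path as path + long_path, which suggests the path column is width-limited and the overflow spills into long_path. A sketch of the matching write side, with a hypothetical 512-character limit standing in for the real column width (the DDL is not shown here):

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.service.ibatis.ServicePathCacheMapper;

public final class PathCacheSupport {
    // Hypothetical limit; the real width of the `path` column lives in the DDL, not shown here.
    private static final int PATH_COLUMN_LIMIT = 512;

    /** Caches a resolved path, spilling the tail into long_path when it exceeds the limit. */
    public static void store( ServicePathCacheMapper mapper, GUID guid, String fullPath ) {
        if ( fullPath.length() <= PATH_COLUMN_LIMIT ) {
            mapper.insert( guid, fullPath );
        }
        else {
            mapper.insertLongPath( guid,
                    fullPath.substring( 0, PATH_COLUMN_LIMIT ),
                    fullPath.substring( PATH_COLUMN_LIMIT ) );
        }
    }
}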
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceTreeMapper.java ================================================
package com.pinecone.hydra.service.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface ServiceTreeMapper extends TrieTreeManipulator {
    @Insert("INSERT INTO `hydra_service_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )")
    void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Override
    default void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ) {
        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );
        ownerManipulator.insertRootNode( node.getGuid() );
    }

    @Insert("INSERT INTO hydra_service_nodes (`guid`, `type`,`base_data_guid`,`node_metadata_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})")
    void insertTreeNode( @Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, @Param("nodeMetaGuid") GUID nodeMetaGuid );

    @Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_metadata_guid AS nodeMetadataGUID FROM hydra_service_nodes WHERE guid=#{guid}")
    GUIDImperialTrieNode getNodeExtendsFromMeta( GUID guid );

    @Select("SELECT COUNT( `id` ) FROM hydra_service_nodes WHERE guid=#{guid}")
    boolean contains( GUID key );

    @Override
    default GUIDImperialTrieNode getNode( GUID guid ) {
        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );
        if( node == null ) {
            return null;
        }
        List<GUID> parent = this.fetchParentGuids( guid );
        node.setParentGUID( parent );
        return node;
    }

    @Select("SELECT id, guid, parent_guid, linked_type FROM hydra_service_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}")
    GUIDImperialTrieNode getTreeNodeOnly( @Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid );

    @Select("SELECT count( * ) FROM hydra_service_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}")
    long countNode( @Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid );

    @Override
    default void purge( GUID guid ) {
        this.removeNodeMeta( guid );
        this.removeTreeNode( guid );
        this.removeOwnedTreeNode( guid );
    }

    @Delete("DELETE FROM `hydra_service_nodes` WHERE `guid`=#{guid}")
    void removeNodeMeta( @Param("guid") GUID guid );

    @Delete("DELETE FROM `hydra_service_node_tree` WHERE `guid` = #{guid}")
    void removeTreeNode( @Param("guid") GUID guid );

    @Delete("DELETE FROM `hydra_service_node_tree` WHERE `parent_guid` = #{parent_guid}")
    void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid );

    @Delete("DELETE FROM `hydra_service_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}")
    void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID parentGuid );

    @Delete("DELETE FROM `hydra_service_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}")
    void removeTreeNodeWithLinkedType( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Delete("DELETE FROM `hydra_service_node_tree` WHERE `guid`=#{childGuid} AND `parent_guid`=#{parentGuid}")
    void removeInheritance( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );

    @Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_service_node_tree` WHERE `parent_guid`=#{guid}")
    List<GUIDImperialTrieNode> getChildren( GUID guid );

    @Select("SELECT `guid` FROM `hydra_service_node_tree` WHERE `parent_guid` = #{parentGuid}")
    List<GUID> fetchChildrenGuids( @Param("parentGuid") GUID parentGuid );

    @Select("SELECT `parent_guid` FROM `hydra_service_node_tree` WHERE `guid`=#{guid}")
    List<GUID> fetchParentGuids( GUID guid );

    @Update("UPDATE `hydra_service_nodes` SET `type` = #{type} WHERE guid=#{guid}")
    void updateType( @Param("type") UOI type, @Param("guid") GUID guid );

    @Select( "SELECT guid FROM hydra_service_node_tree WHERE parent_guid IS NULL " )
    List<GUID> fetchRoot();

    @Override
    @Select( "SELECT COUNT( `guid` ) FROM hydra_service_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}" )
    boolean isRoot( GUID guid );

    @Override
    @Select( "SELECT COUNT( `guid` ) FROM hydra_service_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}" )
    long queryLinkedCount( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Select( "SELECT COUNT( `guid` ) FROM hydra_service_node_tree WHERE `guid` = #{guid}" )
    long queryAllLinkedCount( @Param("guid") GUID guid );

    @Override
    @Insert( "INSERT INTO `hydra_service_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) " +
             "VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})" )
    void newLinkTag( @Param("originalGuid") GUID originalGuid, @Param("dirGuid") GUID dirGuid, @Param("tagName") String tagName, @Param("tagGuid") GUID tagGuid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Update( "UPDATE hydra_service_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}" )
    void updateLinkTagName( @Param("tagGuid") GUID tagGuid, @Param("tagName") String tagName );

    @Override
    @Select( "SELECT `guid` FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}" )
    GUID getOriginalGuid( @Param("tagName") String tagName, @Param("dirGuid") GUID dirGuid );

    @Override
    @Select( "SELECT `guid` FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" )
    GUID getOriginalGuidByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );

    @Override
    @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}" )
    TreeReparseLinkNode getReparseLinkNode( @Param("tagName") String tagName, @Param("parentDirGuid") GUID parentDirGuid );

    @Override
    @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" )
    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );

    @Override @Select( "SELECT `guid` FROM hydra_service_node_tree WHERE `tag_name` =
#{tagName}" ) List fetchOriginalGuid( String tagName ); @Override @Select( "SELECT `guid` FROM hydra_service_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL" ) List fetchOriginalGuidRoot( String tagName ); @Override @Select( "SELECT COUNT(*) FROM `hydra_service_node_tree` WHERE `tag_guid` = #{guid}" ) boolean isTagGuid(GUID guid); @Override @Delete( "DELETE FROM `hydra_service_node_tree` WHERE `tag_guid` = #{guid}" ) void removeReparseLink( GUID guid ); @Override @Select( "SELECT `guid` FROM `hydra_service_node_tree` WHERE `tag_guid` = #{tagGuid}" ) GUID getOriginalGuidByTagGuid(GUID tagGuid); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/hydranium/ServiceMappingDriver.java ================================================ package com.pinecone.hydra.service.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class ServiceMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public ServiceMappingDriver( Processum superiorProcess ) { super(superiorProcess); } public ServiceMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, ServiceMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new ServiceMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/hydranium/ServiceMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.service.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.service.ibatis.AppNodeMetaMapper; import com.pinecone.hydra.service.ibatis.ApplicationNodeMapper; import com.pinecone.hydra.service.ibatis.ServiceInstanceMapper; import com.pinecone.hydra.service.ibatis.ServiceNamespaceMapper; import com.pinecone.hydra.service.ibatis.NamespaceRulesMapper; import com.pinecone.hydra.service.ibatis.ServiceNodeMetaMapper; import com.pinecone.hydra.service.ibatis.ServiceMetaMapper; import com.pinecone.hydra.service.ibatis.ServiceNodeMapper; import com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper; import com.pinecone.hydra.service.ibatis.ServiceTreeMapper; import com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator; import com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator; import com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator; import com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator; import com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator; import com.pinecone.hydra.service.kom.source.NodeMetaManipulator; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.service.kom.source.ServiceMetaManipulator; import com.pinecone.hydra.service.kom.source.ServiceNodeManipulator; import 
com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class ServiceMasterManipulatorImpl implements ServiceMasterManipulator { @Resource @Structure(type = ServiceMasterTreeManipulatorImpl.class ) KOISkeletonMasterManipulator skeletonMasterManipulator; @Resource @Structure(type = ServiceTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; @Resource @Structure(type = ServiceNodeMetaMapper.class ) NodeMetaManipulator nodeMetaManipulator; @Resource @Structure(type = ApplicationNodeMapper.class ) ApplicationNodeManipulator applicationNodeManipulator; @Resource @Structure( type = AppNodeMetaMapper.class ) ApplicationMetaManipulator applicationMetaManipulator; @Resource @Structure( type = ServiceNodeMapper.class ) ServiceNodeManipulator serviceNodeManipulator; @Resource @Structure( type = ServiceMetaMapper.class ) ServiceMetaManipulator serviceMetaManipulator; @Resource @Structure( type = ServiceNamespaceMapper.class ) ServiceNamespaceManipulator serviceNamespaceManipulator; @Resource @Structure( type = ServiceInstanceMapper.class ) ServiceInstanceManipulator serviceInstanceManipulator; @Resource @Structure( type = NamespaceRulesMapper.class ) NamespaceRulesManipulator namespaceRulesManipulator; @Resource @Structure( type = ServiceNodeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; public ServiceMasterManipulatorImpl() { } public ServiceMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( ServiceMasterManipulatorImpl.class, Map.of(), this ); this.skeletonMasterManipulator = new ServiceMasterTreeManipulatorImpl( driver ); } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public NodeMetaManipulator getNodeMetaManipulator() { return this.nodeMetaManipulator; } @Override public ApplicationNodeManipulator getApplicationNodeManipulator() { return this.applicationNodeManipulator; } @Override public ApplicationMetaManipulator getApplicationElementManipulator() { return this.applicationMetaManipulator; } @Override public ServiceNodeManipulator getServiceNodeManipulator() { return this.serviceNodeManipulator; } @Override public ServiceMetaManipulator getServiceMetaManipulator() { return this.serviceMetaManipulator; } @Override public ServiceNamespaceManipulator getNamespaceManipulator() { return this.serviceNamespaceManipulator; } @Override public NamespaceRulesManipulator getNamespaceRulesManipulator() { return this.namespaceRulesManipulator; } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } @Override public ServiceInstanceManipulator getServiceInstanceManipulator() { return this.serviceInstanceManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/hydranium/ServiceMasterTreeManipulatorImpl.java ================================================ package com.pinecone.hydra.service.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import 
com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper; import com.pinecone.hydra.service.ibatis.ServicePathCacheMapper; import com.pinecone.hydra.service.ibatis.ServiceTreeMapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class ServiceMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = ServicePathCacheMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = ServiceNodeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; @Resource @Structure( type = ServiceTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; public ServiceMasterTreeManipulatorImpl() { } public ServiceMasterTreeManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( ServiceMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/AppNodeMapper.java ================================================ package com.pinecone.hydra.task.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.AppElement; import com.pinecone.hydra.task.kom.entity.GenericAppElement; import com.pinecone.hydra.task.kom.source.AppNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface AppNodeMapper extends AppNodeManipulator { @Override @Insert("INSERT INTO `hydra_task_app_node` " + "(`guid`, `name`, `type`, `create_time`, `update_time`) " + "VALUES (#{guid}, #{name}, #{type}, #{createTime}, #{updateTime})") void insert( AppElement appElement); @Override @Delete("DELETE FROM `hydra_task_app_node` WHERE `guid` = #{guid}") void remove(@Param("guid") GUID guid); @Select("SELECT `id` AS `enumId`, `guid`, `name`, `type`, " + "`create_time` AS `createTime`, `update_time` AS `updateTime` " + "FROM `hydra_task_app_node` WHERE `guid` = #{guid}") GenericAppElement getAppElement(@Param("guid") GUID guid); @Override default AppElement getAppElement(GUID guid, TaskInstrument instrument ) { GenericAppElement element = this.getAppElement( guid ); element.apply( instrument ); return element; } @Override @Update("UPDATE `hydra_task_app_node` SET " + "`name` = #{name}, " + "`type` = #{type}, " + "`create_time` = #{createTime}, " + "`update_time` = #{updateTime} " + "WHERE `guid` = #{guid}") void update( AppElement appElement); @Override @Select( "SELECT `guid` FROM 
`hydra_task_app_node` WHERE `name` = #{name}" )
    List<GUID> getGuidsByName( String name );

    @Override
    @Select( "SELECT `guid` FROM `hydra_task_app_node` WHERE `name` = #{name} AND `guid` = #{guid}" )
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );
}

================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/InstanceNodeMapper.java ================================================
package com.pinecone.hydra.task.ibatis;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.CollectionUtils;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.TaskInstanceStatus;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.instance.GenericInstanceEntry;
import com.pinecone.hydra.task.kom.instance.InstanceEntry;
import com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import com.pinecone.slime.meta.TableIndex64Meta;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.time.LocalDateTime;
import java.util.Collection;
import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface InstanceNodeMapper extends InstanceNodeManipulator {
    @Override
    void insert( InstanceEntry instance );

    void update( InstanceEntry instance );

    GenericInstanceEntry queryByGuid0( GUID guid );

    @Override
    default InstanceEntry queryByGuid( GUID guid, TaskInstrument instrument ) {
        GenericInstanceEntry entry = this.queryByGuid0( guid );
        if ( entry == null ) {
            return null;
        }
        entry.apply( instrument );
        return entry;
    }

    int countInstance();

    long countInstanceByName( String name );

    List<GenericInstanceEntry> fetchInstances0( @Param("offset") long offset, @Param("pageSize") long pageSize );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<InstanceEntry> fetchInstances( TaskInstrument instrument, long offset, long pageSize ) {
        List<GenericInstanceEntry> list = this.fetchInstances0( offset, pageSize );
        for ( GenericInstanceEntry entry : list ) {
            entry.apply( instrument );
        }
        return (List) list;
    }

    List<GenericInstanceEntry> queryByTaskGuid0( @Param("taskGuid") GUID taskGuid, @Param("offset") long offset, @Param("pageSize") long pageSize );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<InstanceEntry> queryByTaskGuid( TaskInstrument instrument, GUID taskGuid, long offset, long pageSize ) {
        List<GenericInstanceEntry> list = this.queryByTaskGuid0( taskGuid, offset, pageSize );
        for ( GenericInstanceEntry entry : list ) {
            entry.apply( instrument );
        }
        return (List) list;
    }

    long countInstanceByTaskGuid( GUID taskGuid );

    GenericInstanceEntry findLastExecuted0( @Param("taskGuid") GUID taskGuid, @Param("bizTime") String bizTime );

    @Override
    default InstanceEntry findLastExecuted( GUID taskGuid, TaskInstrument instrument, String bizTime ) {
        GenericInstanceEntry entry = this.findLastExecuted0( taskGuid, bizTime );
        if ( entry == null ) {
            return null;
        }
        entry.apply( instrument );
        return entry;
    }

    @Override
    TableIndex64Meta selectSchedulableIdRange( @Param("runStatuses") Collection<TaskInstanceStatus> runStatuses, @Param("targetTime") LocalDateTime targetTime, @Param( "actuallyPriority" ) @Nullable Short actuallyPriority );

    List<GenericInstanceEntry> fetchSchedulableInstances0( @Param( "idMin" ) long idMin, @Param( "idMax" ) long idMax, @Param("runStatuses") Collection<TaskInstanceStatus> runStatuses, @Param( "targetTime" ) LocalDateTime targetTime, @Param( "actuallyPriority" ) @Nullable Short actuallyPriority );

    @Override
    default List<InstanceEntry> fetchSchedulableInstances( TaskInstrument instrument, long idMin, long idMax, Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, @Nullable Short actuallyPriority ) {
        List<GenericInstanceEntry> list = this.fetchSchedulableInstances0( idMin, idMax, runStatuses, targetTime, actuallyPriority );
        for ( GenericInstanceEntry entry : list ) {
            entry.apply( instrument );
        }
        return CollectionUtils.genericConvert( list );
    }
}
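Rows come back as GenericInstanceEntry and only become usable InstanceEntry values after apply( instrument ) binds the runtime TaskInstrument; every default method above follows that shape. The same step as a standalone helper (illustrative, not repository API):

import java.util.List;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.instance.GenericInstanceEntry;

public final class InstrumentBinding {
    /** Binds the instrument to each freshly mapped row, as the default methods above do inline. */
    public static <T extends GenericInstanceEntry> List<T> bindAll( List<T> rows, TaskInstrument instrument ) {
        for ( T row : rows ) {
            row.apply( instrument );
        }
        return rows;
    }
}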
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskNamespaceMapper.java ================================================
package com.pinecone.hydra.task.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.kom.entity.GenericNamespace;
import com.pinecone.hydra.task.kom.entity.Namespace;
import com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface TaskNamespaceMapper extends TaskNamespaceManipulator {
    @Insert("INSERT INTO `hydra_task_namespace_node` (`guid`, `name`) VALUES (#{guid},#{name})")
    void insert( Namespace ns );

    @Delete("DELETE FROM `hydra_task_namespace_node` WHERE `guid`=#{guid}")
    void remove( @Param("guid") GUID guid );

    @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_task_namespace_node` WHERE `guid`=#{guid}")
    GenericNamespace getNamespace( @Param("guid") GUID guid );

    @Update("UPDATE `hydra_task_namespace_node` SET `name` = #{name} WHERE `guid` = #{guid}")
    void update( Namespace ns );

    @Select("SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_task_namespace_node` WHERE name=#{name}")
    List<GenericNamespace> fetchNamespaceNodeByName0( @Param("name") String name );

    @SuppressWarnings( "unchecked" )
    default List<Namespace> fetchNamespaceNodeByName( String name ) {
        return (List) this.fetchNamespaceNodeByName0( name );
    }

    @Override
    @Select( "SELECT `guid` FROM `hydra_task_namespace_node` WHERE `name` = #{name}" )
    List<GUID> getGuidsByName( String name );

    @Override
    @Select( "SELECT `guid` FROM `hydra_task_namespace_node` WHERE `name` = #{name} AND `guid` = #{guid}" )
    List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid );
}
@Param("guid") GUID guid ); @Override default TaskElement getTaskNode( GUID guid, TaskInstrument instrument ) { GenericTaskElement taskElement = this.getTaskNode0( guid ); taskElement.apply( instrument ); return taskElement; } @Override void update( TaskElement taskElement ); List fetchTaskNodeByName0( @Param("name") String name ); @Override @SuppressWarnings( "unchecked" ) default List fetchTaskNodeByName( String name ) { List list = this.fetchTaskNodeByName0( name ); return (List) list; } @Override @Select( "SELECT `guid` FROM `hydra_task_task_node` WHERE `name` = #{name}" ) List getGuidsByName( String name ); @Override @Select( "SELECT `guid` FROM `hydra_task_task_node` WHERE `name` = #{name} AND `guid` = #{guid}" ) List getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid ); @Override TableIndex64Meta selectSchedulableIdRange( @Param( "cycles" ) Collection cycles, @Param( "targetTime" ) LocalDateTime targetTime ); List fetchSchedulableTasksInRange0( @Param( "idMin" ) long idMin, @Param( "idMax" ) long idMax, @Param( "cycles" ) Collection cycles, @Param( "targetTime" ) LocalDateTime targetTime ); @Override @SuppressWarnings( "unchecked" ) default List fetchSchedulableTasksInRange( long idMin, long idMax, Collection cycles, LocalDateTime targetTime ) { List list = this.fetchSchedulableTasksInRange0( idMin, idMax, cycles, targetTime ); return (List) list; } @Override List listPage(int offset, int pageSize); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskNodeOwnerMapper.java ================================================ package com.pinecone.hydra.task.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.LinkedType; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @Mapper @IbatisDataAccessObject public interface TaskNodeOwnerMapper extends TireOwnerManipulator { @Insert("INSERT INTO `hydra_task_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )") void insertRootNode(@Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType ); @Insert( "INSERT INTO `hydra_task_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})" ) void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType ); @Update( "UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType ); @Update( "UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Update( "UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" ) void updateLinkedType( @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType 
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskNodeOwnerMapper.java ================================================
package com.pinecone.hydra.task.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface TaskNodeOwnerMapper extends TireOwnerManipulator {
    @Insert("INSERT INTO `hydra_task_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )")
    void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Insert( "INSERT INTO `hydra_task_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})" )
    void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );

    @Update( "UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" )
    void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID, @Param("linkedType") LinkedType linkedType );

    @Update( "UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" )
    void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID );

    @Update( "UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}" )
    void updateLinkedType( @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );

    @Delete( "DELETE FROM `hydra_task_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" )
    void remove( @Param("subordinateGuid") GUID subordinateGuid, @Param("ownerGuid") GUID ownerGuid );

    @Delete( "DELETE FROM `hydra_task_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'" )
    void removeBySubordinate( GUID subordinateGuid );

//    @Delete("DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}")
//    void removeByOwner(GUID ownerGuid);

    @Select( "SELECT `parent_guid` FROM `hydra_task_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'" )
    GUID getOwner( GUID subordinateGuid );

    @Select( "SELECT guid FROM hydra_task_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'" )
    List<GUID> getSubordinates( GUID guid );

    @Update("UPDATE `hydra_task_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}")
    void setLinkedType( @Param("sourceGuid") GUID sourceGuid, @Param("targetGuid") GUID targetGuid, @Param("linkedType") LinkedType linkedType );

    @Select("SELECT `linked_type` FROM `hydra_task_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}")
    LinkedType getLinkedType( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );
}
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskPathCacheMapper.java ================================================
package com.pinecone.hydra.task.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

@Mapper
@IbatisDataAccessObject
public interface TaskPathCacheMapper extends TriePathCacheManipulator {
    @Insert("INSERT INTO `hydra_task_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )")
    void insert( @Param("guid") GUID guid, @Param("path") String path );

    @Insert("INSERT INTO `hydra_task_node_cache_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )")
    void insertLongPath( @Param("guid") GUID guid, @Param("path") String path, @Param("longPath") String longPath );

    @Delete("DELETE FROM `hydra_task_node_cache_path` WHERE `guid`=#{guid}")
    void remove( GUID guid );

    default String getPath( GUID guid ) {
        String longPath = this.getLongPath( guid );
        if ( longPath != null ) {
            return this.getPath0( guid ) + longPath;
        }
        return this.getPath0( guid );
    }

    @Select("SELECT `long_path` FROM `hydra_task_node_cache_path` WHERE `guid`=#{guid}")
    String getLongPath( GUID guid );

    @Select("SELECT `path` FROM `hydra_task_node_cache_path` WHERE `guid`=#{guid}")
    String getPath0( GUID guid );

    @Select("SELECT `guid` FROM `hydra_task_node_cache_path` WHERE `path`=#{path}")
    GUID getNode( String path );

    @Select("SELECT `guid` FROM `hydra_task_node_cache_path` WHERE `path`=#{path}")
    GUID queryGUIDByPath( String path );
}
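A round-trip check over the task path cache (illustrative, not repository code); it only holds for entries whose full path fits in the path column, since a spilled entry is keyed by its truncated path:

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.ibatis.TaskPathCacheMapper;

public final class TaskPathCacheCheck {
    /** Round-trips path -> guid -> reassembled path; holds only for entries without a long_path spill. */
    public static boolean consistent( TaskPathCacheMapper mapper, String path ) {
        GUID guid = mapper.queryGUIDByPath( path );
        return guid != null && path.equals( mapper.getPath( guid ) );
    }
}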
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskTreeMapper.java ================================================
package com.pinecone.hydra.task.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.uoi.UOI;
import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;
import com.pinecone.hydra.unit.imperium.LinkedType;
import com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;
import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;
import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;

@Mapper
@IbatisDataAccessObject
public interface TaskTreeMapper extends TrieTreeManipulator {
    @Insert("INSERT INTO `hydra_task_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )")
    void insertRootNode( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Override
    default void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ) {
        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );
        ownerManipulator.insertRootNode( node.getGuid() );
    }

    @Insert("INSERT INTO hydra_task_nodes (`guid`, `type`,`base_data_guid`,`node_metadata_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})")
    void insertTreeNode( @Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, @Param("nodeMetaGuid") GUID nodeMetaGuid );

    @Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_metadata_guid AS nodeMetadataGUID FROM hydra_task_nodes WHERE guid=#{guid}")
    GUIDImperialTrieNode getNodeExtendsFromMeta( GUID guid );

    @Select("SELECT COUNT( `id` ) FROM hydra_task_nodes WHERE guid=#{guid}")
    boolean contains( GUID key );

    @Override
    default GUIDImperialTrieNode getNode( GUID guid ) {
        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );
        if( node == null ) {
            return null;
        }
        List<GUID> parent = this.fetchParentGuids( guid );
        node.setParentGUID( parent );
        return node;
    }

    @Select("SELECT id, guid, parent_guid, linked_type FROM hydra_task_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}")
    GUIDImperialTrieNode getTreeNodeOnly( @Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid );

    @Select("SELECT count( * ) FROM hydra_task_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}")
    long countNode( @Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid );

    @Override
    default void purge( GUID guid ) {
        this.removeNodeMeta( guid );
        this.removeTreeNode( guid );
        this.removeOwnedTreeNode( guid );
    }

    @Delete("DELETE FROM `hydra_task_nodes` WHERE `guid`=#{guid}")
    void removeNodeMeta( @Param("guid") GUID guid );

    @Delete("DELETE FROM `hydra_task_node_tree` WHERE `guid` = #{guid}")
    void removeTreeNode( @Param("guid") GUID guid );

    @Delete("DELETE FROM `hydra_task_node_tree` WHERE `parent_guid` = #{parent_guid}")
    void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid );

    @Delete("DELETE FROM `hydra_task_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}")
    void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID parentGuid );

    @Delete("DELETE FROM `hydra_task_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}")
    void removeTreeNodeWithLinkedType( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Delete("DELETE FROM `hydra_task_node_tree` WHERE `guid`=#{childGuid} AND `parent_guid`=#{parentGuid}")
    void removeInheritance( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid );

    @Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_task_node_tree` WHERE `parent_guid`=#{guid}")
    List<GUIDImperialTrieNode> getChildren( GUID guid );

    @Select("SELECT `guid` FROM `hydra_task_node_tree` WHERE `parent_guid` = #{parentGuid}")
    List<GUID> fetchChildrenGuids( @Param("parentGuid") GUID parentGuid );

    @Select("SELECT `parent_guid` FROM `hydra_task_node_tree` WHERE `guid`=#{guid}")
    List<GUID> fetchParentGuids( GUID guid );

    @Update("UPDATE `hydra_task_nodes` SET `type` = #{type} WHERE guid=#{guid}")
    void updateType( @Param("type") UOI type, @Param("guid") GUID guid );

    @Select( "SELECT guid FROM hydra_task_node_tree WHERE parent_guid IS NULL " )
    List<GUID> fetchRoot();

    @Override
    @Select( "SELECT COUNT( `guid` ) FROM hydra_task_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}" )
    boolean isRoot( GUID guid );

    @Override
    @Select( "SELECT COUNT( `guid` ) FROM hydra_task_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}" )
    long queryLinkedCount( @Param("guid") GUID guid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Select( "SELECT COUNT( `guid` ) FROM hydra_task_node_tree WHERE `guid` = #{guid}" )
    long queryAllLinkedCount( @Param("guid") GUID guid );

    @Override
    @Insert( "INSERT INTO `hydra_task_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) " +
             "VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})" )
    void newLinkTag( @Param("originalGuid") GUID originalGuid, @Param("dirGuid") GUID dirGuid, @Param("tagName") String tagName, @Param("tagGuid") GUID tagGuid, @Param("linkedType") LinkedType linkedType );

    @Override
    @Update( "UPDATE hydra_task_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}" )
    void updateLinkTagName( @Param("tagGuid") GUID tagGuid, @Param("tagName") String tagName );

    @Override
    @Select( "SELECT `guid` FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}" )
    GUID getOriginalGuid( @Param("tagName") String tagName, @Param("dirGuid") GUID dirGuid );

    @Override
    @Select( "SELECT `guid` FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" )
    GUID getOriginalGuidByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );

    @Override
    @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}" )
    TreeReparseLinkNode getReparseLinkNode( @Param("tagName") String tagName, @Param("parentDirGuid") GUID parentDirGuid );

    @Override
    @Select( "SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}" )
    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param("tagName") String tagName, @Param("nodeGuid") GUID nodeGUID );

    @Override
    @Select( "SELECT `guid` FROM hydra_task_node_tree WHERE `tag_name` = #{tagName}" )
    List<GUID> fetchOriginalGuid( String tagName );

    @Override
    @Select( "SELECT `guid` FROM hydra_task_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL" )
    List<GUID> fetchOriginalGuidRoot( String tagName );

    @Override
    @Select( "SELECT COUNT(*) FROM `hydra_task_node_tree` WHERE `tag_guid` = #{guid}" )
    boolean isTagGuid( GUID guid );

    @Override
    @Delete( "DELETE FROM `hydra_task_node_tree` WHERE `tag_guid` = #{guid}" )
    void removeReparseLink( GUID
guid ); @Override @Select( "SELECT `guid` FROM `hydra_task_node_tree` WHERE `tag_guid` = #{tagGuid}" ) GUID getOriginalGuidByTagGuid(GUID tagGuid); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/hydranium/TaskMappingDriver.java ================================================ package com.pinecone.hydra.task.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class TaskMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public TaskMappingDriver( Processum superiorProcess ) { super( superiorProcess ); } public TaskMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, TaskMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new TaskMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/hydranium/TaskMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.task.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.task.ibatis.AppNodeMapper; import com.pinecone.hydra.task.ibatis.TaskNamespaceMapper; import com.pinecone.hydra.task.ibatis.TaskNodeMapper; import com.pinecone.hydra.task.ibatis.TaskNodeOwnerMapper; import com.pinecone.hydra.task.ibatis.TaskTreeMapper; import com.pinecone.hydra.task.ibatis.InstanceNodeMapper; import com.pinecone.hydra.task.kom.source.AppNodeManipulator; import com.pinecone.hydra.task.kom.source.TaskMasterManipulator; import com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator; import com.pinecone.hydra.task.kom.source.TaskNodeManipulator; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class TaskMasterManipulatorImpl implements TaskMasterManipulator { @Resource @Structure( type = TaskNodeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; @Resource @Structure(type = TaskTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; @Resource @Structure(type = TaskNodeMapper.class) TaskNodeManipulator taskNodeManipulator; @Resource @Structure(type = AppNodeMapper.class ) AppNodeManipulator appNodeManipulator; @Resource @Structure( type = TaskNamespaceMapper.class ) TaskNamespaceManipulator taskNamespaceManipulator; @Resource @Structure(type = TaskMasterTreeManipulatorImpl.class) KOISkeletonMasterManipulator skeletonMasterManipulator; @Resource @Structure(type = 
InstanceNodeMapper.class) InstanceNodeManipulator instanceNodeManipulator; public TaskMasterManipulatorImpl() { } public TaskMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( TaskMasterManipulatorImpl.class, Map.of(), this ); this.skeletonMasterManipulator = new TaskMasterTreeManipulatorImpl( driver ); } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public TaskNodeManipulator getTaskNodeManipulator() { return this.taskNodeManipulator; } @Override public AppNodeManipulator getAppNodeManipulator() { return this.appNodeManipulator; } @Override public TaskNamespaceManipulator getNamespaceManipulator() { return this.taskNamespaceManipulator; } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public InstanceNodeManipulator getInstanceNodeManipulator() { return this.instanceNodeManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/hydranium/TaskMasterTreeManipulatorImpl.java ================================================ package com.pinecone.hydra.task.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.task.ibatis.TaskNodeOwnerMapper; import com.pinecone.hydra.task.ibatis.TaskPathCacheMapper; import com.pinecone.hydra.task.ibatis.TaskTreeMapper; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class TaskMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = TaskPathCacheMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = TaskNodeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; @Resource @Structure( type = TaskTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; public TaskMasterTreeManipulatorImpl() { } public TaskMasterTreeManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( TaskMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/VersionMapper.java ================================================ package com.pinecone.hydra.version.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.version.entity.TitanVersion; import com.pinecone.hydra.storage.version.entity.Version; import com.pinecone.hydra.storage.version.source.VersionManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import 
org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;

@IbatisDataAccessObject
public interface VersionMapper extends VersionManipulator {
    @Insert("INSERT INTO `hydra_uofs_version` ( `version_guid`, `version`, `target_storage_object_guid`, `file_guid`) VALUES (#{versionGuid},#{version}, #{targetStorageObjectGuid}, #{fileGuid})")
    void insertObjectVersion( Version version );

    @Delete("DELETE FROM `hydra_uofs_version` WHERE `version` = #{version} AND `file_guid` = #{fileGuid}")
    void removeObjectVersion( @Param("version") String version, @Param("fileGuid") GUID fileGuid );

    @Select("SELECT `target_storage_object_guid` FROM `hydra_uofs_version` WHERE `version` = #{version} AND file_guid = #{fileGuid}")
    GUID queryObjectGuid( @Param("version") String version, @Param("fileGuid") GUID fileGuid );

    @Select("SELECT EXISTS(SELECT 1 FROM `hydra_uofs_version` WHERE `file_guid` = #{fileGuid})")
    boolean queryIsManage( @Param("fileGuid") GUID fileGuid );

    @Select("SELECT `target_storage_object_guid` FROM `hydra_uofs_version` WHERE `file_guid` = #{fileGuid}")
    List<GUID> fetchVersions( @Param("fileGuid") GUID fileGuid );

    @Select("SELECT `file_guid` FROM `hydra_uofs_version` WHERE target_storage_object_guid = #{fileGuid}")
    GUID getVersionFileByGuid( GUID fileGuid );

    @Select("SELECT `version`, `target_storage_object_guid` AS targetStorageObjectGuid, `file_guid` AS fileGuid, `enable_crc32` AS enableCrc32, `crc32`, `version_guid` AS versionGuid FROM `hydra_uofs_version` WHERE `target_storage_object_guid` = #{targetStorageObjectGuid}")
    TitanVersion queryByTargetStorageObjectGuid( GUID targetStorageObjectGuid );
}

================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/VersionMappingMapper.java ================================================
package com.pinecone.hydra.version.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.version.entity.TitanVersionMapping;
import com.pinecone.hydra.storage.version.entity.VersionMapping;
import com.pinecone.hydra.storage.version.source.VersionMappingManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import java.util.List;

@IbatisDataAccessObject
public interface VersionMappingMapper extends VersionMappingManipulator {
    @Insert("INSERT INTO `hydra_ucdn_version_mapping` ( `enable_version_guid`, `file_guid`,`version_guid`) VALUES (#{enableVersionGuid},#{fileGuid},#{versionGuid})")
    void insert( VersionMapping versionMapping );

    @Delete("DELETE FROM `hydra_ucdn_version_mapping` WHERE `enable_version_guid` = #{enableVersionGuid} AND `file_guid` = #{fileGuid} AND `version_guid` = #{versionGuid}")
    void remove( VersionMapping versionMapping );

    @Select("SELECT `enable_version_guid` AS enableVersionGuid, `file_guid` AS fileGuid, `version_guid` AS versionGuid FROM `hydra_ucdn_version_mapping` WHERE `file_guid` = #{fileGuid}")
    TitanVersionMapping queryVersionMapping( GUID fileGuid );

    @Update("UPDATE `hydra_ucdn_version_mapping` " +
            "SET `enable_version_guid` = #{enableVersionGuid}, " +
            "`version_guid` = #{versionGuid} " +
            "WHERE `file_guid` = #{fileGuid}")
    void update( VersionMapping versionMapping );

    @Select("SELECT `enable_version_guid` AS enableVersionGuid, `file_guid` AS fileGuid, `version_guid` AS versionGuid FROM `hydra_ucdn_version_mapping`")
    List<TitanVersionMapping> queryAllVersionMapper();

    @Select("SELECT EXISTS(SELECT 1 FROM `hydra_ucdn_version_mapping`
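update repoints a file's mapping row, so switching the active version is a read-modify-write over queryVersionMapping. A sketch, where the VersionMapping setter names are assumptions for illustration:

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.version.entity.TitanVersionMapping;
import com.pinecone.hydra.version.ibatis.VersionMappingMapper;

public final class EnableVersionSwitch {
    /** Repoints a file's active-version mapping at another version row. */
    public static void switchTo( VersionMappingMapper mapper, GUID fileGuid, GUID versionGuid ) {
        TitanVersionMapping mapping = mapper.queryVersionMapping( fileGuid );
        if ( mapping == null ) {
            return; // nothing mapped for this file yet
        }
        // Assumption: these setters exist on the mapping entity; the names are illustrative.
        mapping.setEnableVersionGuid( versionGuid );
        mapping.setVersionGuid( versionGuid );
        mapper.update( mapping );
    }
}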
WHERE `enable_version_guid` = #{enableVersionGuid})")
    boolean isExistEnableVersionMapping( GUID enableVersionGuid );
}

================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/hydranium/VersionMappingDriver.java ================================================
package com.pinecone.hydra.version.ibatis.hydranium;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;

public class VersionMappingDriver extends ArchMappingDriver implements KOIMappingDriver {
    protected KOIMasterManipulator mKOIMasterManipulator;

    public VersionMappingDriver( Processum superiorProcess ) {
        super( superiorProcess );
    }

    // Temp , TODO
    public VersionMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {
        super( superiorProcess, ibatisClient, dispenserCenter, VersionMappingDriver.class.getPackageName().replace( "hydranium", "" ) );
        this.mKOIMasterManipulator = new VersionMasterManipulatorImpl( this );
    }

    @Override
    public KOIMasterManipulator getMasterManipulator() {
        return this.mKOIMasterManipulator;
    }
}

================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/hydranium/VersionMasterManipulatorImpl.java ================================================
package com.pinecone.hydra.version.ibatis.hydranium;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.storage.version.source.VersionManipulator;
import com.pinecone.hydra.storage.version.source.VersionMappingManipulator;
import com.pinecone.hydra.storage.version.source.VersionMasterManipulator;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;
import com.pinecone.hydra.version.ibatis.VersionMapper;
import com.pinecone.hydra.version.ibatis.VersionMappingMapper;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class VersionMasterManipulatorImpl implements VersionMasterManipulator {
    @Resource
    @Structure( type = VersionMapper.class )
    VersionManipulator versionManipulator;

    @Resource
    @Structure( type = VersionMappingMapper.class )
    VersionMappingManipulator versionMappingManipulator;

    public VersionMasterManipulatorImpl() {
    }

    public VersionMasterManipulatorImpl( KOIMappingDriver driver ) {
        driver.autoConstruct( VersionMasterManipulatorImpl.class, Map.of(), this );
    }

    @Override
    public VersionManipulator getVersionManipulator() {
        return this.versionManipulator;
    }

    @Override
    public VersionMappingManipulator getVersionMappingManipulator() {
        return this.versionMappingManipulator;
    }

    @Override
    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {
        return null;
    }
}
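The hydranium drivers follow one pattern: the constructor auto-constructs a master manipulator, and callers reach the individual manipulators through it. A sketch of that resolution path (illustrative, not repository code; the cast is safe because the driver installs a VersionMasterManipulatorImpl):

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.storage.version.source.VersionManipulator;
import com.pinecone.hydra.storage.version.source.VersionMasterManipulator;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.version.ibatis.hydranium.VersionMappingDriver;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;

public final class VersionDriverWiring {
    /** Resolves the version manipulator behind the driver facade. */
    public static VersionManipulator versionsOf( Processum proc, IbatisClient client, ResourceDispenserCenter dispenser ) {
        VersionMappingDriver driver = new VersionMappingDriver( proc, client, dispenser );
        VersionMasterManipulator master = (VersionMasterManipulator) driver.getMasterManipulator();
        return master.getVersionManipulator();
    }
}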
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/LineSegmentMapper.java ================================================
package com.pinecone.hydra.volume.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.volume.source.LineSegmentManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

@IbatisDataAccessObject
public interface LineSegmentMapper extends LineSegmentManipulator {
    @Insert("INSERT INTO `hydra_volume_line_segment` (`id_min`, `id_max`, `volume_guid`) VALUES ( #{idMin}, #{idMax}, #{volumeGuid} )")
    void insert( @Param("idMin") int idMin, @Param("idMax") int idMax, @Param("volumeGuid") GUID volumeGuid );

    @Select("SELECT `volume_guid` FROM `hydra_volume_line_segment` WHERE id_min < #{id} AND id_max > #{id}")
    GUID getVolumeGuid( int id );

    @Delete("DELETE FROM hydra_volume_line_segment WHERE id_min = #{idMin} AND id_max = #{idMax}")
    void delete( @Param("idMin") int idMin, @Param("idMax") int idMax );
}

================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/MirroredVolumeMapper.java ================================================
package com.pinecone.hydra.volume.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.volume.entity.MirroredVolume;
import com.pinecone.hydra.storage.volume.entity.local.mirrored.TitanLocalMirroredVolume;
import com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Select;

@IbatisDataAccessObject
public interface MirroredVolumeMapper extends MirroredVolumeManipulator, PrimeLogicVolumeMapper {
    @Insert("INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `definition_capacity`, `used_size`, `quota_capacity`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{definitionCapacity}, #{usedSize}, #{quotaCapacity}, #{type}, #{extConfig} )")
    void insert( MirroredVolume mirroredVolume );

    @Delete("DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}")
    void remove( GUID guid );

    default TitanLocalMirroredVolume getMirroredVolume( GUID guid ) {
        TitanLocalMirroredVolume mirroredVolume0 = this.getMirroredVolume0( guid );
        if ( mirroredVolume0 == null ) {
            return null;
        }
        mirroredVolume0.setMirroredVolumeManipulator( this );
        return mirroredVolume0;
    }

    @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `definition_capacity` AS definitionCapacity, `used_size` AS usedSize, `quota_capacity` AS quotaCapacity, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}")
    TitanLocalMirroredVolume getMirroredVolume0( GUID guid );
}

================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/MountPointMapper.java ================================================
package com.pinecone.hydra.volume.ibatis;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.storage.volume.entity.MountPoint;
import com.pinecone.hydra.storage.volume.entity.TitanMountPoint;
import com.pinecone.hydra.storage.volume.source.MountPointManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;

@IbatisDataAccessObject
@Mapper
public interface MountPointMapper extends
MountPointManipulator { @Insert("INSERT INTO `hydra_uofs_volumes_mount_point` (`guid`, `volume_guid`, `create_time`, `update_time`, `name`, `mount_point`) VALUES (#{guid},#{volumeGuid},#{createTime},#{updateTime},#{name},#{mountPoint})") void insert( MountPoint mountPoint ); @Delete("DELETE FROM `hydra_uofs_volumes_mount_point` WHERE `guid` = #{guid}") void remove( GUID guid ); @Delete("DELETE FROM `hydra_uofs_volumes_mount_point` WHERE `volume_guid` = #{guid}") void removeByVolumeGuid( GUID guid ); default TitanMountPoint getMountPoint(GUID guid){ TitanMountPoint mountPoint0 = this.getMountPoint0( guid ); if ( mountPoint0 == null ){ return null; } mountPoint0.setMountPointManipulator( this ); return mountPoint0; } @Select("SELECT `id` AS enumId, `guid`, `volume_guid` AS volumeGuid, `create_time` AS createTime, `update_time` AS updateTime, `name`, `mount_point` AS mountPoint FROM `hydra_uofs_volumes_mount_point` WHERE `guid` = #{guid}") TitanMountPoint getMountPoint0(GUID guid); default TitanMountPoint getMountPointByVolumeGuid( GUID guid ){ TitanMountPoint mountPoint = this.getMountPointByVolumeGuid0(guid); if ( mountPoint == null ){ return null; } mountPoint.setMountPointManipulator( this ); return mountPoint; } @Select("SELECT `id` AS enumId, `guid`, `volume_guid` AS volumeGuid, `create_time` AS createTime, `update_time` AS updateTime, `name`, `mount_point` AS mountPoint FROM `hydra_uofs_volumes_mount_point` WHERE `volume_guid` = #{guid}") TitanMountPoint getMountPointByVolumeGuid0( GUID guid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/PhysicalVolumeMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.local.physical.TitanLocalPhysicalVolume; import com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.ArrayList; import java.util.List; @IbatisDataAccessObject public interface PhysicalVolumeMapper extends PhysicalVolumeManipulator { @Insert("INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type}, #{extConfig} )") void insert( PhysicalVolume physicalVolume ); @Delete("DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}") void remove( GUID guid ); @Override default TitanLocalPhysicalVolume getPhysicalVolume(GUID guid){ TitanLocalPhysicalVolume physicalVolume0 = this.getPhysicalVolume0( guid ); if(physicalVolume0 == null){ return null; } physicalVolume0.setPhysicalVolumeManipulator( this ); return physicalVolume0; } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid} AND type = 'PhysicalVolume'") TitanLocalPhysicalVolume getPhysicalVolume0(GUID guid); @Override default TitanLocalPhysicalVolume getPhysicalVolumeByName( String name ){ TitanLocalPhysicalVolume physicalVolumeByName0 = this.getPhysicalVolumeByName0(name); 
if ( physicalVolumeByName0 == null ){ return null; } physicalVolumeByName0.setPhysicalVolumeManipulator( this ); return physicalVolumeByName0; } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `name` = #{name}") TitanLocalPhysicalVolume getPhysicalVolumeByName0( String name ); default TitanLocalPhysicalVolume getSmallestCapacityPhysicalVolume(){ TitanLocalPhysicalVolume physicalVolume0 = this.getSmallestCapacityPhysicalVolume0(); if ( physicalVolume0 == null ){ return null; } physicalVolume0.setPhysicalVolumeManipulator( this ); return physicalVolume0; } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'PhysicalVolume' ORDER BY ( `definition_capacity` - hydra_uofs_volumes.`used_size` ) ASC LIMIT 1") TitanLocalPhysicalVolume getSmallestCapacityPhysicalVolume0(); @Select("SELECT `logic_guid` FROM `hydra_volume_physical_logic` WHERE `physical_guid` = #{guid}") GUID getParent( GUID guid ); default List<Volume> queryAllPhysicalVolumes(){ List<TitanLocalPhysicalVolume> physicalVolumes = this.queryAllPhysicalVolumes0(); return new ArrayList<>(physicalVolumes); } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM hydra_uofs_volumes WHERE type = 'PhysicalVolume'") List<TitanLocalPhysicalVolume> queryAllPhysicalVolumes0(); @Update("UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}") void update( PhysicalVolume physicalVolume ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/PrimeLogicVolumeMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import java.util.List; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.source.LogicVolumeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; @IbatisDataAccessObject public interface PrimeLogicVolumeMapper extends LogicVolumeManipulator { @Select("SELECT `guid` FROM `hydra_uofs_volumes` WHERE `name` = #{name}") List<GUID> getGuidsByName( String name ); @Select("SELECT `guid` FROM `hydra_uofs_volumes` WHERE `name` = #{name} AND `guid` = #{guid}") List<GUID> getGuidsByNameID( @Param("name") String name, @Param("guid") GUID guid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/SQLiteVolumeMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @IbatisDataAccessObject public interface SQLiteVolumeMapper extends SQLiteVolumeManipulator { @Insert("INSERT INTO `hydra_volume_sqlite_volume` (`physics_volume_guid`, `volume_guid`) VALUES ( #{physicsGuid}, #{volumeGuid} )") void insert(@Param("physicsGuid") GUID physicsGuid, @Param("volumeGuid") GUID volumeGuid ); @Select("SELECT `physics_volume_guid` FROM `hydra_volume_sqlite_volume` WHERE `volume_guid` = #{volumeGuid}") GUID
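// This mapper keeps the one-to-one link between a SQLite volume and the physical volume hosting it.
// Sketch (hypothetical GUID values, configured SqlSession assumed):
//   SQLiteVolumeMapper sqlite = session.getMapper( SQLiteVolumeMapper.class );
//   sqlite.insert( physicsGuid, volumeGuid );
//   GUID host = sqlite.getPhysicsGuid( volumeGuid );   // yields physicsGuid again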
getPhysicsGuid( GUID volumeGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/SimpleVolumeMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume; import com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.ArrayList; import java.util.List; @IbatisDataAccessObject public interface SimpleVolumeMapper extends SimpleVolumeManipulator, PrimeLogicVolumeMapper { @Insert("INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type}, #{extConfig} )") void insert( SimpleVolume simpleVolume ); @Delete("DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}") void remove( GUID guid ); @Update("UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}") void update( SimpleVolume simpleVolume ); @Override default TitanLocalSimpleVolume getSimpleVolume(GUID guid){ TitanLocalSimpleVolume simpleVolume0 = this.getSimpleVolume0( guid ); if ( simpleVolume0 == null ){ return null; } simpleVolume0.setSimpleVolumeManipulator( this ); return simpleVolume0; } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}") TitanLocalSimpleVolume getSimpleVolume0(GUID guid); @Insert("INSERT INTO `hydra_volume_physical_logic` (`logic_guid`, `physical_guid`) VALUES ( #{logicGuid}, #{physicalGuid} )") void extendLogicalVolume( @Param("logicGuid") GUID logicGuid, @Param("physicalGuid") GUID physicalGuid ); @Select("SELECT `physical_guid` FROM `hydra_volume_physical_logic` WHERE `logic_guid` = #{logicGuid}") List<GUID> listPhysicalVolume( GUID logicGuid ); default List<Volume> queryAllSimpleVolumes(){ List<TitanLocalSimpleVolume> titanLocalSimpleVolumes = this.queryAllSimpleVolumes0(); return new ArrayList<>(titanLocalSimpleVolumes); } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'SimpleVolume'") List<TitanLocalSimpleVolume> queryAllSimpleVolumes0(); @Update("UPDATE `hydra_uofs_volumes` SET definition_capacity = #{definitionCapacity} WHERE guid = #{guid}") void updateDefinitionCapacity( @Param("guid") GUID guid, @Param("definitionCapacity") long definitionCapacity ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/SpannedVolumeMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.SpannedVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.local.spanned.TitanLocalSpannedVolume; import
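// A simple volume may span several physical volumes through `hydra_volume_physical_logic`, as wired by
// SimpleVolumeMapper above. Sketch (hypothetical GUIDs, configured SqlSession assumed):
//   SimpleVolumeMapper simple = session.getMapper( SimpleVolumeMapper.class );
//   simple.extendLogicalVolume( logicGuid, physicalGuid );   // attach one more physical extent
//   List<GUID> extents = simple.listPhysicalVolume( logicGuid );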
com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.ArrayList; import java.util.List; @IbatisDataAccessObject public interface SpannedVolumeMapper extends SpannedVolumeManipulator, PrimeLogicVolumeMapper { @Insert("INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type}, #{extConfig} )") void insert( SpannedVolume spannedVolume ); @Delete("DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}") void remove( GUID guid ); @Update("UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}") void update( SpannedVolume spannedVolume ); @Override default TitanLocalSpannedVolume getSpannedVolume(GUID guid){ TitanLocalSpannedVolume spannedVolume0 = this.getSpannedVolume0( guid ); if ( spannedVolume0 == null ){ return null; } spannedVolume0.setSpannedVolumeManipulator( this ); return spannedVolume0; } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `definition_capacity` AS definitionCapacity, `used_size` AS userdSize, `quota_capacity` AS quotaCapacity, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}") TitanLocalSpannedVolume getSpannedVolume0(GUID guid); default List<Volume> queryAllSpannedVolume(){ List<TitanLocalSpannedVolume> titanLocalSpannedVolumes = this.queryAllSpannedVolume0(); return new ArrayList<>(titanLocalSpannedVolumes); } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `definition_capacity` AS definitionCapacity, `used_size` AS userdSize, `quota_capacity` AS quotaCapacity, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'SpannedVolume'") List<TitanLocalSpannedVolume> queryAllSpannedVolume0(); @Update("UPDATE `hydra_uofs_volumes` SET definition_capacity = #{definitionCapacity} WHERE guid = #{guid}") void updateDefinitionCapacity(@Param("guid") GUID guid, @Param("definitionCapacity") long definitionCapacity ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/StripedVolumeMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.StripedVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.local.striped.TitanLocalStripedVolume; import com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.ArrayList; import java.util.List; @IbatisDataAccessObject public interface StripedVolumeMapper extends StripedVolumeManipulator, PrimeLogicVolumeMapper { @Insert("INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type},
#{extConfig} )") void insert( StripedVolume stripedVolume ); @Delete("DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}") void remove( GUID guid ); @Update("UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}") void update( StripedVolume stripedVolume ); @Override default TitanLocalStripedVolume getStripedVolume(GUID guid){ TitanLocalStripedVolume stripedVolume0 = this.getStripedVolume0( guid ); stripedVolume0.setStripedVolumeManipulator( this ); return stripedVolume0; } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}") TitanLocalStripedVolume getStripedVolume0(GUID guid); default List queryAllStripedVolume(){ List titanLocalStripedVolumes = this.queryAllStripedVolume0(); return new ArrayList<>(titanLocalStripedVolumes); } @Select("SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'StripedVolume'") List queryAllStripedVolume0(); @Update("UPDATE `hydra_uofs_volumes` SET definition_capacity = #{definitionCapacity} WHERE guid = #{guid}") void updateDefinitionCapacity(@Param("guid") GUID guid, @Param("definitionCapacity") long definitionCapacity ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeAllocateMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.source.VolumeAllocateManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @IbatisDataAccessObject public interface VolumeAllocateMapper extends VolumeAllocateManipulator { @Insert("INSERT INTO `hydra_volume_allocate` (`object_guid`, `child_volume_guid`, `parent_volume_guid`) VALUES ( #{objectGuid},#{childVolumeGuid},#{parentVoluemGuid} )") void insert(@Param("objectGuid") GUID objectGuid, @Param("childVolumeGuid") GUID childVolumeGuid, @Param("parentVolumeGuid") GUID parentVolumeGuid ); @Select("SELECT `child_volume_guid` FROM `hydra_volume_allocate` WHERE `object_guid` = #{objectGuid} AND `parent_volume_guid` = #{parentGuid}") GUID get( @Param("objectGuid") GUID objectGuid, @Param("parentGuid") GUID parentGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeCachePathMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; @IbatisDataAccessObject public interface VolumeCachePathMapper extends TriePathCacheManipulator { @Insert("INSERT INTO `hydra_volume_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )") void insert(@Param("guid") GUID guid, @Param("path") String path ); @Delete("DELETE FROM 
`hydra_volume_node_cache_path` WHERE `guid`=#{guid}") void remove( GUID guid ); @Select("SELECT `path` FROM `hydra_volume_node_cache_path` WHERE `guid`=#{guid}") String getPath( GUID guid ); @Select("SELECT `guid` FROM `hydra_volume_node_cache_path` WHERE `path`=#{path}") GUID getNode( String path ); @Select("SELECT `guid` FROM `hydra_volume_node_cache_path` WHERE `path`=#{path}") GUID queryGUIDByPath( String path ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeCapacityMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.entity.TitanVolumeCapacity64; import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64; import com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @IbatisDataAccessObject public interface VolumeCapacityMapper extends VolumeCapacityManipulator { @Update("UPDATE `hydra_uofs_volumes` SET `definition_capacity` = #{definitionCapacity}, `used_size` = #{usedSize}, `quota_capacity` = #{quotaCapacity} WHERE `guid` = #{volumeGuid}") void insert( VolumeCapacity64 volumeCapacity ); void remove( GUID guid ); @Select("SELECT `guid` AS volumeGuid, `definition_capacity` AS definitionCapacity, `used_size` AS usedSize, `quota_capacity` AS quotaCapacity FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}") TitanVolumeCapacity64 getVolumeCapacity(GUID guid); @Update("UPDATE `hydra_uofs_volumes` SET `used_size` = #{usedSize} WHERE `guid` = #{guid}") void update( @Param("guid") GUID guid, @Param("usedSize") long usedSize ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeOwnerMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Update; @IbatisDataAccessObject public interface VolumeOwnerMapper extends TireOwnerManipulator { @Insert("INSERT INTO `hydra_uofs_volumes_tree` (`guid`) VALUES ( #{guid} )") void insertRootNode(@Param("guid") GUID guid ); @Insert( "INSERT INTO `hydra_uofs_volumes_tree` (`guid`, `parent_guid`) VALUES (#{targetGuid}, #{parentGuid})" ) void insert( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Update( "UPDATE `hydra_uofs_volumes_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void update( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Update( "UPDATE `hydra_uofs_volumes_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}" ) void updateParentGuid( @Param("targetGuid") GUID targetGuid, @Param("parentGuid") GUID parentGUID ); @Delete( "DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{subordinateGuid} " ) void remove( @Param("subordinateGuid") GUID subordinateGuid ); @Delete( "DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{subordinateGuid} " ) void
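// The owner mapper maintains the parent/child edges of `hydra_uofs_volumes_tree`; a root is simply a row
// whose parent_guid stays NULL. Sketch (hypothetical GUID values; `owners` is an instance of this mapper):
//   owners.insertRootNode( rootGuid );
//   owners.insert( childGuid, rootGuid );
//   owners.updateParentGuid( childGuid, otherParentGuid );   // re-parent the child node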
removeBySubordinate( GUID subordinateGuid ); // @Delete("DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}") // void removeByOwner(GUID ownerGuid); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeTreeMapper.java ================================================ package com.pinecone.hydra.volume.ibatis; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.uoi.UOI; import com.pinecone.hydra.storage.volume.source.VolumeTreeManipulator; import com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import java.util.List; @IbatisDataAccessObject public interface VolumeTreeMapper extends VolumeTreeManipulator { @Insert("INSERT INTO `hydra_uofs_volumes_tree` (`guid`) VALUES ( #{guid} )") void insertRootNode(@Param("guid") GUID guid); @Override default void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){ this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() ); ownerManipulator.insertRootNode( node.getGuid() ); } @Insert("INSERT INTO `hydra_volume_nodes` (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})") void insertTreeNode( @Param("guid") GUID guid, @Param("type") UOI type, @Param("baseDataGuid") GUID baseDataGuid, @Param("nodeMetaGuid") GUID nodeMetaGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_volume_nodes WHERE guid=#{guid}") GUIDImperialTrieNode getNodeExtendsFromMeta(GUID guid ); @Select("SELECT COUNT( `id` ) FROM hydra_volume_nodes WHERE guid=#{guid}") boolean contains( GUID key ); @Override default GUIDImperialTrieNode getNode(GUID guid ) { GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid ); if( node == null ){ return node; } List<GUID> parent = this.fetchParentGuids( guid ); node.setParentGUID( parent ); return node; } @Select("SELECT id, guid, parent_guid FROM hydra_uofs_volumes_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") GUIDImperialTrieNode getTreeNodeOnly(@Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT count( * ) FROM hydra_uofs_volumes_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}") long countNode( @Param("guid") GUID guid, @Param("parentGuid") GUID parentGuid ); @Override default void purge( GUID guid ) { this.removeNodeMeta( guid ); this.removeTreeNode( guid ); } @Delete("DELETE FROM `hydra_volume_nodes` WHERE `guid`=#{guid}") void removeNodeMeta( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid` = #{guid}") void removeTreeNode( @Param("guid") GUID guid ); @Delete("DELETE FROM `hydra_uofs_volumes_tree` WHERE `parent_guid` = #{parent_guid}") void removeTreeNodeByParentGuid( @Param("parent_guid") GUID parentGuid ); @Delete("DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}") void removeTreeNodeYoke( @Param("guid") GUID guid, @Param("parent_guid") GUID parentGuid ); @Delete("DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{childGuid} AND
`parent_guid`=#{parentGuid}") void removeInheritance( @Param("chileGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); @Select("SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_uofs_volumes_tree` WHERE `parent_guid`=#{guid}") List getChildren(GUID guid ); @Select("SELECT `guid` FROM `hydra_uofs_volumes_tree` WHERE `parent_guid` = #{parentGuid}") List fetchChildrenGuids( @Param("parentGuid") GUID parentGuid ); @Select("SELECT `parent_guid` FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{guid}") List fetchParentGuids( GUID guid ); @Update("UPDATE `hydra_volume_nodes` SET `type` = #{type} WHERE guid=#{guid}") void updateType( UOI type , GUID guid ); @Select( "SELECT guid FROM hydra_uofs_volumes_tree WHERE parent_guid IS NULL " ) List fetchRoot(); @Override @Select( "SELECT COUNT( `guid` ) FROM hydra_uofs_volumes_tree WHERE `parent_guid` IS NULL AND guid = #{guid}" ) boolean isRoot( GUID guid ); @Update("UPDATE hydra_uofs_volumes_tree SET parent_guid = #{parentGuid} WHERE guid = #{childGuid}") void addChild( @Param("childGuid") GUID childGuid, @Param("parentGuid") GUID parentGuid ); } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/hydranium/VolumeMappingDriver.java ================================================ package com.pinecone.hydra.volume.ibatis.hydranium; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; public class VolumeMappingDriver extends ArchMappingDriver implements KOIMappingDriver { protected KOIMasterManipulator mKOIMasterManipulator; public VolumeMappingDriver( Processum superiorProcess ) { super(superiorProcess); } public VolumeMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) { super( superiorProcess, ibatisClient, dispenserCenter, VolumeMappingDriver.class.getPackageName().replace( "hydranium", "" ) ); this.mKOIMasterManipulator = new VolumeMasterManipulatorImpl( this ); } @Override public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/hydranium/VolumeMasterManipulatorImpl.java ================================================ package com.pinecone.hydra.volume.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.storage.volume.source.LogicVolumeManipulator; import com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator; import com.pinecone.hydra.storage.volume.source.MountPointManipulator; import com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator; import com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator; import com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator; import com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator; import com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator; import com.pinecone.hydra.storage.volume.source.VolumeAllocateManipulator; import com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator; import 
com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.volume.ibatis.MirroredVolumeMapper; import com.pinecone.hydra.volume.ibatis.MountPointMapper; import com.pinecone.hydra.volume.ibatis.PhysicalVolumeMapper; import com.pinecone.hydra.volume.ibatis.PrimeLogicVolumeMapper; import com.pinecone.hydra.volume.ibatis.SimpleVolumeMapper; import com.pinecone.hydra.volume.ibatis.SpannedVolumeMapper; import com.pinecone.hydra.volume.ibatis.SQLiteVolumeMapper; import com.pinecone.hydra.volume.ibatis.StripedVolumeMapper; import com.pinecone.hydra.volume.ibatis.VolumeAllocateMapper; import com.pinecone.hydra.volume.ibatis.VolumeCapacityMapper; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class VolumeMasterManipulatorImpl implements VolumeMasterManipulator { @Resource @Structure( type = VolumeMasterTreeManipulatorImpl.class ) KOISkeletonMasterManipulator skeletonMasterManipulator; @Resource @Structure( type = MirroredVolumeMapper.class ) MirroredVolumeManipulator mirroredVolumeManipulator; @Resource @Structure( type = MountPointMapper.class ) MountPointManipulator mountPointManipulator; @Resource @Structure( type = SimpleVolumeMapper.class ) SimpleVolumeManipulator simpleVolumeManipulator; @Resource @Structure( type = SpannedVolumeMapper.class ) SpannedVolumeManipulator spannedVolumeManipulator; @Resource @Structure( type = StripedVolumeMapper.class ) StripedVolumeManipulator stripedVolumeManipulator; @Resource @Structure( type = VolumeCapacityMapper.class ) VolumeCapacityManipulator volumeCapacityManipulator; @Resource @Structure( type = PhysicalVolumeMapper.class ) PhysicalVolumeManipulator physicalVolumeManipulator; @Resource @Structure( type = VolumeAllocateMapper.class ) VolumeAllocateManipulator volumeAllocateManipulator; @Resource @Structure( type = SQLiteVolumeMapper.class ) SQLiteVolumeManipulator sqliteVolumeManipulator; @Resource @Structure( type = PrimeLogicVolumeMapper.class ) protected LogicVolumeManipulator primeLogicVolumeManipulator; public VolumeMasterManipulatorImpl() { } public VolumeMasterManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( VolumeMasterManipulatorImpl.class, Map.of(), this ); this.skeletonMasterManipulator = new VolumeMasterTreeManipulatorImpl( driver ); } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } @Override public MirroredVolumeManipulator getMirroredVolumeManipulator() { return this.mirroredVolumeManipulator; } @Override public MountPointManipulator getMountPointManipulator() { return this.mountPointManipulator; } @Override public SimpleVolumeManipulator getSimpleVolumeManipulator() { return this.simpleVolumeManipulator; } @Override public SpannedVolumeManipulator getSpannedVolumeManipulator() { return this.spannedVolumeManipulator; } @Override public StripedVolumeManipulator getStripedVolumeManipulator() { return this.stripedVolumeManipulator; } @Override public VolumeCapacityManipulator getVolumeCapacityManipulator() { return this.volumeCapacityManipulator; } @Override public PhysicalVolumeManipulator getPhysicalVolumeManipulator() { return this.physicalVolumeManipulator; } @Override public VolumeAllocateManipulator getVolumeAllocateManipulator() { return this.volumeAllocateManipulator; } @Override public 
SQLiteVolumeManipulator getSQLiteVolumeManipulator() { return this.sqliteVolumeManipulator; } @Override public LogicVolumeManipulator getPrimeLogicVolumeManipulator() { return this.primeLogicVolumeManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/hydranium/VolumeMasterTreeManipulatorImpl.java ================================================ package com.pinecone.hydra.volume.ibatis.hydranium; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.hydra.volume.ibatis.VolumeCachePathMapper; import com.pinecone.hydra.volume.ibatis.VolumeOwnerMapper; import com.pinecone.hydra.volume.ibatis.VolumeTreeMapper; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; @Component public class VolumeMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource @Structure( type = VolumeTreeMapper.class ) TrieTreeManipulator trieTreeManipulator; @Resource @Structure( type = VolumeCachePathMapper.class ) TriePathCacheManipulator triePathCacheManipulator; @Resource @Structure( type = VolumeOwnerMapper.class ) TireOwnerManipulator tireOwnerManipulator; public VolumeMasterTreeManipulatorImpl() { } public VolumeMasterTreeManipulatorImpl( KOIMappingDriver driver ) { driver.autoConstruct( VolumeMasterTreeManipulatorImpl.class, Map.of(), this ); } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.tireOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.triePathCacheManipulator; } } ================================================ FILE: Hydra/hydra-kom-default-driver/src/main/resources/mapper/kernel/task/InstanceNodeMapper.xml ================================================
(MyBatis mapper XML — the XML markup was lost in extraction; only the recoverable column lists and SQL text follow.)
guid, task_guid, name, task_name, business_time, expect_time, fire_time, start_time, finish_time, schedule_host_time, submit_time, schedule_time, priority, image_path, actually_priority, run_status, schedule_cycle, schedule_type, task_type, dry_run, run_count, sequence_cnt, retry_cnt, latest_start_time, latest_end_time, error_cause, processor_name, create_time, update_time
#{guid}, #{taskGuid}, #{instanceName}, #{taskName}, #{businessTime}, #{expectTime}, #{fireTime}, #{startTime}, #{finishTime}, #{scheduleHostTime}, #{submitTime}, #{scheduleTime}, #{priority}, #{imagePath}, #{actuallyPriority}, #{runStatus}, #{scheduleCycle}, #{scheduleType}, #{taskType}, #{dryRun}, #{runCount}, #{sequenceCnt}, #{retryCnt}, #{lastStartTime}, #{lastEndTime}, #{errorCause}, #{processorName}, #{createTime}, #{updateTime}
INSERT INTO hydra_task_instances ( ) VALUES ( )
UPDATE hydra_task_instances SET task_guid = #{taskGuid}, name = #{instanceName}, task_name = #{taskName}, business_time = #{businessTime}, expect_time = #{expectTime}, fire_time = #{fireTime}, start_time = #{startTime}, finish_time = #{finishTime}, schedule_host_time = #{scheduleHostTime}, submit_time = #{submitTime}, schedule_time = #{scheduleTime}, priority = #{priority}, image_path = #{imagePath}, actually_priority = #{actuallyPriority}, run_status = #{runStatus}, schedule_cycle = #{kernelScheduleCycle}, schedule_type = #{kernelScheduleType}, task_type = #{taskType}, dry_run = #{dryRun}, run_count = #{runCount}, sequence_cnt = #{sequenceCnt}, retry_cnt = #{retryCnt}, latest_start_time = #{lastStartTime}, latest_end_time = #{lastEndTime}, error_cause = #{errorCause}, processor_name = #{processorName}, update_time = #{updateTime} WHERE guid = #{guid}
================================================ FILE: Hydra/hydra-kom-default-driver/src/main/resources/mapper/kernel/task/TaskNodeMapper.xml ================================================
(MyBatis mapper XML — markup lost in extraction; recovered SQL text and column list follow.)
INSERT INTO hydra_task_task_node ( guid, name, schedule_cron, image_path, type, resource_type, deployment_method, priority, actually_priority, dry_run, schedule_cycle, schedule_type, processor_name, enable, create_time, update_time ) VALUES ( #{guid}, #{name}, #{scheduleCron}, #{imagePath}, #{type}, #{resourceType}, #{deploymentMethod}, #{priority}, #{actuallyPriority}, #{dryRun}, #{scheduleCycle}, #{scheduleType}, #{processorName}, #{enable}, #{createTime}, #{updateTime} )
UPDATE hydra_task_task_node SET name = #{name}, schedule_cron = #{scheduleCron}, image_path = #{imagePath}, type = #{type}, resource_type = #{resourceType}, deployment_method = #{deploymentMethod}, priority = #{priority}, actually_priority = #{actuallyPriority}, dry_run = #{dryRun}, schedule_cycle = #{scheduleCycle}, schedule_type = #{scheduleType}, schedule_start_time = #{scheduleStartTime}, schedule_end_time = #{scheduleEndTime}, next_schedule_time = #{nextScheduleTime}, processor_name = #{processorName}, enable = #{enable}, update_time = #{updateTime} WHERE guid = #{guid}
id, guid, owned_service_guid, name, schedule_cron, image_path, type, resource_type, deployment_method, priority, actually_priority, dry_run, schedule_cycle, schedule_type, schedule_start_time, schedule_end_time, enable, next_schedule_time, create_time, update_time
================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/pom.xml ================================================
(Maven POM — XML markup lost in extraction; the recoverable coordinate, property, and dependency tokens follow.)
hydra com.pinecone.hydra 2.5.1 4.0.0 com.pinecone.hydra.sdk.grpc hydra-lib-grpc-service-sdk 1.2.1
11 11 UTF-8 1.62.2 4.28.2 4.28.2
org.slf4j slf4j-api
com.pinecone pinecone 2.5.1 compile
io.grpc grpc-netty-shaded ${grpc.version}
io.grpc grpc-protobuf ${grpc.version}
io.grpc grpc-stub ${grpc.version}
jakarta.annotation jakarta.annotation-api 1.3.5
com.pinecone.hydra.kernel hydra-service-control 2.1.0 compile
================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/client/GrpcAppointClient.java ================================================ package com.pinecone.hydra.grpc.client; import java.util.concurrent.TimeUnit; import com.pinecone.hydra.appoints.AppointNodus; import io.grpc.ManagedChannel; import io.grpc.ManagedChannelBuilder; public class GrpcAppointClient implements AppointNodus { protected String name; protected long messageNodeId; protected ManagedChannel managedChannel; protected final GrpcClientConfig grpcClientConfig; protected ManagedChannelBuilder channelBuilder; public GrpcAppointClient( String name, long messageNodeId, GrpcClientConfig config ) { this.name = name; this.messageNodeId = messageNodeId; this.grpcClientConfig = config; ManagedChannelBuilder builder = ManagedChannelBuilder.forAddress( config.getHost(), config.getPort() ) .usePlaintext(); if( config.getIdleTimeoutMillis() > 0 ) { builder.idleTimeout(
config.getIdleTimeoutMillis(), TimeUnit.MILLISECONDS ); } if( config.getKeepAliveTimeSeconds() > 0 ) { builder.keepAliveTime( config.getKeepAliveTimeSeconds(), TimeUnit.SECONDS ); builder.keepAliveWithoutCalls( true ); } this.channelBuilder = builder; } public GrpcAppointClient( long messageNodeId, GrpcClientConfig config ) { this( "grpc-client-" + config.getHost() + "-" + config.getPort(), messageNodeId, config ); } public GrpcAppointClient( GrpcClientConfig config ) { this( config.getPort(), config ); } public ManagedChannelBuilder channelBuilder() { return this.channelBuilder; } public ManagedChannel getChannel() { return this.managedChannel; } @Override public String getName() { return this.name; } @Override public GrpcClientConfig getConfig() { return this.grpcClientConfig; } @Override public void close() { if( this.managedChannel != null ) { this.managedChannel.shutdownNow(); this.managedChannel = null; } } public void shutdown( long t, TimeUnit u ) throws InterruptedException { if( this.managedChannel != null ) { this.managedChannel.shutdown().awaitTermination( t, u ); this.managedChannel = null; } } public void shutdownNow() { if( this.managedChannel != null ) { this.managedChannel.shutdownNow(); this.managedChannel = null; } } @Override public void execute() throws Exception { if( this.managedChannel == null ) { this.managedChannel = this.channelBuilder.build(); } } @Override public long getMessageNodeId() { return this.messageNodeId; } public long getClientId() { return this.getMessageNodeId(); } public boolean isShutdown() { return this.managedChannel == null || this.managedChannel.isShutdown(); } public boolean isTerminated() { return this.managedChannel == null || this.managedChannel.isTerminated(); } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/client/GrpcClientConfig.java ================================================ package com.pinecone.hydra.grpc.client; import java.util.Map; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.json.JSONObject; public class GrpcClientConfig extends JSONConfig implements PatriarchalConfig { private final String host; private final int port; private final boolean enable; private final long idleTimeoutMillis; private final long keepAliveTimeSeconds; private final boolean autoReconnect; private final boolean enableHeartbeat; private final long heartbeatIntervalMillis; public GrpcClientConfig( JSONConfig parent ) { this( (Map) null, parent ); } public GrpcClientConfig( JSONObject thisScope, JSONConfig parent ) { this( thisScope.getMap(), parent ); } public GrpcClientConfig( JSONObject thisScope ) { this( thisScope.getMap(), null ); } public GrpcClientConfig( Map thisScope, JSONConfig parent ) { super( thisScope, parent ); this.host = this.optString( "host", "localhost" ); this.port = this.optInt( "port", 5888 ); this.enable = this.optBoolean( "enable", true ); this.idleTimeoutMillis = this.optLong( "idleTimeoutMillis", 30L ); this.keepAliveTimeSeconds = this.optLong( "keepAliveTimeoutSec", 30L ); this.autoReconnect = this.optBoolean( "autoReconnect", true ); this.enableHeartbeat = this.optBoolean( "enableHeartbeat", false ); this.heartbeatIntervalMillis = this.optLong( "heartbeatIntervalMills", 2000L ); } public GrpcClientConfig() { this( null ); } public String getHost() { return this.host; } public int getPort() { return this.port; } public boolean isEnable() { 
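// The keys this config reads, shown as a hypothetical example scope (defaults are the ones coded in the
// constructor above):
//   { "host": "10.0.0.5", "port": 5888, "enable": true, "idleTimeoutMillis": 30000,
//     "keepAliveTimeoutSec": 30, "autoReconnect": true, "enableHeartbeat": false,
//     "heartbeatIntervalMills": 2000 }
// Note that `keepAliveTimeSeconds` is populated from "keepAliveTimeoutSec" and the heartbeat interval
// from "heartbeatIntervalMills", exactly as those keys are spelled in the constructor.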
return this.enable; } public long getIdleTimeoutMillis() { return this.idleTimeoutMillis; } public long getKeepAliveTimeSeconds() { return this.keepAliveTimeSeconds; } public boolean isAutoReconnect() { return this.autoReconnect; } public boolean isEnableHeartbeat() { return this.enableHeartbeat; } public long getHeartbeatIntervalMillis() { return this.heartbeatIntervalMillis; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/server/GrpcAppointServer.java ================================================ package com.pinecone.hydra.grpc.server; import java.io.IOException; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.appoints.AppointNodus; import io.grpc.Server; import io.grpc.ServerBuilder; public class GrpcAppointServer implements AppointNodus { protected String name; protected long messageNodeId; protected Server grpcServer; protected GrpcProcess grpcProcess; protected Processum parentProcess; protected final GrpcServerConfig grpcServerConfig; protected ServerBuilder serverBuilder; public GrpcAppointServer( String name, long messageNodeId, GrpcServerConfig config, Processum parentProcess ) { this.name = name; this.messageNodeId = messageNodeId; this.grpcServerConfig = config; this.parentProcess = parentProcess; ServerBuilder builder = ServerBuilder.forPort( config.getPort() ); if( config.getHandshakeTimeoutMillis() > 0 ) { builder.handshakeTimeout( config.getHandshakeTimeoutMillis(), TimeUnit.MILLISECONDS ); } if( config.getKeepAliveTimeSeconds() > 0 ) { builder.keepAliveTime( config.getKeepAliveTimeSeconds(), TimeUnit.SECONDS ); } builder.keepAliveTimeout( config.getKeepAliveTimeoutSeconds(), TimeUnit.SECONDS ); builder.permitKeepAliveWithoutCalls( config.isPermitKeepAliveWithoutCalls() ); builder.maxInboundMessageSize( config.getMaxInboundMessageSize() ); builder.maxInboundMetadataSize( config.getMaxInboundMetadataSize() ); builder.executor( Executors.newCachedThreadPool() ); this.serverBuilder = builder; } public GrpcAppointServer( String name, long messageNodeId, GrpcServerConfig config ) { this( name, messageNodeId, config, null ); } public GrpcAppointServer( long messageNodeId, GrpcServerConfig config ) { this( messageNodeId, config, null ); } public GrpcAppointServer( long messageNodeId, GrpcServerConfig config, Processum parentProcess ) { this( "grpc-server-" + config.getHost() + "-" + config.getPort(), messageNodeId, config, parentProcess ); } public GrpcAppointServer( GrpcServerConfig config ) { this( config.getPort(), config ); } public GrpcAppointServer( GrpcServerConfig config, Processum parentProcess ) { this( config.getPort(), config, parentProcess ); } public ServerBuilder serverBuilder() { return this.serverBuilder; } @Override public String getName() { return this.name; } @Override public GrpcServerConfig getConfig() { return this.grpcServerConfig; } @Override public void close() { if( this.grpcServer != null ) { this.grpcServer.shutdownNow(); this.grpcServer = null; this.grpcProcess = null; } } public void shutdown() { if( this.grpcServer != null ) { this.grpcServer.shutdown(); this.grpcServer = null; this.grpcProcess = null; } } @Override public void execute() throws Exception { try { this.start(); } catch ( ProvokeHandleException e ) { if ( e.getCause() instanceof Exception ) { throw (Exception) e.getCause(); } } } 
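// Typical server lifecycle, as a minimal sketch (`cfg` and `myService` are hypothetical; execute() builds
// the io.grpc.Server lazily and hands it to a GrpcProcess on an affinity thread):
//   GrpcAppointServer server = new GrpcAppointServer( 1L, cfg );
//   server.serverBuilder().addService( myService );
//   server.execute();
//   server.awaitTermination();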
public void startGrpcServerOnly() throws IOException { this.grpcServer.start(); } public void start( Processum parentProcess ) { if ( this.grpcServer == null ) { this.grpcServer = this.serverBuilder.build(); } this.grpcProcess = new GrpcProcess( this, parentProcess ); this.grpcProcess.start(); } public void start() { this.start( this.parentProcess ); } public GrpcProcess getProcess() { return this.grpcProcess; } public void awaitTermination() throws InterruptedException { this.grpcServer.awaitTermination(); } public void awaitTermination( long t, TimeUnit u ) throws InterruptedException { this.grpcServer.awaitTermination( t, u ); } @Override public long getMessageNodeId() { return this.messageNodeId; } public boolean isShutdown() { return this.grpcServer == null || this.grpcServer.isShutdown(); } public boolean isTerminated() { return this.grpcServer == null || this.grpcServer.isTerminated(); } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/server/GrpcProcess.java ================================================ package com.pinecone.hydra.grpc.server; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Processum; public class GrpcProcess extends ArchProcessum { protected Logger log = LoggerFactory.getLogger( this.getClass() ); protected Thread affinityThread; protected GrpcAppointServer grpcAppointServer; public GrpcProcess( GrpcAppointServer server, Processum parent ) { super( server.getName(), parent); this.grpcAppointServer = server; } @Override public void start() { if ( this.affinityThread != null ) { throw new IllegalStateException( "[GrpcAppointServer] Process has already started." ); } CompletableFuture future = new CompletableFuture<>(); this.affinityThread = new Thread(()->{ try { this.grpcAppointServer.startGrpcServerOnly(); log.info( "[GrpcAppointServer] Process has started. " ); future.complete(null); this.grpcAppointServer.awaitTermination(); log.info( "[GrpcAppointServer] Process has terminated. " ); } catch ( Exception e ) { future.completeExceptionally( e ); } }); this.affinityThread.setName( ( this.getName() + "-main-" + this.affinityThread.getName() ).toLowerCase() ); this.affinityThread.setDaemon( false ); this.setThreadAffinity( this.affinityThread ); this.affinityThread.start(); try { Object e = future.get(); log.info( "[GrpcAppointServer] Process redirect to parent thread. 
" ); if ( future.isCompletedExceptionally() ) { if ( e instanceof Exception ) { throw new ProvokeHandleException( ((Exception)e).getCause() ); } } } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); } catch ( ExecutionException e ) { throw new ProvokeHandleException( e.getCause() ); } } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/server/GrpcServerConfig.java ================================================ package com.pinecone.hydra.grpc.server; import java.util.Map; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.json.JSONObject; public class GrpcServerConfig extends JSONConfig implements PatriarchalConfig { private final String host; private final int port; private final boolean enabled; private final long handshakeTimeoutMillis; private final long keepAliveTimeSeconds; private final long keepAliveTimeoutSeconds; private final int maxConcurrentCalls; private final int maxInboundMessageSize; private final int maxInboundMetadataSize; private final boolean permitKeepAliveWithoutCalls; public GrpcServerConfig( JSONConfig parent ) { this( (Map) null, parent ); } public GrpcServerConfig( JSONObject thisScope, JSONConfig parent ) { this( thisScope.getMap(), parent ); } public GrpcServerConfig( JSONObject thisScope ) { this( thisScope.getMap(), null ); } public GrpcServerConfig( Map thisScope, JSONConfig parent ) { super( thisScope, parent ); this.host = this.optString( "host", "0.0.0.0" ); this.port = this.optInt( "port", 5888 ); this.enabled = this.optBoolean( "enable", true ); this.handshakeTimeoutMillis = this.optLong( "handshakeTimeoutMillis", 0L ); this.keepAliveTimeSeconds = this.optLong( "keepAliveTimeoutSec", 0L ); this.keepAliveTimeoutSeconds = this.optLong( "keepAliveAckTimeoutSec", 20L ); this.maxConcurrentCalls = this.optInt( "maximumConnections", Integer.MAX_VALUE ); // gRPC inbound 限制 this.maxInboundMessageSize = this.optInt( "maxInboundMessageSize", 4 * 1024 * 1024 ); this.maxInboundMetadataSize = this.optInt( "maxInboundMetadataSize", 8 * 1024 ); // 是否允许无调用时keepalive this.permitKeepAliveWithoutCalls = this.optBoolean( "permitKeepAliveWithoutCalls", true ); } public GrpcServerConfig() { this(null ); } public String getHost() { return this.host; } public int getPort() { return this.port; } public boolean isEnabled() { return this.enabled; } public long getHandshakeTimeoutMillis() { return this.handshakeTimeoutMillis; } public long getKeepAliveTimeSeconds() { return this.keepAliveTimeSeconds; } public long getKeepAliveTimeoutSeconds() { return this.keepAliveTimeoutSeconds; } public int getMaxConcurrentCalls() { return this.maxConcurrentCalls; } public int getMaxInboundMessageSize() { return this.maxInboundMessageSize; } public int getMaxInboundMetadataSize() { return this.maxInboundMetadataSize; } public boolean isPermitKeepAliveWithoutCalls() { return this.permitKeepAliveWithoutCalls; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/client/GrpcServiceClient.java ================================================ package com.pinecone.hydra.service.registry.grpc.client; import com.pinecone.framework.util.StringUtils; import com.pinecone.hydra.appoints.AppointNodus; import com.pinecone.hydra.grpc.client.GrpcAppointClient; import 
com.pinecone.hydra.service.registry.client.ArchServiceClient; import com.pinecone.hydra.service.registry.client.ServiceClient; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.ServiceControlRPCException; import com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage; import com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamGrpc; import com.pinecone.hydra.service.registry.grpc.server.iface.ServiceLifecycleImpl; import com.pinecone.hydra.service.registry.grpc.server.iface.ServiceMetaManipulationIfaceImpl; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.*; import com.pinecone.hydra.service.registry.grpc.server.meta.*; import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface; import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface; import io.grpc.stub.StreamObserver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.TimeUnit; public class GrpcServiceClient extends ArchServiceClient implements ServiceClient { protected final Logger mLogger = LoggerFactory.getLogger(this.getClass()); protected GrpcAppointClient mGrpcAppointClient; private StreamObserver<ControlMessage> controlStream; protected ServiceLifecycleGrpc.ServiceLifecycleBlockingStub mLifecycleStub; protected ServiceMetaGrpc.ServiceMetaBlockingStub mMetaManipulationStub; protected ServiceLifecycleIface mLifecycle; protected ServiceMetaManipulationIface mMetaManipulation; public GrpcServiceClient( @Nullable GUID serviceId, GrpcAppointClient appointClient, GuidAllocator guidAllocator ) { super(serviceId, guidAllocator); this.mGrpcAppointClient = appointClient; } public GrpcServiceClient( GrpcAppointClient appointClient, GuidAllocator guidAllocator ) { this(null, appointClient, guidAllocator); } private void initControlStream() { ControlStreamGrpc.ControlStreamStub asyncStub = ControlStreamGrpc.newStub( this.mGrpcAppointClient.getChannel() ); this.controlStream = asyncStub.connect( new StreamObserver<ControlMessage>() { @Override public void onNext(ControlMessage value) { } @Override public void onError(Throwable t) { } @Override public void onCompleted() { } } ); ControlMessage message = ControlMessage.newBuilder() .setClientId( this.getClientId() ) .build(); this.controlStream.onNext(message); } @Override protected void initRPCSubsystem() throws ServiceControlRPCException { } public long getClientId() { return this.mGrpcAppointClient.getClientId(); } @Override public void startService() throws ServiceControlRPCException { if ( !this.mGrpcAppointClient.isShutdown() ) { throw new IllegalStateException("gRPC client already started."); } try { this.mGrpcAppointClient.execute(); this.mLifecycleStub = ServiceLifecycleGrpc.newBlockingStub( this.mGrpcAppointClient.getChannel() ); this.mMetaManipulationStub = ServiceMetaGrpc.newBlockingStub( this.mGrpcAppointClient.getChannel() ); this.mLifecycle = new ServiceLifecycleImpl( this.mLifecycleStub ); this.mMetaManipulation = new ServiceMetaManipulationIfaceImpl( this.mMetaManipulationStub ); this.mLogger.info("gRPC initialization successful"); this.initControlStream(); } catch ( Exception e ) { throw new ServiceControlRPCException(e); } } @Override public void terminateService() { if ( this.mGrpcAppointClient.isTerminated() ) { throw new IllegalStateException( "gRPC client not started."
); } this.deregister(); try { this.mGrpcAppointClient.shutdown( 5, TimeUnit.SECONDS ); } catch ( InterruptedException e ) { this.mGrpcAppointClient.shutdownNow(); } } @Override public GUID registerService(GUID serviceId, GUID deployGuid) throws ClientServiceRegisterException { RegisterServiceRequest.Builder builder = RegisterServiceRequest.newBuilder(); builder.setServiceId(serviceId.toString()); builder.setClientId( this.getClientId() ); if (deployGuid != null) { builder.setDeployId(deployGuid.toString()); } RegisterServiceRequest request = builder.build(); try { RegisterServiceReply reply = this.mLifecycleStub.registerService(request); String instanceId = reply.getInstanceId(); if ( StringUtils.isNotBlank( instanceId ) ) { this.mInstanceId = this.mGuidAllocator.parse(instanceId); this.mLogger.info( "Successfully registered service : {}, instanceId: {}", serviceId, instanceId ); } } catch ( Exception e ) { this.mLogger.error("Register Service {} failed", serviceId.toString()); throw new ClientServiceRegisterException(e); } return this.mInstanceId; } @Override public void deregister() { if (this.mInstanceId != null) { InstanceIdRequest request = InstanceIdRequest.newBuilder() .setInstanceId(this.mInstanceId.toString()) .build(); this.mLifecycleStub.deregisterServiceByInstanceId(request); } } @Override public AppointNodus getAppointNodus() { return this.mGrpcAppointClient; } @Override public GuidAllocator getGuidAllocator() { return this.mGuidAllocator; } public ServiceLifecycleGrpc.ServiceLifecycleBlockingStub getLifecycleStub() { return this.mLifecycleStub; } public ServiceMetaGrpc.ServiceMetaBlockingStub getMetaManipulationStub() { return this.mMetaManipulationStub; } public ServiceLifecycleIface getServiceLifecycle() { return this.mLifecycle; } public ServiceMetaManipulationIface getMetaManipulation() { return this.mMetaManipulation; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/ClientMetaDataInterceptor.java ================================================ package com.pinecone.hydra.service.registry.grpc.server; import java.net.SocketAddress; import io.grpc.Context; import io.grpc.Contexts; import io.grpc.Grpc; import io.grpc.Metadata; import io.grpc.ServerCall; import io.grpc.ServerCallHandler; import io.grpc.ServerInterceptor; public class ClientMetaDataInterceptor implements ServerInterceptor { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { SocketAddress remoteAddr = call.getAttributes().get(Grpc.TRANSPORT_ATTR_REMOTE_ADDR); Context ctx = Context.current().withValue(ClientAddress.CLIENT_ADDR, remoteAddr); return Contexts.interceptCall(ctx, call, headers, next); } } final class ClientAddress { private ClientAddress() {} public static final io.grpc.Context.Key<SocketAddress> CLIENT_ADDR = io.grpc.Context.key( "client-addr" ); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcControlStreamService.java ================================================ package com.pinecone.hydra.service.registry.grpc.server; import java.net.SocketAddress; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage; import com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamGrpc; import com.pinecone.hydra.service.registry.server.ServiceManager; import io.grpc.Context;
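// ClientMetaDataInterceptor publishes the transport's remote address under ClientAddress.CLIENT_ADDR, so
// connect() below can recover it via CLIENT_ADDR.get() even though the generated stubs never expose it.
// The wiring, as performed in GrpcServiceAppointServer.hookServiceManager:
//   ServerInterceptors.intercept( new GrpcControlStreamService( manager, appointServer ),
//                                 new ClientMetaDataInterceptor() );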
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcControlStreamService.java
================================================
package com.pinecone.hydra.service.registry.grpc.server;

import java.net.SocketAddress;

import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage;
import com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamGrpc;
import com.pinecone.hydra.service.registry.server.ServiceManager;
import io.grpc.Context;
import io.grpc.StatusRuntimeException;
import io.grpc.stub.StreamObserver;

public class GrpcControlStreamService extends ControlStreamGrpc.ControlStreamImplBase {
    private final ServiceManager           serviceManager;
    private final GrpcServiceAppointServer appointServer;
    private final GuidAllocator            guidAllocator;

    public GrpcControlStreamService( ServiceManager serviceManager, GrpcServiceAppointServer appointServer ) {
        this.serviceManager = serviceManager;
        this.appointServer  = appointServer;
        this.guidAllocator  = serviceManager.getServicesInstrument().getGuidAllocator();
    }

    @Override
    public StreamObserver<ControlMessage> connect( StreamObserver<ControlMessage> responseObserver ) {
        final SocketAddress remoteAddr = ClientAddress.CLIENT_ADDR.get();
        final String connectId = remoteAddr.toString() + "_" + this.guidAllocator.nextGUID().toString();

        return new StreamObserver<>() {
            Long clientId = null;

            @Override
            public void onNext( ControlMessage message ) {
                if ( this.clientId == null ) {
                    this.clientId = message.getClientId();
                    GrpcSession session = new GrpcSession( connectId, remoteAddr, responseObserver );
                    serviceManager.serviceEventHooker().afterNewConnectionInbound(
                            this.clientId, connectId, session, null, () -> new GrpcServiceClientile( appointServer )
                    );
                }
                // Heartbeats or other control directives can be handled here.
            }

            @Override
            public void onError( Throwable t ) {
                if ( t instanceof StatusRuntimeException ) {
                    serviceManager.getLogger().info(
                            "[ServiceLifecycle] `{}` has requested `cancelled` to detach, with what '{}', addr: `{}`.",
                            this.clientId, t.getMessage(), remoteAddr.toString()
                    );
                }
                else {
                    serviceManager.getLogger().error(
                            "[ServiceFatality] `{}` has provoked `exception` to detach, with what '{}', addr: `{}`.",
                            this.clientId, t.getMessage(), remoteAddr.toString()
                    );
                }
                this.detach();
            }

            @Override
            public void onCompleted() {
                this.detach();
            }

            private void detach() {
                if ( this.clientId != null ) {
                    serviceManager.serviceEventHooker().afterConnectionDetach( this.clientId, connectId, null );
                }
            }
        };
    }
}
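The bidi control stream above implements a simple identification handshake: the first ControlMessage a client sends carries its clientId, and only then does the server materialize a GrpcSession and fire afterNewConnectionInbound; subsequent messages are reserved for heartbeats and control directives. A hypothetical client-side heartbeat loop over the same stream (the stream reference and clientId mirror the fields of GrpcServiceClient above; the 30-second cadence and clientId-only message format are assumptions, not something the SDK prescribes):

// Hypothetical heartbeat sketch -- controlStream and clientId are assumed to come from GrpcServiceClient.
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

ScheduledExecutorService ticker = Executors.newSingleThreadScheduledExecutor();
ticker.scheduleAtFixedRate(
        () -> controlStream.onNext( ControlMessage.newBuilder().setClientId( clientId ).build() ),
        30, 30, TimeUnit.SECONDS );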
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceAppointServer.java
================================================
package com.pinecone.hydra.service.registry.grpc.server;

import java.util.concurrent.TimeUnit;

import com.pinecone.hydra.grpc.server.GrpcAppointServer;
import com.pinecone.hydra.grpc.server.GrpcProcess;
import com.pinecone.hydra.grpc.server.GrpcServerConfig;
import com.pinecone.hydra.service.registry.appoint.ServiceAppointServer;
import com.pinecone.hydra.service.registry.server.ServiceManager;
import io.grpc.ServerBuilder;
import io.grpc.ServerInterceptors;

public class GrpcServiceAppointServer implements ServiceAppointServer {
    protected ServiceManager serviceManager;

    protected final GrpcAppointServer grpcAppointServer;

    public GrpcServiceAppointServer( GrpcAppointServer server ) {
        this.grpcAppointServer = server;
    }

    @Override
    public ServiceManager serviceManager() {
        return this.serviceManager;
    }

    @Override
    public ServiceAppointServer hookServiceManager( ServiceManager serviceManager ) {
        if ( this.serviceManager != null ) {
            throw new IllegalStateException( "Manager has already been hooked." );
        }
        this.serviceManager = serviceManager;

        ServerBuilder<?> builder = this.grpcAppointServer.serverBuilder();
        builder
                .addService( new GrpcServiceLifecycleService( serviceManager ) )
                .addService( new GrpcServiceMetaService( serviceManager ) )
                .addService( ServerInterceptors.intercept(
                        new GrpcControlStreamService( serviceManager, this ),
                        new ClientMetaDataInterceptor()
                ) );

        this.serviceManager.getLogger().info( "GrpcAppointServer[{}] has been hooked.", this.getName() );
        return this;
    }

    @Override
    public String getName() {
        return this.grpcAppointServer.getName();
    }

    @Override
    public GrpcServerConfig getConfig() {
        return this.grpcAppointServer.getConfig();
    }

    @Override
    public void close() {
        this.grpcAppointServer.close();
    }

    public void awaitTermination() throws InterruptedException {
        this.grpcAppointServer.awaitTermination();
    }

    public void awaitTermination( long t, TimeUnit u ) throws InterruptedException {
        this.grpcAppointServer.awaitTermination( t, u );
    }

    public GrpcProcess getProcess() {
        return this.grpcAppointServer.getProcess();
    }

    @Override
    public void execute() throws Exception {
        this.grpcAppointServer.execute();
    }

    @Override
    public long getMessageNodeId() {
        return this.grpcAppointServer.getMessageNodeId();
    }

    @Override
    public boolean isTerminated() {
        return this.grpcAppointServer == null || this.grpcAppointServer.isTerminated();
    }

    @Override
    public boolean isStarted() {
        return !this.grpcAppointServer.isShutdown();
    }
}
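Bringing the appoint server up is a three-step affair: construct the wrapper, hook the manager (which registers the three gRPC services shown above), then execute. A minimal boot sketch, assuming grpcAppointServer and serviceManager are configured elsewhere:

// Hypothetical boot sketch -- grpcAppointServer and serviceManager are assumed pre-configured.
GrpcServiceAppointServer appoint = new GrpcServiceAppointServer( grpcAppointServer );
appoint.hookServiceManager( serviceManager );   // registers lifecycle, meta and control-stream services
appoint.execute();                              // starts the underlying gRPC server
appoint.awaitTermination();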
"null" : connection.getClass().getName()) ); } GrpcSession session = (GrpcSession) connection; this.mClientId = clientId; this.mRemoteAddress = session.getRemoteAddress(); this.mSessions.put( connectId, session ); } @Override public void afterConnectionDetach( Long clientId, Object connectId, Object connection ) { GrpcSession removed = this.mSessions.remove( connectId ); if ( removed != null ) { removed.markClosed(); } } @Override public ServiceAppointServer serviceAppointServer() { return this.mServiceAppointServer; } @Override public long getClientId() { return this.mClientId; } @Override public int connectionCount() { return this.mSessions.size(); } @Override public boolean isDefunct() { return this.mSessions.isEmpty(); } @Override public GrpcSession queryNativeConnection(Object connectionIdentity) { return this.mSessions.get( connectionIdentity ); } @Override public Collection connections() { return this.mSessions.values(); } @Override public void shutdown() { // 尽力关闭所有 session:完成 outbound 流(server 侧主动结束) for ( GrpcSession s : this.mSessions.values() ) { if (s == null) { continue; } if ( s.markClosed() ) { try { // outbound 是 StreamObserver,onCompleted 可以安全调用(若已结束会抛异常则忽略) @SuppressWarnings("unchecked") io.grpc.stub.StreamObserver out = (io.grpc.stub.StreamObserver) s.getOutbound(); out.onCompleted(); } catch (Throwable ignored) { // best-effort close } } } this.mSessions.clear(); } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceLifecycleService.java ================================================ package com.pinecone.hydra.service.registry.grpc.server; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest; import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleGrpc; import com.pinecone.hydra.service.registry.server.ServiceLifecycleService; import com.pinecone.hydra.service.registry.server.ServiceManager; import io.grpc.stub.StreamObserver; public class GrpcServiceLifecycleService extends ServiceLifecycleGrpc.ServiceLifecycleImplBase { private final ServiceLifecycleService lifecycleService; public GrpcServiceLifecycleService(ServiceManager serviceManager) { this.lifecycleService = serviceManager.serviceLifecycleService(); } @Override public void registerService( RegisterServiceRequest request, StreamObserver responseObserver ) { try { RegisterServiceDTO dto = new RegisterServiceDTO(); dto.setClientId(request.getClientId()); dto.setServiceId(request.getServiceId()); dto.setDeployId(request.getDeployId()); String instanceId = this.lifecycleService.registerService(dto); 
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceLifecycleService.java
================================================
package com.pinecone.hydra.service.registry.grpc.server;

import com.pinecone.framework.system.ProvokeHandleException;
import com.pinecone.hydra.service.registry.ClientServiceRegisterException;
import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleGrpc;
import com.pinecone.hydra.service.registry.server.ServiceLifecycleService;
import com.pinecone.hydra.service.registry.server.ServiceManager;
import io.grpc.stub.StreamObserver;

public class GrpcServiceLifecycleService extends ServiceLifecycleGrpc.ServiceLifecycleImplBase {
    private final ServiceLifecycleService lifecycleService;

    public GrpcServiceLifecycleService( ServiceManager serviceManager ) {
        this.lifecycleService = serviceManager.serviceLifecycleService();
    }

    @Override
    public void registerService( RegisterServiceRequest request, StreamObserver<RegisterServiceReply> responseObserver ) {
        try {
            RegisterServiceDTO dto = new RegisterServiceDTO();
            dto.setClientId( request.getClientId() );
            dto.setServiceId( request.getServiceId() );
            dto.setDeployId( request.getDeployId() );

            String instanceId = this.lifecycleService.registerService( dto );
            RegisterServiceReply.Builder builder = RegisterServiceReply.newBuilder();
            if ( instanceId != null ) {
                builder.setInstanceId( instanceId );
            }
            else {
                builder.setInstanceId( "" );
            }
            RegisterServiceReply reply = builder.build();
            responseObserver.onNext( reply );
            responseObserver.onCompleted();
        }
        catch ( ClientServiceRegisterException e ) {
            throw new ProvokeHandleException( e );
        }
    }

    @Override
    public void deregisterServiceByClientId( ClientIdRequest request, StreamObserver<EmptyReply> responseObserver ) {
        this.lifecycleService.deregisterServiceByClientId( request.getClientId() );
        EmptyReply reply = EmptyReply.newBuilder().build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void deregisterServiceByInstanceId( InstanceIdRequest request, StreamObserver<EmptyReply> responseObserver ) {
        this.lifecycleService.deregisterServiceByInstanceId( request.getInstanceId() );
        EmptyReply reply = EmptyReply.newBuilder().build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void hasOwnedServiceByServiceId( ServiceIdRequest request, StreamObserver<BoolReply> responseObserver ) {
        boolean result = this.lifecycleService.hasOwnedServiceByServiceId( request.getServiceId() );
        BoolReply reply = BoolReply.newBuilder()
                .setValue( result )
                .build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void hasOwnedServiceInstanceByClientId( ClientIdRequest request, StreamObserver<BoolReply> responseObserver ) {
        boolean result = this.lifecycleService.hasOwnedServiceInstance( request.getClientId() );
        BoolReply reply = BoolReply.newBuilder()
                .setValue( result )
                .build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void hasOwnedServiceInstanceByInstanceId( InstanceIdRequest request, StreamObserver<BoolReply> responseObserver ) {
        boolean result = this.lifecycleService.hasOwnedServiceInstance( request.getInstanceId() );
        BoolReply reply = BoolReply.newBuilder()
                .setValue( result )
                .build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void hasOwnedServiceClient( ClientIdRequest request, StreamObserver<BoolReply> responseObserver ) {
        boolean result = this.lifecycleService.hasOwnedServiceClient( request.getClientId() );
        BoolReply reply = BoolReply.newBuilder()
                .setValue( result )
                .build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void countRegisteredService( EmptyRequest request, StreamObserver<CountReply> responseObserver ) {
        Integer count = this.lifecycleService.countRegisteredService();
        CountReply.Builder builder = CountReply.newBuilder();
        if ( count != null ) {
            builder.setValue( count );
        }
        else {
            builder.setValue( 0 );
        }
        CountReply reply = builder.build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }
}
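As written, registerService converts a ClientServiceRegisterException into a thrown ProvokeHandleException, which gRPC surfaces to remote callers as a generic UNKNOWN status. If a structured status were preferred, the conventional io.grpc idiom would look like the following sketch (not what the SDK currently does):

// Hypothetical alternative error mapping -- sketch only, not the SDK's current behavior.
try {
    // ... identical registration logic as above ...
} catch ( ClientServiceRegisterException e ) {
    responseObserver.onError( io.grpc.Status.INTERNAL
            .withDescription( String.valueOf( e.getMessage() ) )
            .withCause( e )
            .asRuntimeException() );
}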
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceMetaService.java
================================================
package com.pinecone.hydra.service.registry.grpc.server;

import com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;
import com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaGrpc;
import com.pinecone.hydra.service.registry.grpc.server.meta.StringReply;
import com.pinecone.hydra.service.registry.server.ServiceManager;
import com.pinecone.hydra.service.registry.server.ServiceMetaService;
import io.grpc.stub.StreamObserver;

import java.util.List;

public class GrpcServiceMetaService extends ServiceMetaGrpc.ServiceMetaImplBase {
    private final ServiceMetaService serviceMetaService;

    public GrpcServiceMetaService( ServiceManager serviceManager ) {
        this.serviceMetaService = serviceManager.getServiceMetaService();
    }

    @Override
    public void fetchServiceInsMetaByClientId( ClientIdRequest request, StreamObserver<ServiceMetaDTOListReply> responseObserver ) {
        List<ServiceMetaDTO> list = this.serviceMetaService.fetchServiceInsMetaByClientId( request.getClientId() );
        ServiceMetaDTOListReply.Builder builder = ServiceMetaDTOListReply.newBuilder();
        if ( list != null ) {
            for ( ServiceMetaDTO dto : list ) {
                builder.addMetas( this.toProto( dto ) );
            }
        }
        ServiceMetaDTOListReply reply = builder.build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void fetchServiceInsMetaByServiceId( ServiceIdRequest request, StreamObserver<ServiceMetaDTOListReply> responseObserver ) {
        List<ServiceMetaDTO> list = this.serviceMetaService.fetchServiceInsMetaByServiceId( request.getServiceId() );
        ServiceMetaDTOListReply.Builder builder = ServiceMetaDTOListReply.newBuilder();
        if ( list != null ) {
            for ( ServiceMetaDTO dto : list ) {
                builder.addMetas( this.toProto( dto ) );
            }
        }
        ServiceMetaDTOListReply reply = builder.build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void queryServiceMetaByPath( PathRequest request, StreamObserver<ServiceMetaDTOReply> responseObserver ) {
        ServiceMetaDTO dto = this.serviceMetaService.queryServiceMetaByPath( request.getPath() );
        ServiceMetaDTOReply.Builder builder = ServiceMetaDTOReply.newBuilder();
        if ( dto != null ) {
            builder.setMeta( this.toProto( dto ) );
        }
        ServiceMetaDTOReply reply = builder.build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void queryServiceMetaByGuid( GuidRequest request, StreamObserver<ServiceMetaDTOReply> responseObserver ) {
        ServiceMetaDTO dto = this.serviceMetaService.queryServiceMetaByGuid( request.getGuid() );
        ServiceMetaDTOReply.Builder builder = ServiceMetaDTOReply.newBuilder();
        if ( dto != null ) {
            builder.setMeta( this.toProto( dto ) );
        }
        ServiceMetaDTOReply reply = builder.build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void evalCreationStatement( EvalRequest request, StreamObserver<StringReply> responseObserver ) {
        String result = this.serviceMetaService.evalCreationStatement( request.getJsonStatement() );
        StringReply.Builder builder = StringReply.newBuilder();
        if ( result != null ) {
            builder.setValue( result );
        }
        StringReply reply = builder.build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    @Override
    public void createNewService( CreateNewServiceRequest request, StreamObserver<StringReply> responseObserver ) {
        String result = this.serviceMetaService.createNewService( request.getParentAppPath(), this.fromProto( request.getMeta() ) );
        StringReply.Builder builder = StringReply.newBuilder();
        if ( result != null ) {
            builder.setValue( result );
        }
        StringReply reply = builder.build();
        responseObserver.onNext( reply );
        responseObserver.onCompleted();
    }

    private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO toProto( ServiceMetaDTO dto ) {
        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builder =
                com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.newBuilder();
        if ( dto.getGuid() != null ) { builder.setGuid( dto.getGuid() ); }
        if ( dto.getName() != null ) { builder.setName( dto.getName() ); }
        if ( dto.getType() != null ) { builder.setType( dto.getType() ); }
        if ( dto.getDisplayName() != null ) { builder.setDisplayName( dto.getDisplayName() ); }
        if ( dto.getDescription() != null ) { builder.setDescription( dto.getDescription() ); }
        if ( dto.getFullName() != null ) { builder.setFullName( dto.getFullName() ); }
        if ( dto.getGroupNamespace() != null ) { builder.setGroupNamespace( dto.getGroupNamespace() ); }
        if ( dto.getGroupName() != null ) { builder.setGroupName( dto.getGroupName() ); }
        if ( dto.getScenario() != null ) { builder.setScenario( dto.getScenario() ); }
        if ( dto.getPrimaryImplLang() != null ) { builder.setPrimaryImplLang( dto.getPrimaryImplLang() ); }
        if ( dto.getExtraInformation() != null ) { builder.setExtraInformation( dto.getExtraInformation() ); }
        if ( dto.getLevel() != null ) { builder.setLevel( dto.getLevel() ); }
        return builder.build();
    }

    private ServiceMetaDTO fromProto( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto ) {
        ServiceMetaDTO dto = new ServiceMetaDTO();
        dto.setGuid( proto.getGuid() );
        dto.setName( proto.getName() );
        dto.setType( proto.getType() );
        dto.setDisplayName( proto.getDisplayName() );
        dto.setDescription( proto.getDescription() );
        dto.setFullName( proto.getFullName() );
        dto.setGroupNamespace( proto.getGroupNamespace() );
        dto.setGroupName( proto.getGroupName() );
        dto.setScenario( proto.getScenario() );
        dto.setPrimaryImplLang( proto.getPrimaryImplLang() );
        dto.setExtraInformation( proto.getExtraInformation() );
        dto.setLevel( proto.getLevel() );
        return dto;
    }
}


================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcSession.java
================================================
package com.pinecone.hydra.service.registry.grpc.server;

import com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage;
import io.grpc.stub.StreamObserver;

import java.net.SocketAddress;
import java.util.concurrent.atomic.AtomicBoolean;

public class GrpcSession {
    private final String        connectId;
    private final SocketAddress remoteAddress;

    // The gRPC response stream back to the connected client.
    private final StreamObserver<ControlMessage> outbound;

    private final AtomicBoolean closed = new AtomicBoolean( false );

    private volatile long lastHeartbeatTime = System.currentTimeMillis();

    public void refreshHeartbeat() {
        this.lastHeartbeatTime = System.currentTimeMillis();
    }

    public boolean isTimeout( long timeoutMillis ) {
        return System.currentTimeMillis() - this.lastHeartbeatTime > timeoutMillis;
    }

    public GrpcSession( String connectId, SocketAddress remoteAddress, StreamObserver<ControlMessage> outbound ) {
        this.connectId     = connectId;
        this.remoteAddress = remoteAddress;
        this.outbound      = outbound;
    }

    public String getConnectId() {
        return this.connectId;
    }

    public SocketAddress getRemoteAddress() {
        return this.remoteAddress;
    }

    public StreamObserver<ControlMessage> getOutbound() {
        return this.outbound;
    }

    public boolean isClosed() {
        return this.closed.get();
    }

    public boolean markClosed() {
        return this.closed.compareAndSet( false, true );
    }
}


================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlMessage.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: control_stream.proto package com.pinecone.hydra.service.registry.grpc.server.cs; /** * Protobuf type {@code ControlMessage} */ public final class ControlMessage extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:ControlMessage) ControlMessageOrBuilder { private static final long serialVersionUID = 0L; // Use ControlMessage.newBuilder() to construct. private ControlMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ControlMessage() { payload_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ControlMessage(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class, com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.Builder.class); } public static final int CLIENTID_FIELD_NUMBER = 1; private long clientId_ = 0L; /** * int64 clientId = 1; * @return The clientId. */ @java.lang.Override public long getClientId() { return clientId_; } public static final int PAYLOAD_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object payload_ = ""; /** * string payload = 2; * @return The payload. */ @java.lang.Override public java.lang.String getPayload() { java.lang.Object ref = payload_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); payload_ = s; return s; } } /** * string payload = 2; * @return The bytes for payload. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPayloadBytes() { java.lang.Object ref = payload_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); payload_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (clientId_ != 0L) { output.writeInt64(1, clientId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(payload_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, payload_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (clientId_ != 0L) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, clientId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(payload_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, payload_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage other = (com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage) obj; if (getClientId() != other.getClientId()) return false; if (!getPayload() .equals(other.getPayload())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CLIENTID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getClientId()); hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; hash = (53 * hash) + getPayload().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ControlMessage} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:ControlMessage) com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class, com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; clientId_ = 0L; payload_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage build() { com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage buildPartial() { com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage result = new com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.clientId_ = clientId_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.payload_ = payload_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage other) { if (other == com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance()) return this; if (other.getClientId() != 0L) { setClientId(other.getClientId()); } if (!other.getPayload().isEmpty()) { payload_ = other.payload_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { clientId_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { payload_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long clientId_ ; /** * int64 clientId = 1; * @return The clientId. */ @java.lang.Override public long getClientId() { return clientId_; } /** * int64 clientId = 1; * @param value The clientId to set. * @return This builder for chaining. */ public Builder setClientId(long value) { clientId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * int64 clientId = 1; * @return This builder for chaining. */ public Builder clearClientId() { bitField0_ = (bitField0_ & ~0x00000001); clientId_ = 0L; onChanged(); return this; } private java.lang.Object payload_ = ""; /** * string payload = 2; * @return The payload. */ public java.lang.String getPayload() { java.lang.Object ref = payload_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); payload_ = s; return s; } else { return (java.lang.String) ref; } } /** * string payload = 2; * @return The bytes for payload. */ public com.google.protobuf.ByteString getPayloadBytes() { java.lang.Object ref = payload_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); payload_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string payload = 2; * @param value The payload to set. * @return This builder for chaining. 
*/ public Builder setPayload( java.lang.String value) { if (value == null) { throw new NullPointerException(); } payload_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * string payload = 2; * @return This builder for chaining. */ public Builder clearPayload() { payload_ = getDefaultInstance().getPayload(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * string payload = 2; * @param value The bytes for payload to set. * @return This builder for chaining. */ public Builder setPayloadBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); payload_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:ControlMessage) } // @@protoc_insertion_point(class_scope:ControlMessage) private static final com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage(); } public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public ControlMessage parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlMessageOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: control_stream.proto package com.pinecone.hydra.service.registry.grpc.server.cs; public interface ControlMessageOrBuilder extends // @@protoc_insertion_point(interface_extends:ControlMessage) com.google.protobuf.MessageOrBuilder { /** * int64 clientId = 1; * @return The clientId. */ long getClientId(); /** * string payload = 2; * @return The payload. */ java.lang.String getPayload(); /** * string payload = 2; * @return The bytes for payload. 
*/ com.google.protobuf.ByteString getPayloadBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlStreamGrpc.java ================================================ package com.pinecone.hydra.service.registry.grpc.server.cs; import static io.grpc.MethodDescriptor.generateFullMethodName; /** */ @javax.annotation.Generated( value = "by gRPC proto compiler (version 1.62.2)", comments = "Source: control_stream.proto") @io.grpc.stub.annotations.GrpcGenerated public final class ControlStreamGrpc { private ControlStreamGrpc() {} public static final java.lang.String SERVICE_NAME = "ControlStream"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor getConnectMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "Connect", requestType = com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class, responseType = com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class, methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) public static io.grpc.MethodDescriptor getConnectMethod() { io.grpc.MethodDescriptor getConnectMethod; if ((getConnectMethod = ControlStreamGrpc.getConnectMethod) == null) { synchronized (ControlStreamGrpc.class) { if ((getConnectMethod = ControlStreamGrpc.getConnectMethod) == null) { ControlStreamGrpc.getConnectMethod = getConnectMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Connect")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance())) .setSchemaDescriptor(new ControlStreamMethodDescriptorSupplier("Connect")) .build(); } } } return getConnectMethod; } /** * Creates a new async stub that supports all call types for the service */ public static ControlStreamStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ControlStreamStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlStreamStub(channel, callOptions); } }; return ControlStreamStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static ControlStreamBlockingStub newBlockingStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ControlStreamBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlStreamBlockingStub(channel, callOptions); } }; return ControlStreamBlockingStub.newStub(factory, channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static ControlStreamFutureStub newFutureStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ControlStreamFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlStreamFutureStub(channel, 
callOptions); } }; return ControlStreamFutureStub.newStub(factory, channel); } /** */ public interface AsyncService { /** */ default io.grpc.stub.StreamObserver connect( io.grpc.stub.StreamObserver responseObserver) { return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(getConnectMethod(), responseObserver); } } /** * Base class for the server implementation of the service ControlStream. */ public static abstract class ControlStreamImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return ControlStreamGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service ControlStream. */ public static final class ControlStreamStub extends io.grpc.stub.AbstractAsyncStub { private ControlStreamStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ControlStreamStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlStreamStub(channel, callOptions); } /** */ public io.grpc.stub.StreamObserver connect( io.grpc.stub.StreamObserver responseObserver) { return io.grpc.stub.ClientCalls.asyncBidiStreamingCall( getChannel().newCall(getConnectMethod(), getCallOptions()), responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service ControlStream. */ public static final class ControlStreamBlockingStub extends io.grpc.stub.AbstractBlockingStub { private ControlStreamBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ControlStreamBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlStreamBlockingStub(channel, callOptions); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service ControlStream. 
*/ public static final class ControlStreamFutureStub extends io.grpc.stub.AbstractFutureStub { private ControlStreamFutureStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ControlStreamFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlStreamFutureStub(channel, callOptions); } } private static final int METHODID_CONNECT = 0; private static final class MethodHandlers implements io.grpc.stub.ServerCalls.UnaryMethod, io.grpc.stub.ServerCalls.ServerStreamingMethod, io.grpc.stub.ServerCalls.ClientStreamingMethod, io.grpc.stub.ServerCalls.BidiStreamingMethod { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver invoke( io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { case METHODID_CONNECT: return (io.grpc.stub.StreamObserver) serviceImpl.connect( (io.grpc.stub.StreamObserver) responseObserver); default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getConnectMethod(), io.grpc.stub.ServerCalls.asyncBidiStreamingCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage, com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage>( service, METHODID_CONNECT))) .build(); } private static abstract class ControlStreamBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { ControlStreamBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("ControlStream"); } } private static final class ControlStreamFileDescriptorSupplier extends ControlStreamBaseDescriptorSupplier { ControlStreamFileDescriptorSupplier() {} } private static final class ControlStreamMethodDescriptorSupplier extends ControlStreamBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; ControlStreamMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (ControlStreamGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new ControlStreamFileDescriptorSupplier()) .addMethod(getConnectMethod()) .build(); } } } 
    return result;
  }
}


================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlStreamOuterClass.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: control_stream.proto

package com.pinecone.hydra.service.registry.grpc.server.cs;

public final class ControlStreamOuterClass {
  private ControlStreamOuterClass() {}

  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (com.google.protobuf.ExtensionRegistryLite) registry);
  }

  static final com.google.protobuf.Descriptors.Descriptor
    internal_static_ControlMessage_descriptor;
  static final
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_ControlMessage_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\024control_stream.proto\"3\n\016ControlMessage" +
      "\022\020\n\010clientId\030\001 \001(\003\022\017\n\007payload\030\002 \001(\t2@\n\rC" +
      "ontrolStream\022/\n\007Connect\022\017.ControlMessage" +
      "\032\017.ControlMessage(\0010\001B6\n2com.pinecone.hy" +
      "dra.service.registry.grpc.server.csP\001b\006p" +
      "roto3"
    };
    descriptor = com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        });
    internal_static_ControlMessage_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_ControlMessage_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_ControlMessage_descriptor,
        new java.lang.String[] { "ClientId", "Payload", });
  }

  // @@protoc_insertion_point(outer_class_scope)
}
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/iface/ServiceLifecycleImpl.java
================================================
package com.pinecone.hydra.service.registry.grpc.server.iface;

import com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;
import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleGrpc;
import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;

public class ServiceLifecycleImpl implements ServiceLifecycleIface {
    protected final ServiceLifecycleGrpc.ServiceLifecycleBlockingStub lifecycleBlockingStub;

    public ServiceLifecycleImpl( ServiceLifecycleGrpc.ServiceLifecycleBlockingStub lifecycleBlockingStub ) {
        this.lifecycleBlockingStub = lifecycleBlockingStub;
    }

    @Override
    public String registerService( RegisterServiceDTO serviceDTO ) {
        RegisterServiceRequest request = RegisterServiceRequest.newBuilder()
                .setClientId( serviceDTO.getClientId() )
                .setServiceId( serviceDTO.getServiceId() )
                .setDeployId( serviceDTO.getDeployId() )
                .build();
        RegisterServiceReply reply = this.lifecycleBlockingStub.registerService( request );
        return reply.getInstanceId();
    }

    @Override
    public boolean createInstanceMeta( ServiceInstanceEntry serviceInstanceEntry ) {
        // Instance meta creation is not supported over this remote facade.
        return false;
    }

    @Override
    public void deregisterServiceByClientId( Long clientId ) {
        ClientIdRequest request = ClientIdRequest.newBuilder().setClientId( clientId ).build();
        this.lifecycleBlockingStub.deregisterServiceByClientId( request );
    }

    @Override
    public void deregisterServiceByInstanceId( String instanceId ) {
        InstanceIdRequest request = InstanceIdRequest.newBuilder().setInstanceId( instanceId ).build();
        this.lifecycleBlockingStub.deregisterServiceByInstanceId( request );
    }

    @Override
    public boolean hasOwnedServiceByServiceId( String serviceId ) {
        ServiceIdRequest request = ServiceIdRequest.newBuilder().setServiceId( serviceId ).build();
        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceByServiceId( request );
        return reply.getValue();
    }

    @Override
    public boolean hasOwnedServiceInstance( Long clientId ) {
        ClientIdRequest request = ClientIdRequest.newBuilder().setClientId( clientId ).build();
        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceInstanceByClientId( request );
        return reply.getValue();
    }

    @Override
    public boolean hasOwnedServiceInstance( String instanceId ) {
        InstanceIdRequest request = InstanceIdRequest.newBuilder().setInstanceId( instanceId ).build();
        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceInstanceByInstanceId( request );
        return reply.getValue();
    }

    @Override
    public boolean hasOwnedServiceClient( Long clientId ) {
        ClientIdRequest request = ClientIdRequest.newBuilder().setClientId( clientId ).build();
        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceClient( request );
        return reply.getValue();
    }

    @Override
    public Integer countRegisteredService() {
        EmptyRequest request = EmptyRequest.newBuilder().build();
        CountReply reply = this.lifecycleBlockingStub.countRegisteredService( request );
        return reply.getValue();
    }
}
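ServiceLifecycleImpl is the facade that GrpcServiceClient.startService wires onto its blocking stub; it can also be used directly against an existing channel. A usage sketch, where `channel` is an assumed pre-built io.grpc.ManagedChannel and the identifiers are illustrative:

// Hypothetical usage sketch -- `channel` is an assumed io.grpc.ManagedChannel.
ServiceLifecycleIface lifecycle =
        new ServiceLifecycleImpl( ServiceLifecycleGrpc.newBlockingStub( channel ) );

RegisterServiceDTO dto = new RegisterServiceDTO();
dto.setClientId( 42L );                      // illustrative
dto.setServiceId( "demo-service-guid" );     // illustrative
dto.setDeployId( "demo-deploy-guid" );       // illustrative

String instanceId = lifecycle.registerService( dto );
lifecycle.deregisterServiceByInstanceId( instanceId );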
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/iface/ServiceMetaManipulationIfaceImpl.java
================================================
package com.pinecone.hydra.service.registry.grpc.server.iface;

import java.util.ArrayList;
import java.util.List;

import com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;
import com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaGrpc;
import com.pinecone.hydra.service.registry.grpc.server.meta.StringReply;
import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;

public class ServiceMetaManipulationIfaceImpl implements ServiceMetaManipulationIface {
    protected final ServiceMetaGrpc.ServiceMetaBlockingStub metaBlockingStub;

    public ServiceMetaManipulationIfaceImpl( ServiceMetaGrpc.ServiceMetaBlockingStub metaBlockingStub ) {
        this.metaBlockingStub = metaBlockingStub;
    }

    @Override
    public List<ServiceMetaDTO> fetchServiceInsMetaByClientId( long clientId ) {
        ClientIdRequest request = ClientIdRequest.newBuilder()
                .setClientId( clientId )
                .build();
        ServiceMetaDTOListReply reply = this.metaBlockingStub.fetchServiceInsMetaByClientId( request );
        List<ServiceMetaDTO> result = new ArrayList<>();
        for ( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto : reply.getMetasList() ) {
            result.add( this.fromProto( proto ) );
        }
        return result;
    }

    @Override
    public List<ServiceMetaDTO> fetchServiceInsMetaByServiceId( String serviceId ) {
        ServiceIdRequest request = ServiceIdRequest.newBuilder()
                .setServiceId( serviceId )
                .build();
        ServiceMetaDTOListReply reply = this.metaBlockingStub.fetchServiceInsMetaByServiceId( request );
        List<ServiceMetaDTO> result = new ArrayList<>();
        for ( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto : reply.getMetasList() ) {
            result.add( this.fromProto( proto ) );
        }
        return result;
    }

    @Override
    public ServiceMetaDTO queryServiceMetaByPath( String path ) {
        PathRequest request = PathRequest.newBuilder()
                .setPath( path )
                .build();
        ServiceMetaDTOReply reply = this.metaBlockingStub.queryServiceMetaByPath( request );
        if ( reply.hasMeta() ) {
            return this.fromProto( reply.getMeta() );
        }
        return null;
    }

    @Override
    public ServiceMetaDTO queryServiceMetaByGuid( String guid ) {
        GuidRequest request = GuidRequest.newBuilder()
                .setGuid( guid )
                .build();
        ServiceMetaDTOReply reply = this.metaBlockingStub.queryServiceMetaByGuid( request );
        if ( reply.hasMeta() ) {
            return this.fromProto( reply.getMeta() );
        }
        return null;
    }

    @Override
    public String evalCreationStatement( String jsonStatement ) {
        EvalRequest request = EvalRequest.newBuilder()
                .setJsonStatement( jsonStatement )
                .build();
        StringReply reply = this.metaBlockingStub.evalCreationStatement( request );
        return reply.getValue();
    }

    @Override
    public String createNewService( String parentAppPath, ServiceMetaDTO meta ) {
        CreateNewServiceRequest request = CreateNewServiceRequest.newBuilder()
                .setParentAppPath( parentAppPath )
                .setMeta( this.toProto( meta ) )
                .build();
        StringReply reply = this.metaBlockingStub.createNewService( request );
        return reply.getValue();
    }

    protected ServiceMetaDTO fromProto( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto ) {
        ServiceMetaDTO dto = new ServiceMetaDTO();
        dto.setGuid( proto.getGuid() );
        dto.setName( proto.getName() );
        dto.setType( proto.getType() );
        dto.setDisplayName( proto.getDisplayName() );
        dto.setDescription( proto.getDescription() );
        dto.setFullName( proto.getFullName() );
        dto.setGroupNamespace( proto.getGroupNamespace() );
        dto.setGroupName( proto.getGroupName() );
        dto.setScenario( proto.getScenario() );
        dto.setPrimaryImplLang( proto.getPrimaryImplLang() );
        dto.setExtraInformation( proto.getExtraInformation() );
        dto.setLevel( proto.getLevel() );
        return dto;
    }

    protected com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO toProto( ServiceMetaDTO dto ) {
        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builder =
                com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.newBuilder();
        if ( dto.getGuid() != null ) { builder.setGuid( dto.getGuid() ); }
        if ( dto.getName() != null ) { builder.setName( dto.getName() ); }
        if ( dto.getType() != null ) { builder.setType( dto.getType() ); }
        if ( dto.getDisplayName() != null ) { builder.setDisplayName( dto.getDisplayName() ); }
        if ( dto.getDescription() != null ) { builder.setDescription( dto.getDescription() ); }
        if ( dto.getFullName() != null ) { builder.setFullName( dto.getFullName() ); }
        if ( dto.getGroupNamespace() != null ) { builder.setGroupNamespace( dto.getGroupNamespace() ); }
        if ( dto.getGroupName() != null ) { builder.setGroupName( dto.getGroupName() ); }
        if ( dto.getScenario() != null ) { builder.setScenario( dto.getScenario() ); }
        if ( dto.getPrimaryImplLang() != null ) { builder.setPrimaryImplLang( dto.getPrimaryImplLang() ); }
        if ( dto.getExtraInformation() != null ) { builder.setExtraInformation( dto.getExtraInformation() ); }
        if ( dto.getLevel() != null ) { builder.setLevel( dto.getLevel() ); }
        return builder.build();
    }
}


================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/BoolReply.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: service_lifecycle.proto

package com.pinecone.hydra.service.registry.grpc.server.lifecycle;

/**
 * Protobuf type {@code BoolReply}
 */
public final class BoolReply extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:BoolReply)
    BoolReplyOrBuilder {
private static final long serialVersionUID = 0L;
  // Use BoolReply.newBuilder() to construct.
  private BoolReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private BoolReply() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new BoolReply();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.Builder.class);
  }

  public static final int VALUE_FIELD_NUMBER = 1;
  private boolean value_ = false;
  /**
   * <code>bool value = 1;</code>
   * @return The value.
   */
  @java.lang.Override
  public boolean getValue() {
    return value_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (value_ != false) {
      output.writeBool(1, value_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (value_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(1, value_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply)) {
      return super.equals(obj);
    }
    com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply) obj;

    if (getValue() != other.getValue()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + VALUE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getValue());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code BoolReply} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:BoolReply) com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReplyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; value_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.value_ = value_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance()) return this; if (other.getValue() != false) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { value_ = input.readBool(); bitField0_ |= 0x00000001; break; } // case 8 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private boolean value_ ; /** * bool value = 1; * @return The value. */ @java.lang.Override public boolean getValue() { return value_; } /** * bool value = 1; * @param value The value to set. * @return This builder for chaining. */ public Builder setValue(boolean value) { value_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * bool value = 1; * @return This builder for chaining. 
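* <p>Editorial note, not compiler-generated text: clearing restores the proto3 default, so a hypothetical round trip such as {@code BoolReply.newBuilder().setValue(true).clearValue().build().getValue()} yields {@code false}; default-valued fields are also skipped on the wire, since {@code writeTo} above only emits {@code value_} when it is {@code true}.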
*/ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000001); value_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:BoolReply) } // @@protoc_insertion_point(class_scope:BoolReply) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BoolReply> PARSER = new com.google.protobuf.AbstractParser<BoolReply>() { @java.lang.Override public BoolReply parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BoolReply> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BoolReply> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/BoolReplyOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface BoolReplyOrBuilder extends // @@protoc_insertion_point(interface_extends:BoolReply) com.google.protobuf.MessageOrBuilder { /** * bool value = 1; * @return The value. */ boolean getValue(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ClientIdRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code ClientIdRequest} */ public final class ClientIdRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:ClientIdRequest) ClientIdRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ClientIdRequest.newBuilder() to construct.
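/* Editorial sketch, illustrative only (the literal id below is hypothetical): callers build this request through the generated builder, mirroring the lifecycle client wrapper earlier in this SDK, e.g. ClientIdRequest request = ClientIdRequest.newBuilder().setClientId( 1001L ).build(); BoolReply owned = lifecycleBlockingStub.hasOwnedServiceClient( request ); where lifecycleBlockingStub is the assumed blocking stub held by that wrapper. */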
private ClientIdRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ClientIdRequest() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ClientIdRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.Builder.class); } public static final int CLIENTID_FIELD_NUMBER = 1; private long clientId_ = 0L; /** * int64 clientId = 1; * @return The clientId. */ @java.lang.Override public long getClientId() { return clientId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (clientId_ != 0L) { output.writeInt64(1, clientId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (clientId_ != 0L) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, clientId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) obj; if (getClientId() != other.getClientId()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CLIENTID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getClientId()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ClientIdRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:ClientIdRequest) com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; clientId_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.clientId_ = clientId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance()) return this; if (other.getClientId() != 0L) { setClientId(other.getClientId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { clientId_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long clientId_ ; /** * int64 clientId = 1; * @return The clientId. */ @java.lang.Override public long getClientId() { return clientId_; } /** * int64 clientId = 1; * @param value The clientId to set. * @return This builder for chaining. */ public Builder setClientId(long value) { clientId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * int64 clientId = 1; * @return This builder for chaining. 
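* <p>Editorial note, not compiler-generated text: clearing reverts the field to the proto3 default {@code 0L}, and defaults are omitted from the wire; {@code writeTo} above only serializes {@code clientId_} when it is non-zero.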
*/ public Builder clearClientId() { bitField0_ = (bitField0_ & ~0x00000001); clientId_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:ClientIdRequest) } // @@protoc_insertion_point(class_scope:ClientIdRequest) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ClientIdRequest> PARSER = new com.google.protobuf.AbstractParser<ClientIdRequest>() { @java.lang.Override public ClientIdRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ClientIdRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ClientIdRequest> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ClientIdRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface ClientIdRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:ClientIdRequest) com.google.protobuf.MessageOrBuilder { /** * int64 clientId = 1; * @return The clientId. */ long getClientId(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CountReply.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code CountReply} */ public final class CountReply extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:CountReply) CountReplyOrBuilder { private static final long serialVersionUID = 0L; // Use CountReply.newBuilder() to construct.
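/* Editorial sketch, illustrative only (variable names are hypothetical): a server-side countRegisteredService handler would answer with CountReply reply = CountReply.newBuilder().setValue( registeredCount ).build(); and the client wrapper shown earlier in this SDK unwraps it via reply.getValue(). */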
private CountReply(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private CountReply() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CountReply(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.Builder.class); } public static final int VALUE_FIELD_NUMBER = 1; private int value_ = 0; /** * int32 value = 1; * @return The value. */ @java.lang.Override public int getValue() { return value_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (value_ != 0) { output.writeInt32(1, value_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (value_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, value_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply) obj; if (getValue() != other.getValue()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code CountReply} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:CountReply) com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReplyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; value_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.value_ = value_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder 
addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.getDefaultInstance()) return this; if (other.getValue() != 0) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { value_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int value_ ; /** * int32 value = 1; * @return The value. */ @java.lang.Override public int getValue() { return value_; } /** * int32 value = 1; * @param value The value to set. * @return This builder for chaining. */ public Builder setValue(int value) { value_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * int32 value = 1; * @return This builder for chaining. 
*/ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000001); value_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:CountReply) } // @@protoc_insertion_point(class_scope:CountReply) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CountReply> PARSER = new com.google.protobuf.AbstractParser<CountReply>() { @java.lang.Override public CountReply parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CountReply> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CountReply> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CountReplyOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface CountReplyOrBuilder extends // @@protoc_insertion_point(interface_extends:CountReply) com.google.protobuf.MessageOrBuilder { /** * int32 value = 1; * @return The value. */ int getValue(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CreateInstanceMetaRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code CreateInstanceMetaRequest} */ public final class CreateInstanceMetaRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:CreateInstanceMetaRequest) CreateInstanceMetaRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateInstanceMetaRequest.newBuilder() to construct.
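/* Editorial sketch, illustrative only (the GUID literal is a stand-in): like every message in this file, the request supports a byte-level round trip, e.g. CreateInstanceMetaRequest req = CreateInstanceMetaRequest.newBuilder().setInstanceGuid( "instance-guid" ).build(); CreateInstanceMetaRequest copy = CreateInstanceMetaRequest.parseFrom( req.toByteArray() ); with copy.equals( req ) holding afterwards. */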
private CreateInstanceMetaRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private CreateInstanceMetaRequest() { instanceGuid_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CreateInstanceMetaRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.Builder.class); } public static final int INSTANCEGUID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object instanceGuid_ = ""; /** * string instanceGuid = 1; * @return The instanceGuid. */ @java.lang.Override public java.lang.String getInstanceGuid() { java.lang.Object ref = instanceGuid_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceGuid_ = s; return s; } } /** * string instanceGuid = 1; * @return The bytes for instanceGuid. */ @java.lang.Override public com.google.protobuf.ByteString getInstanceGuidBytes() { java.lang.Object ref = instanceGuid_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instanceGuid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceGuid_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceGuid_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceGuid_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceGuid_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest) obj; if (!getInstanceGuid() .equals(other.getInstanceGuid())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int 
hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + INSTANCEGUID_FIELD_NUMBER; hash = (53 * hash) + getInstanceGuid().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code CreateInstanceMetaRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:CreateInstanceMetaRequest) com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; instanceGuid_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.instanceGuid_ = instanceGuid_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.getDefaultInstance()) return this; if (!other.getInstanceGuid().isEmpty()) { instanceGuid_ = other.instanceGuid_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { instanceGuid_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object instanceGuid_ = ""; /** * string instanceGuid = 1; * @return The instanceGuid. */ public java.lang.String getInstanceGuid() { java.lang.Object ref = instanceGuid_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceGuid_ = s; return s; } else { return (java.lang.String) ref; } } /** * string instanceGuid = 1; * @return The bytes for instanceGuid. 
*/ public com.google.protobuf.ByteString getInstanceGuidBytes() { java.lang.Object ref = instanceGuid_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instanceGuid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string instanceGuid = 1; * @param value The instanceGuid to set. * @return This builder for chaining. */ public Builder setInstanceGuid( java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceGuid_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string instanceGuid = 1; * @return This builder for chaining. */ public Builder clearInstanceGuid() { instanceGuid_ = getDefaultInstance().getInstanceGuid(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string instanceGuid = 1; * @param value The bytes for instanceGuid to set. * @return This builder for chaining. */ public Builder setInstanceGuidBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceGuid_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:CreateInstanceMetaRequest) } // @@protoc_insertion_point(class_scope:CreateInstanceMetaRequest) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public CreateInstanceMetaRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CreateInstanceMetaRequestOrBuilder.java ================================================ // 
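// ---------------------------------------------------------------------
// Usage sketch, not part of the generated sources. The accessors above
// imply a proto declaration of roughly
//   message CreateInstanceMetaRequest { string instanceGuid = 1; }
// in service_lifecycle.proto. The helper class and GUID literal below are
// hypothetical; toByteArray() is inherited from
// com.google.protobuf.AbstractMessageLite, and parseFrom(byte[]) is the
// generated overload shown above.
final class CreateInstanceMetaRequestSketch {
    static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest roundTrip()
            throws com.google.protobuf.InvalidProtocolBufferException {
        com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request =
                com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.newBuilder()
                        .setInstanceGuid("0f8fad5b-d9cb-469f-a165-70867728950e") // hypothetical GUID value
                        .build();
        byte[] wire = request.toByteArray(); // binary wire format
        return com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.parseFrom(wire);
    }
}
// --- end usage sketch ---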
Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface CreateInstanceMetaRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:CreateInstanceMetaRequest) com.google.protobuf.MessageOrBuilder { /** * string instanceGuid = 1; * @return The instanceGuid. */ java.lang.String getInstanceGuid(); /** * string instanceGuid = 1; * @return The bytes for instanceGuid. */ com.google.protobuf.ByteString getInstanceGuidBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyReply.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code EmptyReply} */ public final class EmptyReply extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:EmptyReply) EmptyReplyOrBuilder { private static final long serialVersionUID = 0L; // Use EmptyReply.newBuilder() to construct. private EmptyReply(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private EmptyReply() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new EmptyReply(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } 
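// [annotation, not part of the generated sources] newBuilder(prototype)
// below seeds a fresh Builder with the prototype's contents via
// DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); since messages are
// immutable, edits to the returned builder never affect the prototype.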
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EmptyReply} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:EmptyReply) com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReplyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message 
other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:EmptyReply) } // @@protoc_insertion_point(class_scope:EmptyReply) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public EmptyReply parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyReplyOrBuilder.java ================================================ // Generated by the protocol buffer 
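// ---------------------------------------------------------------------
// Usage sketch, not part of the generated sources. EmptyReply declares no
// fields, so a handler can hand back the shared default instance instead
// of building a fresh message per call; the helper class is hypothetical.
final class EmptyReplySketch {
    static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply acknowledge() {
        // Behaviorally equivalent to EmptyReply.newBuilder().build(), without the allocation.
        return com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance();
    }
}
// --- end usage sketch ---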
compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface EmptyReplyOrBuilder extends // @@protoc_insertion_point(interface_extends:EmptyReply) com.google.protobuf.MessageOrBuilder { } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code EmptyRequest} */ public final class EmptyRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:EmptyRequest) EmptyRequestOrBuilder { private static final long serialVersionUID = 0L; // Use EmptyRequest.newBuilder() to construct. private EmptyRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private EmptyRequest() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new EmptyRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EmptyRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:EmptyRequest) com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:EmptyRequest) } // @@protoc_insertion_point(class_scope:EmptyRequest) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public EmptyRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. 
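// ---------------------------------------------------------------------
// Usage sketch, not part of the generated sources. The generated
// parseDelimitedFrom(...) above pairs with writeDelimitedTo(...), which is
// inherited from com.google.protobuf.AbstractMessageLite: each message is
// framed with a varint length prefix so several can share one stream, and
// parseDelimitedFrom returns null once the stream is exhausted. The helper
// class is hypothetical.
final class EmptyRequestFramingSketch {
    static int countFramedMessages() throws java.io.IOException {
        java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
        com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance().writeDelimitedTo(out);
        com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance().writeDelimitedTo(out);
        java.io.ByteArrayInputStream in = new java.io.ByteArrayInputStream(out.toByteArray());
        int count = 0;
        while (com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.parseDelimitedFrom(in) != null) {
            count++; // one framed message consumed per iteration
        }
        return count; // 2
    }
}
// --- end usage sketch ---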
DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface EmptyRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:EmptyRequest) com.google.protobuf.MessageOrBuilder { } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/InstanceIdRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code InstanceIdRequest} */ public final class InstanceIdRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:InstanceIdRequest) InstanceIdRequestOrBuilder { private static final long serialVersionUID = 0L; // Use InstanceIdRequest.newBuilder() to construct. private InstanceIdRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private InstanceIdRequest() { instanceId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new InstanceIdRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.Builder.class); } public static final int INSTANCEID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object instanceId_ = ""; /** * string instanceId = 1; * @return The instanceId. */ @java.lang.Override public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } } /** * string instanceId = 1; * @return The bytes for instanceId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) obj; if (!getInstanceId() .equals(other.getInstanceId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + INSTANCEID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( 
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code InstanceIdRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:InstanceIdRequest) com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; instanceId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.instanceId_ = instanceId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int 
index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance()) return this; if (!other.getInstanceId().isEmpty()) { instanceId_ = other.instanceId_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { instanceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object instanceId_ = ""; /** * string instanceId = 1; * @return The instanceId. */ public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * string instanceId = 1; * @return The bytes for instanceId. */ public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string instanceId = 1; * @param value The instanceId to set. * @return This builder for chaining. */ public Builder setInstanceId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string instanceId = 1; * @return This builder for chaining. */ public Builder clearInstanceId() { instanceId_ = getDefaultInstance().getInstanceId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string instanceId = 1; * @param value The bytes for instanceId to set. * @return This builder for chaining. 
*/ public Builder setInstanceIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:InstanceIdRequest) } // @@protoc_insertion_point(class_scope:InstanceIdRequest) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public InstanceIdRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/InstanceIdRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface InstanceIdRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:InstanceIdRequest) com.google.protobuf.MessageOrBuilder { /** * string instanceId = 1; * @return The instanceId. */ java.lang.String getInstanceId(); /** * string instanceId = 1; * @return The bytes for instanceId. */ com.google.protobuf.ByteString getInstanceIdBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceReply.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! 
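// ---------------------------------------------------------------------
// Usage sketch, not part of the generated sources. InstanceIdRequest's
// builder exposes two setters for its single field: setInstanceId(String)
// for ordinary use, and setInstanceIdBytes(ByteString) when the UTF-8
// bytes are already in hand (the generated setter re-validates them via
// checkByteStringIsUtf8). The helper class is hypothetical.
final class InstanceIdRequestSketch {
    static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest byId(String instanceId) {
        return com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.newBuilder()
                .setInstanceId(instanceId) // equivalent to setInstanceIdBytes(ByteString.copyFromUtf8(instanceId))
                .build();
    }
}
// --- end usage sketch ---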
// source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code RegisterServiceReply} */ public final class RegisterServiceReply extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:RegisterServiceReply) RegisterServiceReplyOrBuilder { private static final long serialVersionUID = 0L; // Use RegisterServiceReply.newBuilder() to construct. private RegisterServiceReply(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RegisterServiceReply() { instanceId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new RegisterServiceReply(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.Builder.class); } public static final int INSTANCEID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object instanceId_ = ""; /** * string instanceId = 1; * @return The instanceId. */ @java.lang.Override public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } } /** * string instanceId = 1; * @return The bytes for instanceId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply) obj; if (!getInstanceId() .equals(other.getInstanceId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + INSTANCEID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RegisterServiceReply} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:RegisterServiceReply) com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReplyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; instanceId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.instanceId_ = instanceId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.getDefaultInstance()) return this; if (!other.getInstanceId().isEmpty()) { instanceId_ = other.instanceId_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { instanceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object instanceId_ = ""; /** * string instanceId = 1; * @return The instanceId. */ public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * string instanceId = 1; * @return The bytes for instanceId. */ public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string instanceId = 1; * @param value The instanceId to set. * @return This builder for chaining. */ public Builder setInstanceId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string instanceId = 1; * @return This builder for chaining. */ public Builder clearInstanceId() { instanceId_ = getDefaultInstance().getInstanceId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string instanceId = 1; * @param value The bytes for instanceId to set. * @return This builder for chaining. 
*/ public Builder setInstanceIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:RegisterServiceReply) } // @@protoc_insertion_point(class_scope:RegisterServiceReply) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RegisterServiceReply> PARSER = new com.google.protobuf.AbstractParser<RegisterServiceReply>() { @java.lang.Override public RegisterServiceReply parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<RegisterServiceReply> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<RegisterServiceReply> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceReplyOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface RegisterServiceReplyOrBuilder extends // @@protoc_insertion_point(interface_extends:RegisterServiceReply) com.google.protobuf.MessageOrBuilder { /** * string instanceId = 1; * @return The instanceId. */ java.lang.String getInstanceId(); /** * string instanceId = 1; * @return The bytes for instanceId. */ com.google.protobuf.ByteString getInstanceIdBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT!
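For orientation before the request message that follows, here is a minimal client-side sketch (not part of the repository) of how the generated lifecycle types fit together: the RegisterServiceRequest defined in the file below carries clientId, serviceId and deployId to the ServiceLifecycle RPC service, which answers with the RegisterServiceReply shown above. The endpoint address is hypothetical, and newBlockingStub(...) / registerService(...) are assumed from the standard protoc-gen-grpc-java stub layout, since the stub section of ServiceLifecycleGrpc.java lies beyond this excerpt.

import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleGrpc;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

public final class RegisterServiceClientSketch {
    public static void main(String[] args) {
        // Hypothetical registry endpoint; replace with the real address.
        ManagedChannel channel = ManagedChannelBuilder
                .forAddress("localhost", 50051)
                .usePlaintext()
                .build();
        try {
            // Build the request through the generated Builder API.
            RegisterServiceRequest request = RegisterServiceRequest.newBuilder()
                    .setClientId(42L)              // int64 clientId = 1
                    .setServiceId("demo-service")  // string serviceId = 2
                    .setDeployId("deploy-001")     // string deployId = 3
                    .build();
            // Blocking unary call; the stub factory and method names are
            // assumed from standard protoc-gen-grpc-java output.
            RegisterServiceReply reply = ServiceLifecycleGrpc
                    .newBlockingStub(channel)
                    .registerService(request);
            System.out.println("Registered, instanceId = " + reply.getInstanceId());
        } finally {
            channel.shutdownNow();
        }
    }
}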
// source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code RegisterServiceRequest} */ public final class RegisterServiceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:RegisterServiceRequest) RegisterServiceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use RegisterServiceRequest.newBuilder() to construct. private RegisterServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RegisterServiceRequest() { serviceId_ = ""; deployId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new RegisterServiceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.Builder.class); } public static final int CLIENTID_FIELD_NUMBER = 1; private long clientId_ = 0L; /** * int64 clientId = 1; * @return The clientId. */ @java.lang.Override public long getClientId() { return clientId_; } public static final int SERVICEID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object serviceId_ = ""; /** * string serviceId = 2; * @return The serviceId. */ @java.lang.Override public java.lang.String getServiceId() { java.lang.Object ref = serviceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceId_ = s; return s; } } /** * string serviceId = 2; * @return The bytes for serviceId. */ @java.lang.Override public com.google.protobuf.ByteString getServiceIdBytes() { java.lang.Object ref = serviceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DEPLOYID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object deployId_ = ""; /** * string deployId = 3; * @return The deployId. */ @java.lang.Override public java.lang.String getDeployId() { java.lang.Object ref = deployId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); deployId_ = s; return s; } } /** * string deployId = 3; * @return The bytes for deployId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDeployIdBytes() { java.lang.Object ref = deployId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); deployId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (clientId_ != 0L) { output.writeInt64(1, clientId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deployId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, deployId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (clientId_ != 0L) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, clientId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deployId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, deployId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest) obj; if (getClientId() != other.getClientId()) return false; if (!getServiceId() .equals(other.getServiceId())) return false; if (!getDeployId() .equals(other.getDeployId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CLIENTID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getClientId()); hash = (37 * hash) + SERVICEID_FIELD_NUMBER; hash = (53 * hash) + getServiceId().hashCode(); hash = (37 * hash) + DEPLOYID_FIELD_NUMBER; hash = (53 * hash) + getDeployId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RegisterServiceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:RegisterServiceRequest) com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; clientId_ = 0L; serviceId_ = ""; deployId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.clientId_ = clientId_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.serviceId_ = serviceId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.deployId_ = deployId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return
super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.getDefaultInstance()) return this; if (other.getClientId() != 0L) { setClientId(other.getClientId()); } if (!other.getServiceId().isEmpty()) { serviceId_ = other.serviceId_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getDeployId().isEmpty()) { deployId_ = other.deployId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { clientId_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { serviceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { deployId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long clientId_ ; /** * int64 clientId = 1; * @return The clientId. */ @java.lang.Override public long getClientId() { return clientId_; } /** * int64 clientId = 1; * @param value The clientId to set. * @return This builder for chaining. */ public Builder setClientId(long value) { clientId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * int64 clientId = 1; * @return This builder for chaining. */ public Builder clearClientId() { bitField0_ = (bitField0_ & ~0x00000001); clientId_ = 0L; onChanged(); return this; } private java.lang.Object serviceId_ = ""; /** * string serviceId = 2; * @return The serviceId. 
*/ public java.lang.String getServiceId() { java.lang.Object ref = serviceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * string serviceId = 2; * @return The bytes for serviceId. */ public com.google.protobuf.ByteString getServiceIdBytes() { java.lang.Object ref = serviceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string serviceId = 2; * @param value The serviceId to set. * @return This builder for chaining. */ public Builder setServiceId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } serviceId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * string serviceId = 2; * @return This builder for chaining. */ public Builder clearServiceId() { serviceId_ = getDefaultInstance().getServiceId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * string serviceId = 2; * @param value The bytes for serviceId to set. * @return This builder for chaining. */ public Builder setServiceIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); serviceId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object deployId_ = ""; /** * string deployId = 3; * @return The deployId. */ public java.lang.String getDeployId() { java.lang.Object ref = deployId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); deployId_ = s; return s; } else { return (java.lang.String) ref; } } /** * string deployId = 3; * @return The bytes for deployId. */ public com.google.protobuf.ByteString getDeployIdBytes() { java.lang.Object ref = deployId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); deployId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string deployId = 3; * @param value The deployId to set. * @return This builder for chaining. */ public Builder setDeployId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } deployId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * string deployId = 3; * @return This builder for chaining. */ public Builder clearDeployId() { deployId_ = getDefaultInstance().getDeployId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * string deployId = 3; * @param value The bytes for deployId to set. * @return This builder for chaining. 
*/ public Builder setDeployIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); deployId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:RegisterServiceRequest) } // @@protoc_insertion_point(class_scope:RegisterServiceRequest) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RegisterServiceRequest> PARSER = new com.google.protobuf.AbstractParser<RegisterServiceRequest>() { @java.lang.Override public RegisterServiceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<RegisterServiceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<RegisterServiceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface RegisterServiceRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:RegisterServiceRequest) com.google.protobuf.MessageOrBuilder { /** * int64 clientId = 1; * @return The clientId. */ long getClientId(); /** * string serviceId = 2; * @return The serviceId. */ java.lang.String getServiceId(); /** * string serviceId = 2; * @return The bytes for serviceId. */ com.google.protobuf.ByteString getServiceIdBytes(); /** * string deployId = 3; * @return The deployId. */ java.lang.String getDeployId(); /** * string deployId = 3; * @return The bytes for deployId.
*/ com.google.protobuf.ByteString getDeployIdBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceIdRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; /** * Protobuf type {@code ServiceIdRequest} */ public final class ServiceIdRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:ServiceIdRequest) ServiceIdRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ServiceIdRequest.newBuilder() to construct. private ServiceIdRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ServiceIdRequest() { serviceId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ServiceIdRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.Builder.class); } public static final int SERVICEID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object serviceId_ = ""; /** * string serviceId = 1; * @return The serviceId. */ @java.lang.Override public java.lang.String getServiceId() { java.lang.Object ref = serviceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceId_ = s; return s; } } /** * string serviceId = 1; * @return The bytes for serviceId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getServiceIdBytes() { java.lang.Object ref = serviceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, serviceId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, serviceId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest) obj; if (!getServiceId() .equals(other.getServiceId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SERVICEID_FIELD_NUMBER; hash = (53 * hash) + getServiceId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ServiceIdRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:ServiceIdRequest) com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; serviceId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest build() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.serviceId_ = serviceId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index,
java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.getDefaultInstance()) return this; if (!other.getServiceId().isEmpty()) { serviceId_ = other.serviceId_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { serviceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object serviceId_ = ""; /** * string serviceId = 1; * @return The serviceId. */ public java.lang.String getServiceId() { java.lang.Object ref = serviceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * string serviceId = 1; * @return The bytes for serviceId. */ public com.google.protobuf.ByteString getServiceIdBytes() { java.lang.Object ref = serviceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string serviceId = 1; * @param value The serviceId to set. * @return This builder for chaining. */ public Builder setServiceId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } serviceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string serviceId = 1; * @return This builder for chaining. */ public Builder clearServiceId() { serviceId_ = getDefaultInstance().getServiceId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string serviceId = 1; * @param value The bytes for serviceId to set. * @return This builder for chaining. 
*/ public Builder setServiceIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); serviceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:ServiceIdRequest) } // @@protoc_insertion_point(class_scope:ServiceIdRequest) private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ServiceIdRequest> PARSER = new com.google.protobuf.AbstractParser<ServiceIdRequest>() { @java.lang.Override public ServiceIdRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ServiceIdRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ServiceIdRequest> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceIdRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public interface ServiceIdRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:ServiceIdRequest) com.google.protobuf.MessageOrBuilder { /** * string serviceId = 1; * @return The serviceId. */ java.lang.String getServiceId(); /** * string serviceId = 1; * @return The bytes for serviceId.
*/ com.google.protobuf.ByteString getServiceIdBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceLifecycleGrpc.java ================================================ package com.pinecone.hydra.service.registry.grpc.server.lifecycle; import static io.grpc.MethodDescriptor.generateFullMethodName; /** */ @javax.annotation.Generated( value = "by gRPC proto compiler (version 1.62.2)", comments = "Source: service_lifecycle.proto") @io.grpc.stub.annotations.GrpcGenerated public final class ServiceLifecycleGrpc { private ServiceLifecycleGrpc() {} public static final java.lang.String SERVICE_NAME = "ServiceLifecycle"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> getRegisterServiceMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "RegisterService", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> getRegisterServiceMethod() { io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> getRegisterServiceMethod; if ((getRegisterServiceMethod = ServiceLifecycleGrpc.getRegisterServiceMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getRegisterServiceMethod = ServiceLifecycleGrpc.getRegisterServiceMethod) == null) { ServiceLifecycleGrpc.getRegisterServiceMethod = getRegisterServiceMethod = io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "RegisterService")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("RegisterService")) .build(); } } } return getRegisterServiceMethod; } private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getCreateInstanceMetaMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateInstanceMeta", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getCreateInstanceMetaMethod() { io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getCreateInstanceMetaMethod; if ((getCreateInstanceMetaMethod = ServiceLifecycleGrpc.getCreateInstanceMetaMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getCreateInstanceMetaMethod = ServiceLifecycleGrpc.getCreateInstanceMetaMethod) == null) { ServiceLifecycleGrpc.getCreateInstanceMetaMethod = getCreateInstanceMetaMethod = io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateInstanceMeta")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.getDefaultInstance()))
.setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("CreateInstanceMeta")) .build(); } } } return getCreateInstanceMetaMethod; } private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByClientIdMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeregisterServiceByClientId", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByClientIdMethod() { io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByClientIdMethod; if ((getDeregisterServiceByClientIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByClientIdMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getDeregisterServiceByClientIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByClientIdMethod) == null) { ServiceLifecycleGrpc.getDeregisterServiceByClientIdMethod = getDeregisterServiceByClientIdMethod = io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeregisterServiceByClientId")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("DeregisterServiceByClientId")) .build(); } } } return getDeregisterServiceByClientIdMethod; } private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByInstanceIdMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeregisterServiceByInstanceId", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByInstanceIdMethod() { io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByInstanceIdMethod; if ((getDeregisterServiceByInstanceIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByInstanceIdMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getDeregisterServiceByInstanceIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByInstanceIdMethod) == null) { ServiceLifecycleGrpc.getDeregisterServiceByInstanceIdMethod = getDeregisterServiceByInstanceIdMethod = io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeregisterServiceByInstanceId")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("DeregisterServiceByInstanceId")) .build(); } } } return
getDeregisterServiceByInstanceIdMethod; } private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceByServiceIdMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "HasOwnedServiceByServiceId", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceByServiceIdMethod() { io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceByServiceIdMethod; if ((getHasOwnedServiceByServiceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceByServiceIdMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getHasOwnedServiceByServiceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceByServiceIdMethod) == null) { ServiceLifecycleGrpc.getHasOwnedServiceByServiceIdMethod = getHasOwnedServiceByServiceIdMethod = io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "HasOwnedServiceByServiceId")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("HasOwnedServiceByServiceId")) .build(); } } } return getHasOwnedServiceByServiceIdMethod; } private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByClientIdMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "HasOwnedServiceInstanceByClientId", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByClientIdMethod() { io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByClientIdMethod; if ((getHasOwnedServiceInstanceByClientIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByClientIdMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getHasOwnedServiceInstanceByClientIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByClientIdMethod) == null) { ServiceLifecycleGrpc.getHasOwnedServiceInstanceByClientIdMethod = getHasOwnedServiceInstanceByClientIdMethod = io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "HasOwnedServiceInstanceByClientId")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("HasOwnedServiceInstanceByClientId")) .build(); } } } return getHasOwnedServiceInstanceByClientIdMethod; } private static volatile io.grpc.MethodDescriptor getHasOwnedServiceInstanceByInstanceIdMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' +
"HasOwnedServiceInstanceByInstanceId", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getHasOwnedServiceInstanceByInstanceIdMethod() { io.grpc.MethodDescriptor getHasOwnedServiceInstanceByInstanceIdMethod; if ((getHasOwnedServiceInstanceByInstanceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByInstanceIdMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getHasOwnedServiceInstanceByInstanceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByInstanceIdMethod) == null) { ServiceLifecycleGrpc.getHasOwnedServiceInstanceByInstanceIdMethod = getHasOwnedServiceInstanceByInstanceIdMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "HasOwnedServiceInstanceByInstanceId")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("HasOwnedServiceInstanceByInstanceId")) .build(); } } } return getHasOwnedServiceInstanceByInstanceIdMethod; } private static volatile io.grpc.MethodDescriptor getHasOwnedServiceClientMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "HasOwnedServiceClient", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getHasOwnedServiceClientMethod() { io.grpc.MethodDescriptor getHasOwnedServiceClientMethod; if ((getHasOwnedServiceClientMethod = ServiceLifecycleGrpc.getHasOwnedServiceClientMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getHasOwnedServiceClientMethod = ServiceLifecycleGrpc.getHasOwnedServiceClientMethod) == null) { ServiceLifecycleGrpc.getHasOwnedServiceClientMethod = getHasOwnedServiceClientMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "HasOwnedServiceClient")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("HasOwnedServiceClient")) .build(); } } } return getHasOwnedServiceClientMethod; } private static volatile io.grpc.MethodDescriptor getCountRegisteredServiceMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CountRegisteredService", requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public 
static io.grpc.MethodDescriptor getCountRegisteredServiceMethod() { io.grpc.MethodDescriptor getCountRegisteredServiceMethod; if ((getCountRegisteredServiceMethod = ServiceLifecycleGrpc.getCountRegisteredServiceMethod) == null) { synchronized (ServiceLifecycleGrpc.class) { if ((getCountRegisteredServiceMethod = ServiceLifecycleGrpc.getCountRegisteredServiceMethod) == null) { ServiceLifecycleGrpc.getCountRegisteredServiceMethod = getCountRegisteredServiceMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CountRegisteredService")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier("CountRegisteredService")) .build(); } } } return getCountRegisteredServiceMethod; } /** * Creates a new async stub that supports all call types for the service */ public static ServiceLifecycleStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ServiceLifecycleStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceLifecycleStub(channel, callOptions); } }; return ServiceLifecycleStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static ServiceLifecycleBlockingStub newBlockingStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ServiceLifecycleBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceLifecycleBlockingStub(channel, callOptions); } }; return ServiceLifecycleBlockingStub.newStub(factory, channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static ServiceLifecycleFutureStub newFutureStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ServiceLifecycleFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceLifecycleFutureStub(channel, callOptions); } }; return ServiceLifecycleFutureStub.newStub(factory, channel); } /** */ public interface AsyncService { /** */ default void registerService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getRegisterServiceMethod(), responseObserver); } /** */ default void createInstanceMeta(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateInstanceMetaMethod(), responseObserver); } /** */ default void deregisterServiceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeregisterServiceByClientIdMethod(), 
responseObserver); } /** */ default void deregisterServiceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeregisterServiceByInstanceIdMethod(), responseObserver); } /** */ default void hasOwnedServiceByServiceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceByServiceIdMethod(), responseObserver); } /** */ default void hasOwnedServiceInstanceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceInstanceByClientIdMethod(), responseObserver); } /** */ default void hasOwnedServiceInstanceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceInstanceByInstanceIdMethod(), responseObserver); } /** */ default void hasOwnedServiceClient(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceClientMethod(), responseObserver); } /** */ default void countRegisteredService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCountRegisteredServiceMethod(), responseObserver); } } /** * Base class for the server implementation of the service ServiceLifecycle. */ public static abstract class ServiceLifecycleImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return ServiceLifecycleGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service ServiceLifecycle. 
*/ public static final class ServiceLifecycleStub extends io.grpc.stub.AbstractAsyncStub { private ServiceLifecycleStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ServiceLifecycleStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceLifecycleStub(channel, callOptions); } /** */ public void registerService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getRegisterServiceMethod(), getCallOptions()), request, responseObserver); } /** */ public void createInstanceMeta(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateInstanceMetaMethod(), getCallOptions()), request, responseObserver); } /** */ public void deregisterServiceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeregisterServiceByClientIdMethod(), getCallOptions()), request, responseObserver); } /** */ public void deregisterServiceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeregisterServiceByInstanceIdMethod(), getCallOptions()), request, responseObserver); } /** */ public void hasOwnedServiceByServiceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getHasOwnedServiceByServiceIdMethod(), getCallOptions()), request, responseObserver); } /** */ public void hasOwnedServiceInstanceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getHasOwnedServiceInstanceByClientIdMethod(), getCallOptions()), request, responseObserver); } /** */ public void hasOwnedServiceInstanceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getHasOwnedServiceInstanceByInstanceIdMethod(), getCallOptions()), request, responseObserver); } /** */ public void hasOwnedServiceClient(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getHasOwnedServiceClientMethod(), getCallOptions()), request, responseObserver); } /** */ public void countRegisteredService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCountRegisteredServiceMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service ServiceLifecycle. 
*/ public static final class ServiceLifecycleBlockingStub extends io.grpc.stub.AbstractBlockingStub { private ServiceLifecycleBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ServiceLifecycleBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceLifecycleBlockingStub(channel, callOptions); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply registerService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRegisterServiceMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply createInstanceMeta(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateInstanceMetaMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply deregisterServiceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeregisterServiceByClientIdMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply deregisterServiceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeregisterServiceByInstanceIdMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceByServiceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getHasOwnedServiceByServiceIdMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceInstanceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getHasOwnedServiceInstanceByClientIdMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceInstanceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getHasOwnedServiceInstanceByInstanceIdMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceClient(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getHasOwnedServiceClientMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply countRegisteredService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCountRegisteredServiceMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service ServiceLifecycle. 
*/ public static final class ServiceLifecycleFutureStub extends io.grpc.stub.AbstractFutureStub { private ServiceLifecycleFutureStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ServiceLifecycleFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceLifecycleFutureStub(channel, callOptions); } /** */ public com.google.common.util.concurrent.ListenableFuture registerService( com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getRegisterServiceMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture createInstanceMeta( com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateInstanceMetaMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture deregisterServiceByClientId( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeregisterServiceByClientIdMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture deregisterServiceByInstanceId( com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeregisterServiceByInstanceIdMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture hasOwnedServiceByServiceId( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getHasOwnedServiceByServiceIdMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture hasOwnedServiceInstanceByClientId( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getHasOwnedServiceInstanceByClientIdMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture hasOwnedServiceInstanceByInstanceId( com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getHasOwnedServiceInstanceByInstanceIdMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture hasOwnedServiceClient( com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getHasOwnedServiceClientMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture countRegisteredService( com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCountRegisteredServiceMethod(), getCallOptions()), request); } } private static final int METHODID_REGISTER_SERVICE = 0; private static final int METHODID_CREATE_INSTANCE_META = 1; private static final int METHODID_DEREGISTER_SERVICE_BY_CLIENT_ID = 2; private static final int METHODID_DEREGISTER_SERVICE_BY_INSTANCE_ID = 3; private static 
final int METHODID_HAS_OWNED_SERVICE_BY_SERVICE_ID = 4; private static final int METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_CLIENT_ID = 5; private static final int METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_INSTANCE_ID = 6; private static final int METHODID_HAS_OWNED_SERVICE_CLIENT = 7; private static final int METHODID_COUNT_REGISTERED_SERVICE = 8; private static final class MethodHandlers implements io.grpc.stub.ServerCalls.UnaryMethod, io.grpc.stub.ServerCalls.ServerStreamingMethod, io.grpc.stub.ServerCalls.ClientStreamingMethod, io.grpc.stub.ServerCalls.BidiStreamingMethod { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { case METHODID_REGISTER_SERVICE: serviceImpl.registerService((com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_CREATE_INSTANCE_META: serviceImpl.createInstanceMeta((com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_DEREGISTER_SERVICE_BY_CLIENT_ID: serviceImpl.deregisterServiceByClientId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_DEREGISTER_SERVICE_BY_INSTANCE_ID: serviceImpl.deregisterServiceByInstanceId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_HAS_OWNED_SERVICE_BY_SERVICE_ID: serviceImpl.hasOwnedServiceByServiceId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_CLIENT_ID: serviceImpl.hasOwnedServiceInstanceByClientId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_INSTANCE_ID: serviceImpl.hasOwnedServiceInstanceByInstanceId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_HAS_OWNED_SERVICE_CLIENT: serviceImpl.hasOwnedServiceClient((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_COUNT_REGISTERED_SERVICE: serviceImpl.countRegisteredService((com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver invoke( io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getRegisterServiceMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply>( service, METHODID_REGISTER_SERVICE))) .addMethod( getCreateInstanceMetaMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>( service, METHODID_CREATE_INSTANCE_META))) .addMethod( getDeregisterServiceByClientIdMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>( service, METHODID_DEREGISTER_SERVICE_BY_CLIENT_ID))) .addMethod( getDeregisterServiceByInstanceIdMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>( service, METHODID_DEREGISTER_SERVICE_BY_INSTANCE_ID))) .addMethod( getHasOwnedServiceByServiceIdMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>( service, METHODID_HAS_OWNED_SERVICE_BY_SERVICE_ID))) .addMethod( getHasOwnedServiceInstanceByClientIdMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>( service, METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_CLIENT_ID))) .addMethod( getHasOwnedServiceInstanceByInstanceIdMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>( service, METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_INSTANCE_ID))) .addMethod( getHasOwnedServiceClientMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>( service, METHODID_HAS_OWNED_SERVICE_CLIENT))) .addMethod( getCountRegisteredServiceMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply>( service, METHODID_COUNT_REGISTERED_SERVICE))) .build(); } private static abstract class ServiceLifecycleBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { ServiceLifecycleBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("ServiceLifecycle"); } } private static final class ServiceLifecycleFileDescriptorSupplier extends ServiceLifecycleBaseDescriptorSupplier { ServiceLifecycleFileDescriptorSupplier() {} } private static final class ServiceLifecycleMethodDescriptorSupplier extends 
ServiceLifecycleBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; ServiceLifecycleMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (ServiceLifecycleGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new ServiceLifecycleFileDescriptorSupplier()) .addMethod(getRegisterServiceMethod()) .addMethod(getCreateInstanceMetaMethod()) .addMethod(getDeregisterServiceByClientIdMethod()) .addMethod(getDeregisterServiceByInstanceIdMethod()) .addMethod(getHasOwnedServiceByServiceIdMethod()) .addMethod(getHasOwnedServiceInstanceByClientIdMethod()) .addMethod(getHasOwnedServiceInstanceByInstanceIdMethod()) .addMethod(getHasOwnedServiceClientMethod()) .addMethod(getCountRegisteredServiceMethod()) .build(); } } } return result; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceLifecycleProto.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_lifecycle.proto package com.pinecone.hydra.service.registry.grpc.server.lifecycle; public final class ServiceLifecycleProto { private ServiceLifecycleProto() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_RegisterServiceRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_RegisterServiceRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_RegisterServiceReply_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_RegisterServiceReply_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_CreateInstanceMetaRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_CreateInstanceMetaRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_ClientIdRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_ClientIdRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_InstanceIdRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_InstanceIdRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_ServiceIdRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_ServiceIdRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
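For orientation between the generated files: a minimal sketch of how ServiceLifecycleGrpc is typically wired up. Both snippets are illustrative assumptions (the class names, the in-memory registry, the address, and all values are invented for this sketch and do not appear in this repository). Server side, an implementation extends the generated ServiceLifecycleImplBase and completes each unary call through its StreamObserver; any RPC left un-overridden keeps the generated default, which answers UNIMPLEMENTED:

package com.pinecone.hydra.service.registry.grpc.server.lifecycle;

import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical implementation for illustration only: backs two of the nine RPCs
// with an in-memory map; the remaining RPCs keep the generated UNIMPLEMENTED defaults.
public class InMemoryServiceLifecycle extends ServiceLifecycleGrpc.ServiceLifecycleImplBase {
  private final ConcurrentHashMap<String, RegisterServiceRequest> registry = new ConcurrentHashMap<>();

  @Override
  public void registerService(RegisterServiceRequest request,
      io.grpc.stub.StreamObserver<RegisterServiceReply> responseObserver) {
    String instanceId = UUID.randomUUID().toString(); // assumption: a fresh GUID per registration
    registry.put(instanceId, request);
    responseObserver.onNext(RegisterServiceReply.newBuilder().setInstanceId(instanceId).build());
    responseObserver.onCompleted();
  }

  @Override
  public void countRegisteredService(EmptyRequest request,
      io.grpc.stub.StreamObserver<CountReply> responseObserver) {
    responseObserver.onNext(CountReply.newBuilder().setValue(registry.size()).build());
    responseObserver.onCompleted();
  }
}

Client side, the same service through the generated blocking stub (host, port, and field values are likewise invented):

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest;
import com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleGrpc;

// Hypothetical client: build a channel, make one unary call, shut the channel down.
public class LifecycleClientSketch {
  public static void main(String[] args) {
    ManagedChannel channel = ManagedChannelBuilder
        .forAddress("localhost", 50051)   // assumption: server address
        .usePlaintext()
        .build();
    ServiceLifecycleGrpc.ServiceLifecycleBlockingStub stub =
        ServiceLifecycleGrpc.newBlockingStub(channel);
    RegisterServiceReply reply = stub.registerService(
        RegisterServiceRequest.newBuilder()
            .setClientId(42L)             // int64 clientId = 1
            .setServiceId("demo-service") // string serviceId = 2
            .setDeployId("deploy-0")      // string deployId = 3
            .build());
    System.out.println("instanceId = " + reply.getInstanceId());
    channel.shutdown();
  }
}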
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceLifecycleProto.java
================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: service_lifecycle.proto

package com.pinecone.hydra.service.registry.grpc.server.lifecycle;

public final class ServiceLifecycleProto {
  private ServiceLifecycleProto() {}

  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (com.google.protobuf.ExtensionRegistryLite) registry);
  }

  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_RegisterServiceRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_RegisterServiceRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_RegisterServiceReply_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_RegisterServiceReply_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_CreateInstanceMetaRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_CreateInstanceMetaRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_ClientIdRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_ClientIdRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_InstanceIdRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_InstanceIdRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_ServiceIdRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_ServiceIdRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_BoolReply_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_BoolReply_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_CountReply_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_CountReply_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_EmptyRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_EmptyRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_EmptyReply_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_EmptyReply_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\027service_lifecycle.proto\"O\n\026RegisterSer" +
      "viceRequest\022\020\n\010clientId\030\001 \001(\003\022\021\n\tservice" +
      "Id\030\002 \001(\t\022\020\n\010deployId\030\003 \001(\t\"*\n\024RegisterSe" +
      "rviceReply\022\022\n\ninstanceId\030\001 \001(\t\"1\n\031Create" +
      "InstanceMetaRequest\022\024\n\014instanceGuid\030\001 \001(" +
      "\t\"#\n\017ClientIdRequest\022\020\n\010clientId\030\001 \001(\003\"\'" +
      "\n\021InstanceIdRequest\022\022\n\ninstanceId\030\001 \001(\t\"" +
      "%\n\020ServiceIdRequest\022\021\n\tserviceId\030\001 \001(\t\"\032" +
      "\n\tBoolReply\022\r\n\005value\030\001 \001(\010\"\033\n\nCountReply" +
      "\022\r\n\005value\030\001 \001(\005\"\016\n\014EmptyRequest\"\014\n\nEmpty" +
      "Reply2\307\004\n\020ServiceLifecycle\022A\n\017RegisterSe" +
      "rvice\022\027.RegisterServiceRequest\032\025.Registe" +
      "rServiceReply\022<\n\022CreateInstanceMeta\022\032.Cr" +
      "eateInstanceMetaRequest\032\n.BoolReply\022<\n\033D" +
      "eregisterServiceByClientId\022\020.ClientIdReq" +
      "uest\032\013.EmptyReply\022@\n\035DeregisterServiceBy" +
      "InstanceId\022\022.InstanceIdRequest\032\013.EmptyRe" +
      "ply\022;\n\032HasOwnedServiceByServiceId\022\021.Serv" +
      "iceIdRequest\032\n.BoolReply\022A\n!HasOwnedServ" +
      "iceInstanceByClientId\022\020.ClientIdRequest\032" +
      "\n.BoolReply\022E\n#HasOwnedServiceInstanceBy" +
      "InstanceId\022\022.InstanceIdRequest\032\n.BoolRep" +
      "ly\0225\n\025HasOwnedServiceClient\022\020.ClientIdRe" +
      "quest\032\n.BoolReply\0224\n\026CountRegisteredServ" +
      "ice\022\r.EmptyRequest\032\013.CountReplyBT\n9com.p" +
      "inecone.hydra.service.registry.grpc.serv" +
      "er.lifecycleB\025ServiceLifecycleProtoP\001b\006p" +
      "roto3"
    };
    descriptor = com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        });
    internal_static_RegisterServiceRequest_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_RegisterServiceRequest_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_RegisterServiceRequest_descriptor,
        new java.lang.String[] { "ClientId", "ServiceId", "DeployId", });
    internal_static_RegisterServiceReply_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_RegisterServiceReply_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_RegisterServiceReply_descriptor,
        new java.lang.String[] { "InstanceId", });
    internal_static_CreateInstanceMetaRequest_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_CreateInstanceMetaRequest_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_CreateInstanceMetaRequest_descriptor,
        new java.lang.String[] { "InstanceGuid", });
    internal_static_ClientIdRequest_descriptor =
      getDescriptor().getMessageTypes().get(3);
    internal_static_ClientIdRequest_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_ClientIdRequest_descriptor,
        new java.lang.String[] { "ClientId", });
    internal_static_InstanceIdRequest_descriptor =
      getDescriptor().getMessageTypes().get(4);
    internal_static_InstanceIdRequest_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_InstanceIdRequest_descriptor,
        new java.lang.String[] { "InstanceId", });
    internal_static_ServiceIdRequest_descriptor =
      getDescriptor().getMessageTypes().get(5);
    internal_static_ServiceIdRequest_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_ServiceIdRequest_descriptor,
        new java.lang.String[] { "ServiceId", });
    internal_static_BoolReply_descriptor =
      getDescriptor().getMessageTypes().get(6);
    internal_static_BoolReply_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_BoolReply_descriptor,
        new java.lang.String[] { "Value", });
    internal_static_CountReply_descriptor =
      getDescriptor().getMessageTypes().get(7);
    internal_static_CountReply_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_CountReply_descriptor,
        new java.lang.String[] { "Value", });
    internal_static_EmptyRequest_descriptor =
      getDescriptor().getMessageTypes().get(8);
    internal_static_EmptyRequest_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_EmptyRequest_descriptor,
        new java.lang.String[] { });
    internal_static_EmptyReply_descriptor =
      getDescriptor().getMessageTypes().get(9);
    internal_static_EmptyReply_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_EmptyReply_descriptor,
        new java.lang.String[] { });
  }

  // @@protoc_insertion_point(outer_class_scope)
}
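The descriptorData string above is the serialized FileDescriptorProto for service_lifecycle.proto. Decoded for reference, the source schema is approximately the following (a reconstruction from the descriptor bytes and the field-accessor tables; the original file's comments and formatting are unknown):

syntax = "proto3";

option java_package = "com.pinecone.hydra.service.registry.grpc.server.lifecycle";
option java_outer_classname = "ServiceLifecycleProto";
option java_multiple_files = true;

message RegisterServiceRequest {
  int64  clientId  = 1;
  string serviceId = 2;
  string deployId  = 3;
}
message RegisterServiceReply     { string instanceId   = 1; }
message CreateInstanceMetaRequest { string instanceGuid = 1; }
message ClientIdRequest   { int64  clientId   = 1; }
message InstanceIdRequest { string instanceId = 1; }
message ServiceIdRequest  { string serviceId  = 1; }
message BoolReply  { bool  value = 1; }
message CountReply { int32 value = 1; }
message EmptyRequest {}
message EmptyReply {}

service ServiceLifecycle {
  rpc RegisterService(RegisterServiceRequest) returns (RegisterServiceReply);
  rpc CreateInstanceMeta(CreateInstanceMetaRequest) returns (BoolReply);
  rpc DeregisterServiceByClientId(ClientIdRequest) returns (EmptyReply);
  rpc DeregisterServiceByInstanceId(InstanceIdRequest) returns (EmptyReply);
  rpc HasOwnedServiceByServiceId(ServiceIdRequest) returns (BoolReply);
  rpc HasOwnedServiceInstanceByClientId(ClientIdRequest) returns (BoolReply);
  rpc HasOwnedServiceInstanceByInstanceId(InstanceIdRequest) returns (BoolReply);
  rpc HasOwnedServiceClient(ClientIdRequest) returns (BoolReply);
  rpc CountRegisteredService(EmptyRequest) returns (CountReply);
}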
================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ClientIdRequest.java
================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: service_meta.proto

package com.pinecone.hydra.service.registry.grpc.server.meta;

/**
 * Protobuf type {@code pinecone.meta.ClientIdRequest}
 */
public final class ClientIdRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:pinecone.meta.ClientIdRequest)
    ClientIdRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ClientIdRequest.newBuilder() to construct.
  private ClientIdRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ClientIdRequest() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ClientIdRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.Builder.class);
  }

  public static final int CLIENTID_FIELD_NUMBER = 1;
  private long clientId_ = 0L;
  /**
   * <code>int64 clientId = 1;</code>
   * @return The clientId.
   */
  @java.lang.Override
  public long getClientId() {
    return clientId_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (clientId_ != 0L) {
      output.writeInt64(1, clientId_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (clientId_ != 0L) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt64Size(1, clientId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest)) {
      return super.equals(obj);
    }
    com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest) obj;

    if (getClientId()
        != other.getClientId()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CLIENTID_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
        getClientId());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code pinecone.meta.ClientIdRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:pinecone.meta.ClientIdRequest)
      com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.Builder.class);
    }

    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.newBuilder()
    private Builder() {
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      clientId_ = 0L;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_descriptor;
    }

    @java.lang.Override
    public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest getDefaultInstanceForType() {
      return com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest build() {
      com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest buildPartial() {
      com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.clientId_ = clientId_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest) {
        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest other) {
      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.getDefaultInstance()) return this;
      if (other.getClientId() != 0L) {
        setClientId(other.getClientId());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              clientId_ = input.readInt64();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private long clientId_ ;
    /**
     * <code>int64 clientId = 1;</code>
     * @return The clientId.
     */
    @java.lang.Override
    public long getClientId() {
      return clientId_;
    }
    /**
     * <code>int64 clientId = 1;</code>
     * @param value The clientId to set.
     * @return This builder for chaining.
     */
    public Builder setClientId(long value) {
      clientId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <code>int64 clientId = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearClientId() {
      bitField0_ = (bitField0_ & ~0x00000001);
      clientId_ = 0L;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:pinecone.meta.ClientIdRequest)
  }

  // @@protoc_insertion_point(class_scope:pinecone.meta.ClientIdRequest)
  private static final com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest();
  }

  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ClientIdRequest>
      PARSER = new com.google.protobuf.AbstractParser<ClientIdRequest>() {
    @java.lang.Override
    public ClientIdRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<ClientIdRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ClientIdRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}


================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ClientIdRequestOrBuilder.java
================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: service_meta.proto

package com.pinecone.hydra.service.registry.grpc.server.meta;

public interface ClientIdRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:pinecone.meta.ClientIdRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <code>int64 clientId = 1;</code>
   * @return The clientId.
   */
  long getClientId();
}
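A quick round-trip through the generated message class above, as a sketch with invented values. Note that writeTo and getSerializedSize skip clientId_ when it equals 0L: proto3 omits scalar fields at their default value, so a default ClientIdRequest serializes to zero bytes.

import com.google.protobuf.InvalidProtocolBufferException;
import com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest;

// Hypothetical round-trip: build, serialize to the wire format, parse back.
public class ClientIdRequestRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    ClientIdRequest request = ClientIdRequest.newBuilder()
        .setClientId(7L)                 // int64 clientId = 1
        .build();
    byte[] wire = request.toByteArray(); // tag 0x08 + varint 7 -> two bytes
    ClientIdRequest parsed = ClientIdRequest.parseFrom(wire);
    System.out.println(parsed.getClientId()); // 7
  }
}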
private CreateNewServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private CreateNewServiceRequest() { parentAppPath_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CreateNewServiceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.Builder.class); } private int bitField0_; public static final int PARENTAPPPATH_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parentAppPath_ = ""; /** * string parentAppPath = 1; * @return The parentAppPath. */ @java.lang.Override public java.lang.String getParentAppPath() { java.lang.Object ref = parentAppPath_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parentAppPath_ = s; return s; } } /** * string parentAppPath = 1; * @return The bytes for parentAppPath. */ @java.lang.Override public com.google.protobuf.ByteString getParentAppPathBytes() { java.lang.Object ref = parentAppPath_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); parentAppPath_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int META_FIELD_NUMBER = 2; private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_; /** * .pinecone.meta.ServiceMetaDTO meta = 2; * @return Whether the meta field is set. */ @java.lang.Override public boolean hasMeta() { return ((bitField0_ & 0x00000001) != 0); } /** * .pinecone.meta.ServiceMetaDTO meta = 2; * @return The meta. */ @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() { return meta_ == null ? com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_; } /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() { return meta_ == null ? 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parentAppPath_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parentAppPath_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getMeta()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parentAppPath_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parentAppPath_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getMeta()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest) obj; if (!getParentAppPath() .equals(other.getParentAppPath())) return false; if (hasMeta() != other.hasMeta()) return false; if (hasMeta()) { if (!getMeta() .equals(other.getMeta())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENTAPPPATH_FIELD_NUMBER; hash = (53 * hash) + getParentAppPath().hashCode(); if (hasMeta()) { hash = (37 * hash) + META_FIELD_NUMBER; hash = (53 * hash) + getMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest 
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.CreateNewServiceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:pinecone.meta.CreateNewServiceRequest) com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMetaFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parentAppPath_ = ""; meta_ = null; if (metaBuilder_ != null) { metaBuilder_.dispose(); metaBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest build() { com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parentAppPath_ = parentAppPath_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.meta_ = metaBuilder_ == null ? 
meta_ : metaBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.getDefaultInstance()) return this; if (!other.getParentAppPath().isEmpty()) { parentAppPath_ = other.parentAppPath_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasMeta()) { mergeMeta(other.getMeta()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parentAppPath_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getMetaFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parentAppPath_ = ""; /** * string parentAppPath = 1; * @return The parentAppPath. */ public java.lang.String getParentAppPath() { java.lang.Object ref = parentAppPath_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parentAppPath_ = s; return s; } else { return (java.lang.String) ref; } } /** * string parentAppPath = 1; * @return The bytes for parentAppPath. 
*/ public com.google.protobuf.ByteString getParentAppPathBytes() { java.lang.Object ref = parentAppPath_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); parentAppPath_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string parentAppPath = 1; * @param value The parentAppPath to set. * @return This builder for chaining. */ public Builder setParentAppPath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } parentAppPath_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string parentAppPath = 1; * @return This builder for chaining. */ public Builder clearParentAppPath() { parentAppPath_ = getDefaultInstance().getParentAppPath(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string parentAppPath = 1; * @param value The bytes for parentAppPath to set. * @return This builder for chaining. */ public Builder setParentAppPathBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parentAppPath_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_; private com.google.protobuf.SingleFieldBuilderV3< com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> metaBuilder_; /** * .pinecone.meta.ServiceMetaDTO meta = 2; * @return Whether the meta field is set. */ public boolean hasMeta() { return ((bitField0_ & 0x00000002) != 0); } /** * .pinecone.meta.ServiceMetaDTO meta = 2; * @return The meta. */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() { if (metaBuilder_ == null) { return meta_ == null ? 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_; } else { return metaBuilder_.getMessage(); } } /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ public Builder setMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) { if (metaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } meta_ = value; } else { metaBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ public Builder setMeta( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) { if (metaBuilder_ == null) { meta_ = builderForValue.build(); } else { metaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ public Builder mergeMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) { if (metaBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && meta_ != null && meta_ != com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()) { getMetaBuilder().mergeFrom(value); } else { meta_ = value; } } else { metaBuilder_.mergeFrom(value); } if (meta_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ public Builder clearMeta() { bitField0_ = (bitField0_ & ~0x00000002); meta_ = null; if (metaBuilder_ != null) { metaBuilder_.dispose(); metaBuilder_ = null; } onChanged(); return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder getMetaBuilder() { bitField0_ |= 0x00000002; onChanged(); return getMetaFieldBuilder().getBuilder(); } /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() { if (metaBuilder_ != null) { return metaBuilder_.getMessageOrBuilder(); } else { return meta_ == null ? 
              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;
      }
    }
    /**
     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>
        getMetaFieldBuilder() {
      if (metaBuilder_ == null) {
        metaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>(
                getMeta(),
                getParentForChildren(),
                isClean());
        meta_ = null;
      }
      return metaBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:pinecone.meta.CreateNewServiceRequest)
  }

  // @@protoc_insertion_point(class_scope:pinecone.meta.CreateNewServiceRequest)
  private static final com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest();
  }

  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateNewServiceRequest>
      PARSER = new com.google.protobuf.AbstractParser<CreateNewServiceRequest>() {
    @java.lang.Override
    public CreateNewServiceRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<CreateNewServiceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateNewServiceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/CreateNewServiceRequestOrBuilder.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: service_meta.proto

package com.pinecone.hydra.service.registry.grpc.server.meta;

public interface CreateNewServiceRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:pinecone.meta.CreateNewServiceRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <code>string parentAppPath = 1;</code>
   * @return The parentAppPath.
*/ java.lang.String getParentAppPath(); /** * string parentAppPath = 1; * @return The bytes for parentAppPath. */ com.google.protobuf.ByteString getParentAppPathBytes(); /** * .pinecone.meta.ServiceMetaDTO meta = 2; * @return Whether the meta field is set. */ boolean hasMeta(); /** * .pinecone.meta.ServiceMetaDTO meta = 2; * @return The meta. */ com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta(); /** * .pinecone.meta.ServiceMetaDTO meta = 2; */ com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/EvalRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.EvalRequest} */ public final class EvalRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.EvalRequest) EvalRequestOrBuilder { private static final long serialVersionUID = 0L; // Use EvalRequest.newBuilder() to construct. private EvalRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private EvalRequest() { jsonStatement_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new EvalRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.Builder.class); } public static final int JSONSTATEMENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object jsonStatement_ = ""; /** * string jsonStatement = 1; * @return The jsonStatement. */ @java.lang.Override public java.lang.String getJsonStatement() { java.lang.Object ref = jsonStatement_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); jsonStatement_ = s; return s; } } /** * string jsonStatement = 1; * @return The bytes for jsonStatement. 
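  /*
   * Usage sketch (illustrative, not part of the generated source): constructing
   * the CreateNewServiceRequest shown above. setParentAppPath and setMeta are
   * from the generated builder; the path literal is hypothetical, and a real
   * caller would populate a ServiceMetaDTO rather than pass its default instance.
   *
   *   CreateNewServiceRequest request = CreateNewServiceRequest.newBuilder()
   *       .setParentAppPath("/apps/example")
   *       .setMeta(ServiceMetaDTO.getDefaultInstance())
   *       .build();
   *   // hasMeta() is true here: message fields track explicit presence,
   *   // even when the set value equals the default instance.
   *   boolean metaSet = request.hasMeta();
   */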
*/ @java.lang.Override public com.google.protobuf.ByteString getJsonStatementBytes() { java.lang.Object ref = jsonStatement_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); jsonStatement_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jsonStatement_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, jsonStatement_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jsonStatement_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, jsonStatement_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest) obj; if (!getJsonStatement() .equals(other.getJsonStatement())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + JSONSTATEMENT_FIELD_NUMBER; hash = (53 * hash) + getJsonStatement().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.EvalRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:pinecone.meta.EvalRequest) com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; jsonStatement_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest build() { com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.jsonStatement_ = jsonStatement_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override 
public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.getDefaultInstance()) return this; if (!other.getJsonStatement().isEmpty()) { jsonStatement_ = other.jsonStatement_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { jsonStatement_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object jsonStatement_ = ""; /** * string jsonStatement = 1; * @return The jsonStatement. */ public java.lang.String getJsonStatement() { java.lang.Object ref = jsonStatement_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); jsonStatement_ = s; return s; } else { return (java.lang.String) ref; } } /** * string jsonStatement = 1; * @return The bytes for jsonStatement. */ public com.google.protobuf.ByteString getJsonStatementBytes() { java.lang.Object ref = jsonStatement_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); jsonStatement_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string jsonStatement = 1; * @param value The jsonStatement to set. * @return This builder for chaining. */ public Builder setJsonStatement( java.lang.String value) { if (value == null) { throw new NullPointerException(); } jsonStatement_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string jsonStatement = 1; * @return This builder for chaining. */ public Builder clearJsonStatement() { jsonStatement_ = getDefaultInstance().getJsonStatement(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string jsonStatement = 1; * @param value The bytes for jsonStatement to set. * @return This builder for chaining. 
     */
    public Builder setJsonStatementBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      jsonStatement_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:pinecone.meta.EvalRequest)
  }

  // @@protoc_insertion_point(class_scope:pinecone.meta.EvalRequest)
  private static final com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest();
  }

  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<EvalRequest>
      PARSER = new com.google.protobuf.AbstractParser<EvalRequest>() {
    @java.lang.Override
    public EvalRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<EvalRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<EvalRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/EvalRequestOrBuilder.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: service_meta.proto

package com.pinecone.hydra.service.registry.grpc.server.meta;

public interface EvalRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:pinecone.meta.EvalRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <code>string jsonStatement = 1;</code>
   * @return The jsonStatement.
   */
  java.lang.String getJsonStatement();

  /**
   * <code>string jsonStatement = 1;</code>
   * @return The bytes for jsonStatement.
   */
  com.google.protobuf.ByteString getJsonStatementBytes();
}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/GuidRequest.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
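/*
 * Usage sketch (illustrative, not part of the generated source): the EvalRequest
 * message above carries a statement as a JSON string. The payload below is a
 * hypothetical example; any JSON the server-side evaluator understands would do.
 * parseFrom(ByteString) is in the generated API; toByteString() comes from
 * protobuf's AbstractMessageLite base class.
 *
 *   EvalRequest request = EvalRequest.newBuilder()
 *       .setJsonStatement("{\"op\": \"listServices\"}")
 *       .build();
 *   EvalRequest decoded = EvalRequest.parseFrom(request.toByteString());
 *   assert decoded.getJsonStatement().equals(request.getJsonStatement());
 */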
// source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.GuidRequest} */ public final class GuidRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.GuidRequest) GuidRequestOrBuilder { private static final long serialVersionUID = 0L; // Use GuidRequest.newBuilder() to construct. private GuidRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GuidRequest() { guid_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new GuidRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.Builder.class); } public static final int GUID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object guid_ = ""; /** * string guid = 1; * @return The guid. */ @java.lang.Override public java.lang.String getGuid() { java.lang.Object ref = guid_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); guid_ = s; return s; } } /** * string guid = 1; * @return The bytes for guid. 
*/ @java.lang.Override public com.google.protobuf.ByteString getGuidBytes() { java.lang.Object ref = guid_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); guid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, guid_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, guid_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest) obj; if (!getGuid() .equals(other.getGuid())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + GUID_FIELD_NUMBER; hash = (53 * hash) + getGuid().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.GuidRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:pinecone.meta.GuidRequest) com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; guid_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest build() { com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.guid_ = guid_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.getDefaultInstance()) return this; if (!other.getGuid().isEmpty()) { guid_ = other.guid_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { guid_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object guid_ = ""; /** * string guid = 1; * @return The guid. */ public java.lang.String getGuid() { java.lang.Object ref = guid_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); guid_ = s; return s; } else { return (java.lang.String) ref; } } /** * string guid = 1; * @return The bytes for guid. */ public com.google.protobuf.ByteString getGuidBytes() { java.lang.Object ref = guid_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); guid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string guid = 1; * @param value The guid to set. * @return This builder for chaining. */ public Builder setGuid( java.lang.String value) { if (value == null) { throw new NullPointerException(); } guid_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string guid = 1; * @return This builder for chaining. */ public Builder clearGuid() { guid_ = getDefaultInstance().getGuid(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string guid = 1; * @param value The bytes for guid to set. * @return This builder for chaining. 
     */
    public Builder setGuidBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      guid_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:pinecone.meta.GuidRequest)
  }

  // @@protoc_insertion_point(class_scope:pinecone.meta.GuidRequest)
  private static final com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest();
  }

  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GuidRequest>
      PARSER = new com.google.protobuf.AbstractParser<GuidRequest>() {
    @java.lang.Override
    public GuidRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<GuidRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GuidRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/GuidRequestOrBuilder.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: service_meta.proto

package com.pinecone.hydra.service.registry.grpc.server.meta;

public interface GuidRequestOrBuilder extends
    // @@protoc_insertion_point(interface_extends:pinecone.meta.GuidRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <code>string guid = 1;</code>
   * @return The guid.
   */
  java.lang.String getGuid();

  /**
   * <code>string guid = 1;</code>
   * @return The bytes for guid.
   */
  com.google.protobuf.ByteString getGuidBytes();
}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/PathRequest.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
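/*
 * Usage sketch (illustrative, not part of the generated source): the GuidRequest
 * message above addresses an object by a GUID string. A random UUID stands in
 * for a real object GUID here.
 *
 *   GuidRequest request = GuidRequest.newBuilder()
 *       .setGuid(java.util.UUID.randomUUID().toString())
 *       .build();
 *   String guid = request.getGuid();
 */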
// source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.PathRequest} */ public final class PathRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.PathRequest) PathRequestOrBuilder { private static final long serialVersionUID = 0L; // Use PathRequest.newBuilder() to construct. private PathRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PathRequest() { path_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PathRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.Builder.class); } public static final int PATH_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object path_ = ""; /** * string path = 1; * @return The path. */ @java.lang.Override public java.lang.String getPath() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } } /** * string path = 1; * @return The bytes for path.
*/ @java.lang.Override public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, path_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, path_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest) obj; if (!getPath() .equals(other.getPath())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.PathRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:pinecone.meta.PathRequest) com.pinecone.hydra.service.registry.grpc.server.meta.PathRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; path_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest build() { com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.path_ = path_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.getDefaultInstance()) return this; if (!other.getPath().isEmpty()) { path_ = other.path_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { path_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object path_ = ""; /** * string path = 1; * @return The path. */ public java.lang.String getPath() { java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } else { return (java.lang.String) ref; } } /** * string path = 1; * @return The bytes for path. */ public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string path = 1; * @param value The path to set. * @return This builder for chaining. */ public Builder setPath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } path_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string path = 1; * @return This builder for chaining. */ public Builder clearPath() { path_ = getDefaultInstance().getPath(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string path = 1; * @param value The bytes for path to set. * @return This builder for chaining. 
*/ public Builder setPathBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); path_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:pinecone.meta.PathRequest) } // @@protoc_insertion_point(class_scope:pinecone.meta.PathRequest) private static final com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PathRequest> PARSER = new com.google.protobuf.AbstractParser<PathRequest>() { @java.lang.Override public PathRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PathRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PathRequest> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/PathRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; public interface PathRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:pinecone.meta.PathRequest) com.google.protobuf.MessageOrBuilder { /** * string path = 1; * @return The path. */ java.lang.String getPath(); /** * string path = 1; * @return The bytes for path. */ com.google.protobuf.ByteString getPathBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceIdRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT!
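// ---------------------------------------------------------------------
// Editor's note, not part of the generated file: a minimal sketch of
// building and round-tripping the ServiceIdRequest message defined below;
// the id literal is a hypothetical example value.
//
//   ServiceIdRequest request = ServiceIdRequest.newBuilder()
//       .setServiceId("svc-0001")              // assumed sample id
//       .build();
//   // Copy via the prototype overload of newBuilder, then re-parse:
//   ServiceIdRequest copy = ServiceIdRequest.newBuilder(request).build();
//   ServiceIdRequest parsed = ServiceIdRequest.parseFrom(copy.toByteArray());
// ---------------------------------------------------------------------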
// source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.ServiceIdRequest} */ public final class ServiceIdRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceIdRequest) ServiceIdRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ServiceIdRequest.newBuilder() to construct. private ServiceIdRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ServiceIdRequest() { serviceId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ServiceIdRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.Builder.class); } public static final int SERVICEID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object serviceId_ = ""; /** * string serviceId = 1; * @return The serviceId. */ @java.lang.Override public java.lang.String getServiceId() { java.lang.Object ref = serviceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceId_ = s; return s; } } /** * string serviceId = 1; * @return The bytes for serviceId.
*/ @java.lang.Override public com.google.protobuf.ByteString getServiceIdBytes() { java.lang.Object ref = serviceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, serviceId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, serviceId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest) obj; if (!getServiceId() .equals(other.getServiceId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SERVICEID_FIELD_NUMBER; hash = (53 * hash) + getServiceId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.ServiceIdRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceIdRequest) com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; serviceId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest build() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.serviceId_ = serviceId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.getDefaultInstance()) return this; if (!other.getServiceId().isEmpty()) { serviceId_ = other.serviceId_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { serviceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object serviceId_ = ""; /** * string serviceId = 1; * @return The serviceId. */ public java.lang.String getServiceId() { java.lang.Object ref = serviceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * string serviceId = 1; * @return The bytes for serviceId. */ public com.google.protobuf.ByteString getServiceIdBytes() { java.lang.Object ref = serviceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string serviceId = 1; * @param value The serviceId to set. * @return This builder for chaining. */ public Builder setServiceId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } serviceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string serviceId = 1; * @return This builder for chaining. */ public Builder clearServiceId() { serviceId_ = getDefaultInstance().getServiceId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string serviceId = 1; * @param value The bytes for serviceId to set. * @return This builder for chaining. 
*/ public Builder setServiceIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); serviceId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceIdRequest) } // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceIdRequest) private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest(); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ServiceIdRequest> PARSER = new com.google.protobuf.AbstractParser<ServiceIdRequest>() { @java.lang.Override public ServiceIdRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ServiceIdRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ServiceIdRequest> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceIdRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; public interface ServiceIdRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceIdRequest) com.google.protobuf.MessageOrBuilder { /** * string serviceId = 1; * @return The serviceId. */ java.lang.String getServiceId(); /** * string serviceId = 1; * @return The bytes for serviceId. */ com.google.protobuf.ByteString getServiceIdBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTO.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT!
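// ---------------------------------------------------------------------
// Editor's note, not part of the generated file: ServiceMetaDTO below
// carries twelve string fields (guid, name, type, displayName,
// description, fullName, groupNamespace, groupName, scenario,
// primaryImplLang, extraInformation, level). A minimal builder sketch
// with placeholder values; the per-field setters follow standard protoc
// naming and are assumed from the field declarations:
//
//   ServiceMetaDTO meta = ServiceMetaDTO.newBuilder()
//       .setGuid("00000000-0000-0000-0000-000000000000")  // placeholder
//       .setName("example-service")                        // placeholder
//       .setType("grpc")                                   // placeholder
//       .build();
//   String displayName = meta.getDisplayName();  // "" when unset (proto3)
// ---------------------------------------------------------------------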
// source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.ServiceMetaDTO} */ public final class ServiceMetaDTO extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceMetaDTO) ServiceMetaDTOOrBuilder { private static final long serialVersionUID = 0L; // Use ServiceMetaDTO.newBuilder() to construct. private ServiceMetaDTO(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ServiceMetaDTO() { guid_ = ""; name_ = ""; type_ = ""; displayName_ = ""; description_ = ""; fullName_ = ""; groupNamespace_ = ""; groupName_ = ""; scenario_ = ""; primaryImplLang_ = ""; extraInformation_ = ""; level_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ServiceMetaDTO(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder.class); } public static final int GUID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object guid_ = ""; /** * string guid = 1; * @return The guid. */ @java.lang.Override public java.lang.String getGuid() { java.lang.Object ref = guid_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); guid_ = s; return s; } } /** * string guid = 1; * @return The bytes for guid. */ @java.lang.Override public com.google.protobuf.ByteString getGuidBytes() { java.lang.Object ref = guid_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); guid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * string name = 2; * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * string name = 2; * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TYPE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object type_ = ""; /** * string type = 3; * @return The type.
*/ @java.lang.Override public java.lang.String getType() { java.lang.Object ref = type_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); type_ = s; return s; } } /** * string type = 3; * @return The bytes for type. */ @java.lang.Override public com.google.protobuf.ByteString getTypeBytes() { java.lang.Object ref = type_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); type_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DISPLAYNAME_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object displayName_ = ""; /** * string displayName = 4; * @return The displayName. */ @java.lang.Override public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } } /** * string displayName = 4; * @return The bytes for displayName. */ @java.lang.Override public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DESCRIPTION_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object description_ = ""; /** * string description = 5; * @return The description. */ @java.lang.Override public java.lang.String getDescription() { java.lang.Object ref = description_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); description_ = s; return s; } } /** * string description = 5; * @return The bytes for description. */ @java.lang.Override public com.google.protobuf.ByteString getDescriptionBytes() { java.lang.Object ref = description_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); description_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FULLNAME_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object fullName_ = ""; /** * string fullName = 6; * @return The fullName. */ @java.lang.Override public java.lang.String getFullName() { java.lang.Object ref = fullName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); fullName_ = s; return s; } } /** * string fullName = 6; * @return The bytes for fullName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFullNameBytes() { java.lang.Object ref = fullName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); fullName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GROUPNAMESPACE_FIELD_NUMBER = 7; @SuppressWarnings("serial") private volatile java.lang.Object groupNamespace_ = ""; /** * string groupNamespace = 7; * @return The groupNamespace. */ @java.lang.Override public java.lang.String getGroupNamespace() { java.lang.Object ref = groupNamespace_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); groupNamespace_ = s; return s; } } /** * string groupNamespace = 7; * @return The bytes for groupNamespace. */ @java.lang.Override public com.google.protobuf.ByteString getGroupNamespaceBytes() { java.lang.Object ref = groupNamespace_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); groupNamespace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GROUPNAME_FIELD_NUMBER = 8; @SuppressWarnings("serial") private volatile java.lang.Object groupName_ = ""; /** * string groupName = 8; * @return The groupName. */ @java.lang.Override public java.lang.String getGroupName() { java.lang.Object ref = groupName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); groupName_ = s; return s; } } /** * string groupName = 8; * @return The bytes for groupName. */ @java.lang.Override public com.google.protobuf.ByteString getGroupNameBytes() { java.lang.Object ref = groupName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); groupName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SCENARIO_FIELD_NUMBER = 9; @SuppressWarnings("serial") private volatile java.lang.Object scenario_ = ""; /** * string scenario = 9; * @return The scenario. */ @java.lang.Override public java.lang.String getScenario() { java.lang.Object ref = scenario_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); scenario_ = s; return s; } } /** * string scenario = 9; * @return The bytes for scenario. */ @java.lang.Override public com.google.protobuf.ByteString getScenarioBytes() { java.lang.Object ref = scenario_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); scenario_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PRIMARYIMPLLANG_FIELD_NUMBER = 10; @SuppressWarnings("serial") private volatile java.lang.Object primaryImplLang_ = ""; /** * string primaryImplLang = 10; * @return The primaryImplLang. 
*/ @java.lang.Override public java.lang.String getPrimaryImplLang() { java.lang.Object ref = primaryImplLang_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); primaryImplLang_ = s; return s; } } /** * string primaryImplLang = 10; * @return The bytes for primaryImplLang. */ @java.lang.Override public com.google.protobuf.ByteString getPrimaryImplLangBytes() { java.lang.Object ref = primaryImplLang_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); primaryImplLang_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EXTRAINFORMATION_FIELD_NUMBER = 11; @SuppressWarnings("serial") private volatile java.lang.Object extraInformation_ = ""; /** * string extraInformation = 11; * @return The extraInformation. */ @java.lang.Override public java.lang.String getExtraInformation() { java.lang.Object ref = extraInformation_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); extraInformation_ = s; return s; } } /** * string extraInformation = 11; * @return The bytes for extraInformation. */ @java.lang.Override public com.google.protobuf.ByteString getExtraInformationBytes() { java.lang.Object ref = extraInformation_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); extraInformation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LEVEL_FIELD_NUMBER = 12; @SuppressWarnings("serial") private volatile java.lang.Object level_ = ""; /** * string level = 12; * @return The level. */ @java.lang.Override public java.lang.String getLevel() { java.lang.Object ref = level_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); level_ = s; return s; } } /** * string level = 12; * @return The bytes for level. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLevelBytes() { java.lang.Object ref = level_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); level_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, guid_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, displayName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, fullName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupNamespace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 7, groupNamespace_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 8, groupName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(scenario_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, scenario_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(primaryImplLang_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, primaryImplLang_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(extraInformation_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 11, extraInformation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(level_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 12, level_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, guid_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, displayName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullName_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(6, fullName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupNamespace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, groupNamespace_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, groupName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(scenario_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, scenario_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(primaryImplLang_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, primaryImplLang_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(extraInformation_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, extraInformation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(level_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(12, level_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO) obj; if (!getGuid() .equals(other.getGuid())) return false; if (!getName() .equals(other.getName())) return false; if (!getType() .equals(other.getType())) return false; if (!getDisplayName() .equals(other.getDisplayName())) return false; if (!getDescription() .equals(other.getDescription())) return false; if (!getFullName() .equals(other.getFullName())) return false; if (!getGroupNamespace() .equals(other.getGroupNamespace())) return false; if (!getGroupName() .equals(other.getGroupName())) return false; if (!getScenario() .equals(other.getScenario())) return false; if (!getPrimaryImplLang() .equals(other.getPrimaryImplLang())) return false; if (!getExtraInformation() .equals(other.getExtraInformation())) return false; if (!getLevel() .equals(other.getLevel())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + GUID_FIELD_NUMBER; hash = (53 * hash) + getGuid().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + getType().hashCode(); hash = (37 * hash) + DISPLAYNAME_FIELD_NUMBER; hash = (53 * hash) + getDisplayName().hashCode(); hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; hash = (53 * hash) + getDescription().hashCode(); hash = (37 * hash) + FULLNAME_FIELD_NUMBER; hash = (53 * hash) + getFullName().hashCode(); hash = (37 * hash) + GROUPNAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getGroupNamespace().hashCode(); hash = (37 * hash) + GROUPNAME_FIELD_NUMBER; hash = (53 * hash) + getGroupName().hashCode(); hash = (37 * hash) + SCENARIO_FIELD_NUMBER; hash = (53 * hash) + getScenario().hashCode(); hash = (37 * hash) + PRIMARYIMPLLANG_FIELD_NUMBER; hash = (53 * hash) + getPrimaryImplLang().hashCode(); hash = (37 * hash) + EXTRAINFORMATION_FIELD_NUMBER; hash = (53 * hash) + 
getExtraInformation().hashCode(); hash = (37 * hash) + LEVEL_FIELD_NUMBER; hash = (53 * hash) + getLevel().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.ServiceMetaDTO} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceMetaDTO) com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; guid_ = ""; name_ = ""; type_ = ""; displayName_ = ""; description_ = ""; fullName_ = ""; groupNamespace_ = ""; groupName_ = ""; scenario_ = ""; primaryImplLang_ = ""; extraInformation_ = ""; level_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO build() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.guid_ = guid_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.type_ = type_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.displayName_ = displayName_; } if 
(((from_bitField0_ & 0x00000010) != 0)) { result.description_ = description_; } if (((from_bitField0_ & 0x00000020) != 0)) { result.fullName_ = fullName_; } if (((from_bitField0_ & 0x00000040) != 0)) { result.groupNamespace_ = groupNamespace_; } if (((from_bitField0_ & 0x00000080) != 0)) { result.groupName_ = groupName_; } if (((from_bitField0_ & 0x00000100) != 0)) { result.scenario_ = scenario_; } if (((from_bitField0_ & 0x00000200) != 0)) { result.primaryImplLang_ = primaryImplLang_; } if (((from_bitField0_ & 0x00000400) != 0)) { result.extraInformation_ = extraInformation_; } if (((from_bitField0_ & 0x00000800) != 0)) { result.level_ = level_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()) return this; if (!other.getGuid().isEmpty()) { guid_ = other.guid_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getType().isEmpty()) { type_ = other.type_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getDisplayName().isEmpty()) { displayName_ = other.displayName_; bitField0_ |= 0x00000008; onChanged(); } if (!other.getDescription().isEmpty()) { description_ = other.description_; bitField0_ |= 0x00000010; onChanged(); } if (!other.getFullName().isEmpty()) { fullName_ = other.fullName_; bitField0_ |= 0x00000020; onChanged(); } if (!other.getGroupNamespace().isEmpty()) { groupNamespace_ = other.groupNamespace_; bitField0_ |= 0x00000040; onChanged(); } if (!other.getGroupName().isEmpty()) { groupName_ = other.groupName_; bitField0_ |= 0x00000080; onChanged(); } if (!other.getScenario().isEmpty()) { scenario_ = other.scenario_; bitField0_ |= 0x00000100; onChanged(); } if (!other.getPrimaryImplLang().isEmpty()) { primaryImplLang_ = other.primaryImplLang_; bitField0_ |= 0x00000200; onChanged(); } if (!other.getExtraInformation().isEmpty()) { extraInformation_ = other.extraInformation_; bitField0_ |= 0x00000400; onChanged(); } if (!other.getLevel().isEmpty()) { level_ = other.level_; bitField0_ |= 0x00000800; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { guid_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { type_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { displayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { description_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000010; break; } // case 42 case 50: { fullName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000020; break; } // case 50 case 58: { groupNamespace_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000040; break; } // case 58 case 66: { groupName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000080; break; } // case 66 case 74: { scenario_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000100; break; } // case 74 case 82: { primaryImplLang_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000200; break; } // case 82 case 90: { extraInformation_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000400; break; } // case 90 case 98: { level_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000800; break; } // case 98 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object guid_ = ""; /** * string guid = 1; * @return The guid. */ public java.lang.String getGuid() { java.lang.Object ref = guid_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); guid_ = s; return s; } else { return (java.lang.String) ref; } } /** * string guid = 1; * @return The bytes for guid. */ public com.google.protobuf.ByteString getGuidBytes() { java.lang.Object ref = guid_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); guid_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string guid = 1; * @param value The guid to set. * @return This builder for chaining. */ public Builder setGuid( java.lang.String value) { if (value == null) { throw new NullPointerException(); } guid_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string guid = 1; * @return This builder for chaining. */ public Builder clearGuid() { guid_ = getDefaultInstance().getGuid(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string guid = 1; * @param value The bytes for guid to set. * @return This builder for chaining. 
*/ public Builder setGuidBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); guid_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object name_ = ""; /** * string name = 2; * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * string name = 2; * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string name = 2; * @param value The name to set. * @return This builder for chaining. */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * string name = 2; * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * string name = 2; * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object type_ = ""; /** * string type = 3; * @return The type. */ public java.lang.String getType() { java.lang.Object ref = type_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); type_ = s; return s; } else { return (java.lang.String) ref; } } /** * string type = 3; * @return The bytes for type. */ public com.google.protobuf.ByteString getTypeBytes() { java.lang.Object ref = type_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); type_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string type = 3; * @param value The type to set. * @return This builder for chaining. */ public Builder setType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } type_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * string type = 3; * @return This builder for chaining. */ public Builder clearType() { type_ = getDefaultInstance().getType(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * string type = 3; * @param value The bytes for type to set. * @return This builder for chaining. */ public Builder setTypeBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); type_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object displayName_ = ""; /** * string displayName = 4; * @return The displayName. 
*/ public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * string displayName = 4; * @return The bytes for displayName. */ public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string displayName = 4; * @param value The displayName to set. * @return This builder for chaining. */ public Builder setDisplayName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } displayName_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * string displayName = 4; * @return This builder for chaining. */ public Builder clearDisplayName() { displayName_ = getDefaultInstance().getDisplayName(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * string displayName = 4; * @param value The bytes for displayName to set. * @return This builder for chaining. */ public Builder setDisplayNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); displayName_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object description_ = ""; /** * string description = 5; * @return The description. */ public java.lang.String getDescription() { java.lang.Object ref = description_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); description_ = s; return s; } else { return (java.lang.String) ref; } } /** * string description = 5; * @return The bytes for description. */ public com.google.protobuf.ByteString getDescriptionBytes() { java.lang.Object ref = description_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); description_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string description = 5; * @param value The description to set. * @return This builder for chaining. */ public Builder setDescription( java.lang.String value) { if (value == null) { throw new NullPointerException(); } description_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * string description = 5; * @return This builder for chaining. */ public Builder clearDescription() { description_ = getDefaultInstance().getDescription(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * string description = 5; * @param value The bytes for description to set. * @return This builder for chaining. */ public Builder setDescriptionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); description_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } private java.lang.Object fullName_ = ""; /** * string fullName = 6; * @return The fullName. 
*/ public java.lang.String getFullName() { java.lang.Object ref = fullName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); fullName_ = s; return s; } else { return (java.lang.String) ref; } } /** * string fullName = 6; * @return The bytes for fullName. */ public com.google.protobuf.ByteString getFullNameBytes() { java.lang.Object ref = fullName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); fullName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string fullName = 6; * @param value The fullName to set. * @return This builder for chaining. */ public Builder setFullName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } fullName_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * string fullName = 6; * @return This builder for chaining. */ public Builder clearFullName() { fullName_ = getDefaultInstance().getFullName(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); return this; } /** * string fullName = 6; * @param value The bytes for fullName to set. * @return This builder for chaining. */ public Builder setFullNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); fullName_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } private java.lang.Object groupNamespace_ = ""; /** * string groupNamespace = 7; * @return The groupNamespace. */ public java.lang.String getGroupNamespace() { java.lang.Object ref = groupNamespace_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); groupNamespace_ = s; return s; } else { return (java.lang.String) ref; } } /** * string groupNamespace = 7; * @return The bytes for groupNamespace. */ public com.google.protobuf.ByteString getGroupNamespaceBytes() { java.lang.Object ref = groupNamespace_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); groupNamespace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string groupNamespace = 7; * @param value The groupNamespace to set. * @return This builder for chaining. */ public Builder setGroupNamespace( java.lang.String value) { if (value == null) { throw new NullPointerException(); } groupNamespace_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * string groupNamespace = 7; * @return This builder for chaining. */ public Builder clearGroupNamespace() { groupNamespace_ = getDefaultInstance().getGroupNamespace(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); return this; } /** * string groupNamespace = 7; * @param value The bytes for groupNamespace to set. * @return This builder for chaining. */ public Builder setGroupNamespaceBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); groupNamespace_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } private java.lang.Object groupName_ = ""; /** * string groupName = 8; * @return The groupName. 
*/ public java.lang.String getGroupName() { java.lang.Object ref = groupName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); groupName_ = s; return s; } else { return (java.lang.String) ref; } } /** * string groupName = 8; * @return The bytes for groupName. */ public com.google.protobuf.ByteString getGroupNameBytes() { java.lang.Object ref = groupName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); groupName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string groupName = 8; * @param value The groupName to set. * @return This builder for chaining. */ public Builder setGroupName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } groupName_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * string groupName = 8; * @return This builder for chaining. */ public Builder clearGroupName() { groupName_ = getDefaultInstance().getGroupName(); bitField0_ = (bitField0_ & ~0x00000080); onChanged(); return this; } /** * string groupName = 8; * @param value The bytes for groupName to set. * @return This builder for chaining. */ public Builder setGroupNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); groupName_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } private java.lang.Object scenario_ = ""; /** * string scenario = 9; * @return The scenario. */ public java.lang.String getScenario() { java.lang.Object ref = scenario_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); scenario_ = s; return s; } else { return (java.lang.String) ref; } } /** * string scenario = 9; * @return The bytes for scenario. */ public com.google.protobuf.ByteString getScenarioBytes() { java.lang.Object ref = scenario_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); scenario_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string scenario = 9; * @param value The scenario to set. * @return This builder for chaining. */ public Builder setScenario( java.lang.String value) { if (value == null) { throw new NullPointerException(); } scenario_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } /** * string scenario = 9; * @return This builder for chaining. */ public Builder clearScenario() { scenario_ = getDefaultInstance().getScenario(); bitField0_ = (bitField0_ & ~0x00000100); onChanged(); return this; } /** * string scenario = 9; * @param value The bytes for scenario to set. * @return This builder for chaining. */ public Builder setScenarioBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); scenario_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } private java.lang.Object primaryImplLang_ = ""; /** * string primaryImplLang = 10; * @return The primaryImplLang. 
*/ public java.lang.String getPrimaryImplLang() { java.lang.Object ref = primaryImplLang_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); primaryImplLang_ = s; return s; } else { return (java.lang.String) ref; } } /** * string primaryImplLang = 10; * @return The bytes for primaryImplLang. */ public com.google.protobuf.ByteString getPrimaryImplLangBytes() { java.lang.Object ref = primaryImplLang_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); primaryImplLang_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string primaryImplLang = 10; * @param value The primaryImplLang to set. * @return This builder for chaining. */ public Builder setPrimaryImplLang( java.lang.String value) { if (value == null) { throw new NullPointerException(); } primaryImplLang_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } /** * string primaryImplLang = 10; * @return This builder for chaining. */ public Builder clearPrimaryImplLang() { primaryImplLang_ = getDefaultInstance().getPrimaryImplLang(); bitField0_ = (bitField0_ & ~0x00000200); onChanged(); return this; } /** * string primaryImplLang = 10; * @param value The bytes for primaryImplLang to set. * @return This builder for chaining. */ public Builder setPrimaryImplLangBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); primaryImplLang_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } private java.lang.Object extraInformation_ = ""; /** * string extraInformation = 11; * @return The extraInformation. */ public java.lang.String getExtraInformation() { java.lang.Object ref = extraInformation_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); extraInformation_ = s; return s; } else { return (java.lang.String) ref; } } /** * string extraInformation = 11; * @return The bytes for extraInformation. */ public com.google.protobuf.ByteString getExtraInformationBytes() { java.lang.Object ref = extraInformation_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); extraInformation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string extraInformation = 11; * @param value The extraInformation to set. * @return This builder for chaining. */ public Builder setExtraInformation( java.lang.String value) { if (value == null) { throw new NullPointerException(); } extraInformation_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } /** * string extraInformation = 11; * @return This builder for chaining. */ public Builder clearExtraInformation() { extraInformation_ = getDefaultInstance().getExtraInformation(); bitField0_ = (bitField0_ & ~0x00000400); onChanged(); return this; } /** * string extraInformation = 11; * @param value The bytes for extraInformation to set. * @return This builder for chaining. 
*/ public Builder setExtraInformationBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); extraInformation_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } private java.lang.Object level_ = ""; /** * string level = 12; * @return The level. */ public java.lang.String getLevel() { java.lang.Object ref = level_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); level_ = s; return s; } else { return (java.lang.String) ref; } } /** * string level = 12; * @return The bytes for level. */ public com.google.protobuf.ByteString getLevelBytes() { java.lang.Object ref = level_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); level_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string level = 12; * @param value The level to set. * @return This builder for chaining. */ public Builder setLevel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } level_ = value; bitField0_ |= 0x00000800; onChanged(); return this; } /** * string level = 12; * @return This builder for chaining. */ public Builder clearLevel() { level_ = getDefaultInstance().getLevel(); bitField0_ = (bitField0_ & ~0x00000800); onChanged(); return this; } /** * string level = 12; * @param value The bytes for level to set. * @return This builder for chaining. */ public Builder setLevelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); level_ = value; bitField0_ |= 0x00000800; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceMetaDTO) } // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceMetaDTO) private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO(); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ServiceMetaDTO> PARSER = new com.google.protobuf.AbstractParser<ServiceMetaDTO>() { @java.lang.Override public ServiceMetaDTO parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ServiceMetaDTO> parser() { return
PARSER; } @java.lang.Override public com.google.protobuf.Parser<ServiceMetaDTO> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOListReply.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.ServiceMetaDTOListReply} */ public final class ServiceMetaDTOListReply extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceMetaDTOListReply) ServiceMetaDTOListReplyOrBuilder { private static final long serialVersionUID = 0L; // Use ServiceMetaDTOListReply.newBuilder() to construct. private ServiceMetaDTOListReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ServiceMetaDTOListReply() { metas_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ServiceMetaDTOListReply(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.Builder.class); } public static final int METAS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> metas_; /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ @java.lang.Override public java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> getMetasList() { return metas_; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ @java.lang.Override public java.util.List<? extends com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> getMetasOrBuilderList() { return metas_; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ @java.lang.Override public int getMetasCount() { return metas_.size(); } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMetas(int index) { return metas_.get(index); } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetasOrBuilder( int index) { return metas_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < metas_.size(); i++) { output.writeMessage(1, metas_.get(i)); } getUnknownFields().writeTo(output); }
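// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the generated source).
// ServiceMetaDTO (previous file) and ServiceMetaDTOListReply (this file) follow
// the standard protobuf-java builder pattern, so a reply is assembled from
// individually built DTOs; the field values below are hypothetical:
//
//   ServiceMetaDTO dto = ServiceMetaDTO.newBuilder()
//       .setGuid("f81d4fae-7dec-11d0-a765-00a0c91e6bf6")  // hypothetical GUID
//       .setName("demo-service")                          // hypothetical name
//       .build();
//   ServiceMetaDTOListReply reply = ServiceMetaDTOListReply.newBuilder()
//       .addMetas(dto)
//       .build();
//   for (ServiceMetaDTO m : reply.getMetasList()) {
//       System.out.println(m.getName());
//   }
// ---------------------------------------------------------------------------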
@java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < metas_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, metas_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply) obj; if (!getMetasList() .equals(other.getMetasList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getMetasCount() > 0) { hash = (37 * hash) + METAS_FIELD_NUMBER; hash = (53 * hash) + getMetasList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.ServiceMetaDTOListReply} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceMetaDTOListReply) com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReplyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (metasBuilder_ == null) { metas_ = java.util.Collections.emptyList(); } else { metas_ = null; metasBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor; } @java.lang.Override public 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply build() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result) { if (metasBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { metas_ = java.util.Collections.unmodifiableList(metas_); bitField0_ = (bitField0_ & ~0x00000001); } result.metas_ = metas_; } else { result.metas_ = metasBuilder_.build(); } } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance()) return this; if (metasBuilder_ == null) { if (!other.metas_.isEmpty()) { if (metas_.isEmpty()) { metas_ = other.metas_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureMetasIsMutable(); metas_.addAll(other.metas_); } onChanged(); } } else { if (!other.metas_.isEmpty()) { if (metasBuilder_.isEmpty()) { metasBuilder_.dispose(); metasBuilder_ = null; metas_ = other.metas_; bitField0_ = (bitField0_ & ~0x00000001); metasBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getMetasFieldBuilder() : null; } else { metasBuilder_.addAllMessages(other.metas_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO m = input.readMessage( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.parser(), extensionRegistry); if (metasBuilder_ == null) { ensureMetasIsMutable(); metas_.add(m); } else { metasBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List metas_ = java.util.Collections.emptyList(); private void ensureMetasIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { metas_ = new java.util.ArrayList(metas_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> metasBuilder_; /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public java.util.List getMetasList() { if (metasBuilder_ == null) { return java.util.Collections.unmodifiableList(metas_); } else { return metasBuilder_.getMessageList(); } } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public int getMetasCount() { if (metasBuilder_ == null) { return metas_.size(); } else { return metasBuilder_.getCount(); } } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMetas(int index) { if (metasBuilder_ == null) { return metas_.get(index); } else { return metasBuilder_.getMessage(index); } } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder setMetas( int index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) { if (metasBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetasIsMutable(); metas_.set(index, value); onChanged(); } else { metasBuilder_.setMessage(index, value); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder setMetas( int index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) { if (metasBuilder_ == null) { ensureMetasIsMutable(); metas_.set(index, builderForValue.build()); onChanged(); } else { metasBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder addMetas(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) { if (metasBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetasIsMutable(); metas_.add(value); onChanged(); } 
else { metasBuilder_.addMessage(value); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder addMetas( int index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) { if (metasBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetasIsMutable(); metas_.add(index, value); onChanged(); } else { metasBuilder_.addMessage(index, value); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder addMetas( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) { if (metasBuilder_ == null) { ensureMetasIsMutable(); metas_.add(builderForValue.build()); onChanged(); } else { metasBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder addMetas( int index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) { if (metasBuilder_ == null) { ensureMetasIsMutable(); metas_.add(index, builderForValue.build()); onChanged(); } else { metasBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder addAllMetas( java.lang.Iterable values) { if (metasBuilder_ == null) { ensureMetasIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, metas_); onChanged(); } else { metasBuilder_.addAllMessages(values); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder clearMetas() { if (metasBuilder_ == null) { metas_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { metasBuilder_.clear(); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public Builder removeMetas(int index) { if (metasBuilder_ == null) { ensureMetasIsMutable(); metas_.remove(index); onChanged(); } else { metasBuilder_.remove(index); } return this; } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder getMetasBuilder( int index) { return getMetasFieldBuilder().getBuilder(index); } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetasOrBuilder( int index) { if (metasBuilder_ == null) { return metas_.get(index); } else { return metasBuilder_.getMessageOrBuilder(index); } } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public java.util.List getMetasOrBuilderList() { if (metasBuilder_ != null) { return metasBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(metas_); } } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder addMetasBuilder() { return getMetasFieldBuilder().addBuilder( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()); } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder addMetasBuilder( int index) { return getMetasFieldBuilder().addBuilder( index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()); } /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ public java.util.List getMetasBuilderList() { return getMetasFieldBuilder().getBuilderList(); } private 
com.google.protobuf.RepeatedFieldBuilderV3< com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> getMetasFieldBuilder() { if (metasBuilder_ == null) { metasBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>( metas_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); metas_ = null; } return metasBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceMetaDTOListReply) } // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceMetaDTOListReply) private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply(); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ServiceMetaDTOListReply> PARSER = new com.google.protobuf.AbstractParser<ServiceMetaDTOListReply>() { @java.lang.Override public ServiceMetaDTOListReply parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ServiceMetaDTOListReply> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ServiceMetaDTOListReply> getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOListReplyOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT!
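// ---------------------------------------------------------------------------
// Wire round-trip sketch for the ServiceMetaDTOListReply defined above
// (illustrative only, not generated code); `reply` is the hypothetical instance
// from the earlier sketch. toByteArray() is the standard protobuf-java
// serializer, and parseFrom(byte[]) delegates to the PARSER singleton above:
//
//   byte[] wire = reply.toByteArray();
//   ServiceMetaDTOListReply decoded = ServiceMetaDTOListReply.parseFrom(wire);
//   assert decoded.getMetasCount() == reply.getMetasCount();
// ---------------------------------------------------------------------------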
// source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; public interface ServiceMetaDTOListReplyOrBuilder extends // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceMetaDTOListReply) com.google.protobuf.MessageOrBuilder { /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> getMetasList(); /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMetas(int index); /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ int getMetasCount(); /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ java.util.List<? extends com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> getMetasOrBuilderList(); /** * repeated .pinecone.meta.ServiceMetaDTO metas = 1; */ com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetasOrBuilder( int index); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; public interface ServiceMetaDTOOrBuilder extends // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceMetaDTO) com.google.protobuf.MessageOrBuilder { /** * string guid = 1; * @return The guid. */ java.lang.String getGuid(); /** * string guid = 1; * @return The bytes for guid. */ com.google.protobuf.ByteString getGuidBytes(); /** * string name = 2; * @return The name. */ java.lang.String getName(); /** * string name = 2; * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * string type = 3; * @return The type. */ java.lang.String getType(); /** * string type = 3; * @return The bytes for type. */ com.google.protobuf.ByteString getTypeBytes(); /** * string displayName = 4; * @return The displayName. */ java.lang.String getDisplayName(); /** * string displayName = 4; * @return The bytes for displayName. */ com.google.protobuf.ByteString getDisplayNameBytes(); /** * string description = 5; * @return The description. */ java.lang.String getDescription(); /** * string description = 5; * @return The bytes for description. */ com.google.protobuf.ByteString getDescriptionBytes(); /** * string fullName = 6; * @return The fullName. */ java.lang.String getFullName(); /** * string fullName = 6; * @return The bytes for fullName. */ com.google.protobuf.ByteString getFullNameBytes(); /** * string groupNamespace = 7; * @return The groupNamespace. */ java.lang.String getGroupNamespace(); /** * string groupNamespace = 7; * @return The bytes for groupNamespace. */ com.google.protobuf.ByteString getGroupNamespaceBytes(); /** * string groupName = 8; * @return The groupName. */ java.lang.String getGroupName(); /** * string groupName = 8; * @return The bytes for groupName. */ com.google.protobuf.ByteString getGroupNameBytes(); /** * string scenario = 9; * @return The scenario. */ java.lang.String getScenario(); /** * string scenario = 9; * @return The bytes for scenario. */ com.google.protobuf.ByteString getScenarioBytes(); /** * string primaryImplLang = 10; * @return The primaryImplLang. */ java.lang.String getPrimaryImplLang(); /** * string primaryImplLang = 10; * @return The bytes for primaryImplLang. */ com.google.protobuf.ByteString getPrimaryImplLangBytes(); /** * string extraInformation = 11; * @return The extraInformation.
*/ java.lang.String getExtraInformation(); /** * string extraInformation = 11; * @return The bytes for extraInformation. */ com.google.protobuf.ByteString getExtraInformationBytes(); /** * string level = 12; * @return The level. */ java.lang.String getLevel(); /** * string level = 12; * @return The bytes for level. */ com.google.protobuf.ByteString getLevelBytes(); } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOReply.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.ServiceMetaDTOReply} */ public final class ServiceMetaDTOReply extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceMetaDTOReply) ServiceMetaDTOReplyOrBuilder { private static final long serialVersionUID = 0L; // Use ServiceMetaDTOReply.newBuilder() to construct. private ServiceMetaDTOReply(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ServiceMetaDTOReply() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ServiceMetaDTOReply(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.Builder.class); } private int bitField0_; public static final int META_FIELD_NUMBER = 1; private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_; /** * .pinecone.meta.ServiceMetaDTO meta = 1; * @return Whether the meta field is set. */ @java.lang.Override public boolean hasMeta() { return ((bitField0_ & 0x00000001) != 0); } /** * .pinecone.meta.ServiceMetaDTO meta = 1; * @return The meta. */ @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() { return meta_ == null ? com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_; } /** * .pinecone.meta.ServiceMetaDTO meta = 1; */ @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() { return meta_ == null ? 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMeta()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getMeta()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply) obj; if (hasMeta() != other.hasMeta()) return false; if (hasMeta()) { if (!getMeta() .equals(other.getMeta())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMeta()) { hash = (37 * hash) + META_FIELD_NUMBER; hash = (53 * hash) + getMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.ServiceMetaDTOReply} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceMetaDTOReply) com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReplyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMetaFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; meta_ = null; if (metaBuilder_ != null) { metaBuilder_.dispose(); metaBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply build() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.meta_ = metaBuilder_ == null ? 
meta_ : metaBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance()) return this; if (other.hasMeta()) { mergeMeta(other.getMeta()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getMetaFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_; private com.google.protobuf.SingleFieldBuilderV3< com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> metaBuilder_; /** * .pinecone.meta.ServiceMetaDTO meta = 1; * @return Whether the meta field is set. */ public boolean hasMeta() { return ((bitField0_ & 0x00000001) != 0); } /** * .pinecone.meta.ServiceMetaDTO meta = 1; * @return The meta. */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() { if (metaBuilder_ == null) { return meta_ == null ? 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_; } else { return metaBuilder_.getMessage(); } } /** * .pinecone.meta.ServiceMetaDTO meta = 1; */ public Builder setMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) { if (metaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } meta_ = value; } else { metaBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 1; */ public Builder setMeta( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) { if (metaBuilder_ == null) { meta_ = builderForValue.build(); } else { metaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 1; */ public Builder mergeMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) { if (metaBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && meta_ != null && meta_ != com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()) { getMetaBuilder().mergeFrom(value); } else { meta_ = value; } } else { metaBuilder_.mergeFrom(value); } if (meta_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 1; */ public Builder clearMeta() { bitField0_ = (bitField0_ & ~0x00000001); meta_ = null; if (metaBuilder_ != null) { metaBuilder_.dispose(); metaBuilder_ = null; } onChanged(); return this; } /** * .pinecone.meta.ServiceMetaDTO meta = 1; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder getMetaBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMetaFieldBuilder().getBuilder(); } /** * .pinecone.meta.ServiceMetaDTO meta = 1; */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() { if (metaBuilder_ != null) { return metaBuilder_.getMessageOrBuilder(); } else { return meta_ == null ? 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;
      }
    }
    /**
     * .pinecone.meta.ServiceMetaDTO meta = 1;
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>
        getMetaFieldBuilder() {
      if (metaBuilder_ == null) {
        metaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>(
                getMeta(),
                getParentForChildren(),
                isClean());
        meta_ = null;
      }
      return metaBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceMetaDTOReply)
  }

  // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceMetaDTOReply)
  private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply();
  }

  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ServiceMetaDTOReply>
      PARSER = new com.google.protobuf.AbstractParser<ServiceMetaDTOReply>() {
    @java.lang.Override
    public ServiceMetaDTOReply parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<ServiceMetaDTOReply> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ServiceMetaDTOReply> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOReplyOrBuilder.java
================================================
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: service_meta.proto

package com.pinecone.hydra.service.registry.grpc.server.meta;

public interface ServiceMetaDTOReplyOrBuilder extends
    // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceMetaDTOReply)
    com.google.protobuf.MessageOrBuilder {

  /**
   * .pinecone.meta.ServiceMetaDTO meta = 1;
   * @return Whether the meta field is set.
   */
  boolean hasMeta();
  /**
   * .pinecone.meta.ServiceMetaDTO meta = 1;
   * @return The meta.
   */
  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta();
  /**
   * .pinecone.meta.ServiceMetaDTO meta = 1;
   */
  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder();
}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaGrpc.java
================================================
package com.pinecone.hydra.service.registry.grpc.server.meta;

import static io.grpc.MethodDescriptor.generateFullMethodName;

/**
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler (version 1.62.2)",
    comments = "Source: service_meta.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ServiceMetaGrpc {

  private ServiceMetaGrpc() {}

  public static final java.lang.String SERVICE_NAME = "pinecone.meta.ServiceMeta";

  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest,
      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByClientIdMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "FetchServiceInsMetaByClientId",
      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.class,
      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest,
      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByClientIdMethod() {
    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByClientIdMethod;
    if ((getFetchServiceInsMetaByClientIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByClientIdMethod) == null) {
      synchronized (ServiceMetaGrpc.class) {
        if ((getFetchServiceInsMetaByClientIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByClientIdMethod) == null) {
          ServiceMetaGrpc.getFetchServiceInsMetaByClientIdMethod = getFetchServiceInsMetaByClientIdMethod =
              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "FetchServiceInsMetaByClientId"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance()))
              .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier("FetchServiceInsMetaByClientId"))
              .build();
        }
      }
    }
    return getFetchServiceInsMetaByClientIdMethod;
  }

  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest,
      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByServiceIdMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "FetchServiceInsMetaByServiceId",
      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.class,
      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest,
      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByServiceIdMethod() {
    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByServiceIdMethod;
    if ((getFetchServiceInsMetaByServiceIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByServiceIdMethod) == null) {
      synchronized (ServiceMetaGrpc.class) {
        if ((getFetchServiceInsMetaByServiceIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByServiceIdMethod) == null) {
ServiceMetaGrpc.getFetchServiceInsMetaByServiceIdMethod = getFetchServiceInsMetaByServiceIdMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "FetchServiceInsMetaByServiceId")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier("FetchServiceInsMetaByServiceId")) .build(); } } } return getFetchServiceInsMetaByServiceIdMethod; } private static volatile io.grpc.MethodDescriptor getQueryServiceMetaByPathMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "QueryServiceMetaByPath", requestType = com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getQueryServiceMetaByPathMethod() { io.grpc.MethodDescriptor getQueryServiceMetaByPathMethod; if ((getQueryServiceMetaByPathMethod = ServiceMetaGrpc.getQueryServiceMetaByPathMethod) == null) { synchronized (ServiceMetaGrpc.class) { if ((getQueryServiceMetaByPathMethod = ServiceMetaGrpc.getQueryServiceMetaByPathMethod) == null) { ServiceMetaGrpc.getQueryServiceMetaByPathMethod = getQueryServiceMetaByPathMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "QueryServiceMetaByPath")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier("QueryServiceMetaByPath")) .build(); } } } return getQueryServiceMetaByPathMethod; } private static volatile io.grpc.MethodDescriptor getQueryServiceMetaByGuidMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "QueryServiceMetaByGuid", requestType = com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getQueryServiceMetaByGuidMethod() { io.grpc.MethodDescriptor getQueryServiceMetaByGuidMethod; if ((getQueryServiceMetaByGuidMethod = ServiceMetaGrpc.getQueryServiceMetaByGuidMethod) == null) { synchronized (ServiceMetaGrpc.class) { if ((getQueryServiceMetaByGuidMethod = ServiceMetaGrpc.getQueryServiceMetaByGuidMethod) == null) { ServiceMetaGrpc.getQueryServiceMetaByGuidMethod = getQueryServiceMetaByGuidMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "QueryServiceMetaByGuid")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( 
com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier("QueryServiceMetaByGuid")) .build(); } } } return getQueryServiceMetaByGuidMethod; } private static volatile io.grpc.MethodDescriptor getEvalCreationStatementMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "EvalCreationStatement", requestType = com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getEvalCreationStatementMethod() { io.grpc.MethodDescriptor getEvalCreationStatementMethod; if ((getEvalCreationStatementMethod = ServiceMetaGrpc.getEvalCreationStatementMethod) == null) { synchronized (ServiceMetaGrpc.class) { if ((getEvalCreationStatementMethod = ServiceMetaGrpc.getEvalCreationStatementMethod) == null) { ServiceMetaGrpc.getEvalCreationStatementMethod = getEvalCreationStatementMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "EvalCreationStatement")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier("EvalCreationStatement")) .build(); } } } return getEvalCreationStatementMethod; } private static volatile io.grpc.MethodDescriptor getCreateNewServiceMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateNewService", requestType = com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.class, responseType = com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getCreateNewServiceMethod() { io.grpc.MethodDescriptor getCreateNewServiceMethod; if ((getCreateNewServiceMethod = ServiceMetaGrpc.getCreateNewServiceMethod) == null) { synchronized (ServiceMetaGrpc.class) { if ((getCreateNewServiceMethod = ServiceMetaGrpc.getCreateNewServiceMethod) == null) { ServiceMetaGrpc.getCreateNewServiceMethod = getCreateNewServiceMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateNewService")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance())) .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier("CreateNewService")) .build(); } } } return getCreateNewServiceMethod; } /** * Creates a new async stub that supports all call types for the service */ public static ServiceMetaStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = 
new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ServiceMetaStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceMetaStub(channel, callOptions); } }; return ServiceMetaStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static ServiceMetaBlockingStub newBlockingStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ServiceMetaBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceMetaBlockingStub(channel, callOptions); } }; return ServiceMetaBlockingStub.newStub(factory, channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static ServiceMetaFutureStub newFutureStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory factory = new io.grpc.stub.AbstractStub.StubFactory() { @java.lang.Override public ServiceMetaFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceMetaFutureStub(channel, callOptions); } }; return ServiceMetaFutureStub.newStub(factory, channel); } /** */ public interface AsyncService { /** */ default void fetchServiceInsMetaByClientId(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getFetchServiceInsMetaByClientIdMethod(), responseObserver); } /** */ default void fetchServiceInsMetaByServiceId(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getFetchServiceInsMetaByServiceIdMethod(), responseObserver); } /** */ default void queryServiceMetaByPath(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getQueryServiceMetaByPathMethod(), responseObserver); } /** */ default void queryServiceMetaByGuid(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getQueryServiceMetaByGuidMethod(), responseObserver); } /** */ default void evalCreationStatement(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getEvalCreationStatementMethod(), responseObserver); } /** */ default void createNewService(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateNewServiceMethod(), responseObserver); } } /** * Base class for the server implementation of the service ServiceMeta. */ public static abstract class ServiceMetaImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return ServiceMetaGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service ServiceMeta. 
*/ public static final class ServiceMetaStub extends io.grpc.stub.AbstractAsyncStub { private ServiceMetaStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ServiceMetaStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceMetaStub(channel, callOptions); } /** */ public void fetchServiceInsMetaByClientId(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getFetchServiceInsMetaByClientIdMethod(), getCallOptions()), request, responseObserver); } /** */ public void fetchServiceInsMetaByServiceId(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getFetchServiceInsMetaByServiceIdMethod(), getCallOptions()), request, responseObserver); } /** */ public void queryServiceMetaByPath(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getQueryServiceMetaByPathMethod(), getCallOptions()), request, responseObserver); } /** */ public void queryServiceMetaByGuid(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getQueryServiceMetaByGuidMethod(), getCallOptions()), request, responseObserver); } /** */ public void evalCreationStatement(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getEvalCreationStatementMethod(), getCallOptions()), request, responseObserver); } /** */ public void createNewService(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request, io.grpc.stub.StreamObserver responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateNewServiceMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service ServiceMeta. 
*/ public static final class ServiceMetaBlockingStub extends io.grpc.stub.AbstractBlockingStub { private ServiceMetaBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ServiceMetaBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceMetaBlockingStub(channel, callOptions); } /** */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply fetchServiceInsMetaByClientId(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getFetchServiceInsMetaByClientIdMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply fetchServiceInsMetaByServiceId(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getFetchServiceInsMetaByServiceIdMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply queryServiceMetaByPath(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getQueryServiceMetaByPathMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply queryServiceMetaByGuid(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getQueryServiceMetaByGuidMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply evalCreationStatement(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getEvalCreationStatementMethod(), getCallOptions(), request); } /** */ public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply createNewService(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateNewServiceMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service ServiceMeta. 
*/ public static final class ServiceMetaFutureStub extends io.grpc.stub.AbstractFutureStub { private ServiceMetaFutureStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ServiceMetaFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ServiceMetaFutureStub(channel, callOptions); } /** */ public com.google.common.util.concurrent.ListenableFuture fetchServiceInsMetaByClientId( com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getFetchServiceInsMetaByClientIdMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture fetchServiceInsMetaByServiceId( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getFetchServiceInsMetaByServiceIdMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture queryServiceMetaByPath( com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getQueryServiceMetaByPathMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture queryServiceMetaByGuid( com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getQueryServiceMetaByGuidMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture evalCreationStatement( com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getEvalCreationStatementMethod(), getCallOptions()), request); } /** */ public com.google.common.util.concurrent.ListenableFuture createNewService( com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateNewServiceMethod(), getCallOptions()), request); } } private static final int METHODID_FETCH_SERVICE_INS_META_BY_CLIENT_ID = 0; private static final int METHODID_FETCH_SERVICE_INS_META_BY_SERVICE_ID = 1; private static final int METHODID_QUERY_SERVICE_META_BY_PATH = 2; private static final int METHODID_QUERY_SERVICE_META_BY_GUID = 3; private static final int METHODID_EVAL_CREATION_STATEMENT = 4; private static final int METHODID_CREATE_NEW_SERVICE = 5; private static final class MethodHandlers implements io.grpc.stub.ServerCalls.UnaryMethod, io.grpc.stub.ServerCalls.ServerStreamingMethod, io.grpc.stub.ServerCalls.ClientStreamingMethod, io.grpc.stub.ServerCalls.BidiStreamingMethod { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { case METHODID_FETCH_SERVICE_INS_META_BY_CLIENT_ID: serviceImpl.fetchServiceInsMetaByClientId((com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_FETCH_SERVICE_INS_META_BY_SERVICE_ID: 
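/*
 * [Editor's note — illustrative sketch, not generated source] A client-side
 * sketch using the blocking stub generated above. ManagedChannelBuilder is the
 * standard io.grpc bootstrap; the host/port and the PathRequest setter
 * (setPath, for "string path = 1") follow the usual protobuf codegen
 * conventions and are assumptions here.
 *
 *   io.grpc.ManagedChannel channel = io.grpc.ManagedChannelBuilder
 *       .forAddress("localhost", 9090).usePlaintext().build();
 *   ServiceMetaGrpc.ServiceMetaBlockingStub stub = ServiceMetaGrpc.newBlockingStub(channel);
 *   ServiceMetaDTOReply reply = stub.queryServiceMetaByPath(
 *       PathRequest.newBuilder().setPath("/apps/demo").build());   // unary call
 *   System.out.println(reply.getMeta().getName());
 *   channel.shutdown();
 */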
serviceImpl.fetchServiceInsMetaByServiceId((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_QUERY_SERVICE_META_BY_PATH: serviceImpl.queryServiceMetaByPath((com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_QUERY_SERVICE_META_BY_GUID: serviceImpl.queryServiceMetaByGuid((com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_EVAL_CREATION_STATEMENT: serviceImpl.evalCreationStatement((com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_CREATE_NEW_SERVICE: serviceImpl.createNewService((com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver invoke( io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getFetchServiceInsMetaByClientIdMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>( service, METHODID_FETCH_SERVICE_INS_META_BY_CLIENT_ID))) .addMethod( getFetchServiceInsMetaByServiceIdMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>( service, METHODID_FETCH_SERVICE_INS_META_BY_SERVICE_ID))) .addMethod( getQueryServiceMetaByPathMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>( service, METHODID_QUERY_SERVICE_META_BY_PATH))) .addMethod( getQueryServiceMetaByGuidMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>( service, METHODID_QUERY_SERVICE_META_BY_GUID))) .addMethod( getEvalCreationStatementMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>( service, METHODID_EVAL_CREATION_STATEMENT))) .addMethod( getCreateNewServiceMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>( service, METHODID_CREATE_NEW_SERVICE))) .build(); } private static abstract class ServiceMetaBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { ServiceMetaBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { 
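/*
 * [Editor's note — illustrative sketch, not generated source] The future stub
 * defined above returns Guava ListenableFutures; a typical non-blocking
 * consumption pattern is shown below. Futures.addCallback and
 * MoreExecutors.directExecutor are standard Guava APIs; "channel" is assumed
 * to be an already-built io.grpc.ManagedChannel.
 *
 *   ServiceMetaGrpc.ServiceMetaFutureStub futureStub = ServiceMetaGrpc.newFutureStub(channel);
 *   com.google.common.util.concurrent.ListenableFuture<ServiceMetaDTOReply> f =
 *       futureStub.queryServiceMetaByGuid(GuidRequest.newBuilder().setGuid("a1b2").build());
 *   com.google.common.util.concurrent.Futures.addCallback(f,
 *       new com.google.common.util.concurrent.FutureCallback<ServiceMetaDTOReply>() {
 *         @Override public void onSuccess(ServiceMetaDTOReply r) { System.out.println(r.getMeta().getGuid()); }
 *         @Override public void onFailure(Throwable t) { t.printStackTrace(); }
 *       },
 *       com.google.common.util.concurrent.MoreExecutors.directExecutor());
 */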
return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("ServiceMeta"); } } private static final class ServiceMetaFileDescriptorSupplier extends ServiceMetaBaseDescriptorSupplier { ServiceMetaFileDescriptorSupplier() {} } private static final class ServiceMetaMethodDescriptorSupplier extends ServiceMetaBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; ServiceMetaMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (ServiceMetaGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new ServiceMetaFileDescriptorSupplier()) .addMethod(getFetchServiceInsMetaByClientIdMethod()) .addMethod(getFetchServiceInsMetaByServiceIdMethod()) .addMethod(getQueryServiceMetaByPathMethod()) .addMethod(getQueryServiceMetaByGuidMethod()) .addMethod(getEvalCreationStatementMethod()) .addMethod(getCreateNewServiceMethod()) .build(); } } } return result; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaProto.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! 
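/*
 * [Editor's note — illustrative sketch, not generated source] A minimal
 * server-side sketch: extend ServiceMetaImplBase (defined in ServiceMetaGrpc
 * above), override one RPC, and bind it to an io.grpc server. ServerBuilder is
 * the standard io.grpc entry point; the port, reply contents, and the
 * PathRequest getter (getPath) are illustrative assumptions.
 *
 *   class ServiceMetaService extends ServiceMetaGrpc.ServiceMetaImplBase {
 *     @Override
 *     public void queryServiceMetaByPath(PathRequest request,
 *         io.grpc.stub.StreamObserver<ServiceMetaDTOReply> responseObserver) {
 *       ServiceMetaDTOReply reply = ServiceMetaDTOReply.newBuilder()
 *           .setMeta(ServiceMetaDTO.newBuilder().setFullName(request.getPath()).build())
 *           .build();
 *       responseObserver.onNext(reply);       // unary call: exactly one message
 *       responseObserver.onCompleted();
 *     }
 *   }
 *   io.grpc.Server server = io.grpc.ServerBuilder.forPort(9090)
 *       .addService(new ServiceMetaService())  // routed through the generated bindService()
 *       .build().start();
 */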
// source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; public final class ServiceMetaProto { private ServiceMetaProto() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_ServiceMetaDTO_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_ClientIdRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_ServiceIdRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_PathRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_PathRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_GuidRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_GuidRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_EvalRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_EvalRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_CreateNewServiceRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_pinecone_meta_StringReply_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_pinecone_meta_StringReply_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\022service_meta.proto\022\rpinecone.meta\"\365\001\n\016" + "ServiceMetaDTO\022\014\n\004guid\030\001 \001(\t\022\014\n\004name\030\002 \001" + "(\t\022\014\n\004type\030\003 \001(\t\022\023\n\013displayName\030\004 \001(\t\022\023\n" + "\013description\030\005 \001(\t\022\020\n\010fullName\030\006 \001(\t\022\026\n\016" + "groupNamespace\030\007 
\001(\t\022\021\n\tgroupName\030\010 \001(\t\022" + "\020\n\010scenario\030\t \001(\t\022\027\n\017primaryImplLang\030\n \001" + "(\t\022\030\n\020extraInformation\030\013 \001(\t\022\r\n\005level\030\014 " + "\001(\t\"#\n\017ClientIdRequest\022\020\n\010clientId\030\001 \001(\003" + "\"%\n\020ServiceIdRequest\022\021\n\tserviceId\030\001 \001(\t\"" + "\033\n\013PathRequest\022\014\n\004path\030\001 \001(\t\"\033\n\013GuidRequ" + "est\022\014\n\004guid\030\001 \001(\t\"$\n\013EvalRequest\022\025\n\rjson" + "Statement\030\001 \001(\t\"]\n\027CreateNewServiceReque" + "st\022\025\n\rparentAppPath\030\001 \001(\t\022+\n\004meta\030\002 \001(\0132" + "\035.pinecone.meta.ServiceMetaDTO\"B\n\023Servic" + "eMetaDTOReply\022+\n\004meta\030\001 \001(\0132\035.pinecone.m" + "eta.ServiceMetaDTO\"G\n\027ServiceMetaDTOList" + "Reply\022,\n\005metas\030\001 \003(\0132\035.pinecone.meta.Ser" + "viceMetaDTO\"\034\n\013StringReply\022\r\n\005value\030\001 \001(" + "\t2\276\004\n\013ServiceMeta\022g\n\035FetchServiceInsMeta" + "ByClientId\022\036.pinecone.meta.ClientIdReque" + "st\032&.pinecone.meta.ServiceMetaDTOListRep" + "ly\022i\n\036FetchServiceInsMetaByServiceId\022\037.p" + "inecone.meta.ServiceIdRequest\032&.pinecone" + ".meta.ServiceMetaDTOListReply\022X\n\026QuerySe" + "rviceMetaByPath\022\032.pinecone.meta.PathRequ" + "est\032\".pinecone.meta.ServiceMetaDTOReply\022" + "X\n\026QueryServiceMetaByGuid\022\032.pinecone.met" + "a.GuidRequest\032\".pinecone.meta.ServiceMet" + "aDTOReply\022O\n\025EvalCreationStatement\022\032.pin" + "econe.meta.EvalRequest\032\032.pinecone.meta.S" + "tringReply\022V\n\020CreateNewService\022&.pinecon" + "e.meta.CreateNewServiceRequest\032\032.pinecon" + "e.meta.StringReplyBJ\n4com.pinecone.hydra" + ".service.registry.grpc.server.metaB\020Serv" + "iceMetaProtoP\001b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }); internal_static_pinecone_meta_ServiceMetaDTO_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_ServiceMetaDTO_descriptor, new java.lang.String[] { "Guid", "Name", "Type", "DisplayName", "Description", "FullName", "GroupNamespace", "GroupName", "Scenario", "PrimaryImplLang", "ExtraInformation", "Level", }); internal_static_pinecone_meta_ClientIdRequest_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_ClientIdRequest_descriptor, new java.lang.String[] { "ClientId", }); internal_static_pinecone_meta_ServiceIdRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_ServiceIdRequest_descriptor, new java.lang.String[] { "ServiceId", }); internal_static_pinecone_meta_PathRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_pinecone_meta_PathRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_PathRequest_descriptor, new java.lang.String[] { "Path", }); internal_static_pinecone_meta_GuidRequest_descriptor = getDescriptor().getMessageTypes().get(4); 
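/*
 * [Editor's note — illustrative sketch, not generated source] This static
 * initializer wires each message descriptor to its field-accessor table. The
 * same descriptors are available for runtime introspection, e.g.:
 *
 *   for (com.google.protobuf.Descriptors.Descriptor d :
 *        ServiceMetaProto.getDescriptor().getMessageTypes()) {
 *     System.out.println(d.getFullName());   // pinecone.meta.ServiceMetaDTO, ...
 *   }
 */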
internal_static_pinecone_meta_GuidRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_GuidRequest_descriptor, new java.lang.String[] { "Guid", }); internal_static_pinecone_meta_EvalRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_pinecone_meta_EvalRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_EvalRequest_descriptor, new java.lang.String[] { "JsonStatement", }); internal_static_pinecone_meta_CreateNewServiceRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_CreateNewServiceRequest_descriptor, new java.lang.String[] { "ParentAppPath", "Meta", }); internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor, new java.lang.String[] { "Meta", }); internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor, new java.lang.String[] { "Metas", }); internal_static_pinecone_meta_StringReply_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_pinecone_meta_StringReply_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_pinecone_meta_StringReply_descriptor, new java.lang.String[] { "Value", }); } // @@protoc_insertion_point(outer_class_scope) } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/StringReply.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; /** * Protobuf type {@code pinecone.meta.StringReply} */ public final class StringReply extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:pinecone.meta.StringReply) StringReplyOrBuilder { private static final long serialVersionUID = 0L; // Use StringReply.newBuilder() to construct. 
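/*
 * [Editor's note — illustrative sketch, not generated source] StringReply
 * carries a single "string value = 1" field. Internally value_ is lazily
 * memoized: after parsing it may hold a ByteString, which is replaced by the
 * decoded java.lang.String on the first getValue() call (see getValue() and
 * getValueBytes() below). Typical use, assuming the standard setValue accessor:
 *
 *   StringReply ok = StringReply.newBuilder().setValue("created").build();
 *   System.out.println(ok.getValue());   // "created"
 */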
private StringReply(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StringReply() { value_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StringReply(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.Builder.class); } public static final int VALUE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object value_ = ""; /** * string value = 1; * @return The value. */ @java.lang.Override public java.lang.String getValue() { java.lang.Object ref = value_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); value_ = s; return s; } } /** * string value = 1; * @return The bytes for value. */ @java.lang.Override public com.google.protobuf.ByteString getValueBytes() { java.lang.Object ref = value_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); value_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, value_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, value_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.StringReply)) { return super.equals(obj); } com.pinecone.hydra.service.registry.grpc.server.meta.StringReply other = (com.pinecone.hydra.service.registry.grpc.server.meta.StringReply) obj; if (!getValue() .equals(other.getValue())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = 
hash; return hash; } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.StringReply prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } 
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code pinecone.meta.StringReply} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:pinecone.meta.StringReply) com.pinecone.hydra.service.registry.grpc.server.meta.StringReplyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_fieldAccessorTable .ensureFieldAccessorsInitialized( com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.Builder.class); } // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; value_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_descriptor; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply getDefaultInstanceForType() { return com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance(); } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply build() { com.pinecone.hydra.service.registry.grpc.server.meta.StringReply result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply buildPartial() { com.pinecone.hydra.service.registry.grpc.server.meta.StringReply result = new com.pinecone.hydra.service.registry.grpc.server.meta.StringReply(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.StringReply result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.value_ = value_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.StringReply) { return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.StringReply)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.StringReply other) { if (other == com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance()) return this; if (!other.getValue().isEmpty()) { value_ = other.value_; bitField0_ |= 0x00000001; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { value_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object value_ = ""; /** * string value = 1; * @return The value. */ public java.lang.String getValue() { java.lang.Object ref = value_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); value_ = s; return s; } else { return (java.lang.String) ref; } } /** * string value = 1; * @return The bytes for value. */ public com.google.protobuf.ByteString getValueBytes() { java.lang.Object ref = value_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); value_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string value = 1; * @param value The value to set. * @return This builder for chaining. */ public Builder setValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } value_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string value = 1; * @return This builder for chaining. */ public Builder clearValue() { value_ = getDefaultInstance().getValue(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string value = 1; * @param value The bytes for value to set. * @return This builder for chaining. 
*/ public Builder setValueBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); value_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:pinecone.meta.StringReply) } // @@protoc_insertion_point(class_scope:pinecone.meta.StringReply) private static final com.pinecone.hydra.service.registry.grpc.server.meta.StringReply DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.StringReply(); } public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public StringReply parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/StringReplyOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: service_meta.proto package com.pinecone.hydra.service.registry.grpc.server.meta; public interface StringReplyOrBuilder extends // @@protoc_insertion_point(interface_extends:pinecone.meta.StringReply) com.google.protobuf.MessageOrBuilder { /** * string value = 1; * @return The value. */ java.lang.String getValue(); /** * string value = 1; * @return The bytes for value. 
*/ com.google.protobuf.ByteString getValueBytes(); }

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/proto/control_stream.proto
================================================
syntax = "proto3";

option java_multiple_files = true;
option java_package = "com.pinecone.hydra.service.registry.grpc.server.cs";

service ControlStream {
    rpc Connect(stream ControlMessage) returns (stream ControlMessage);
}

message ControlMessage {
    int64 clientId = 1;
    string payload = 2;
}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/proto/service_lifecycle.proto
================================================
syntax = "proto3";

option java_multiple_files = true;
option java_package = "com.pinecone.hydra.service.registry.grpc.server.lifecycle";
option java_outer_classname = "ServiceLifecycleProto";

service ServiceLifecycle {
    rpc RegisterService (RegisterServiceRequest) returns (RegisterServiceReply);
    rpc CreateInstanceMeta (CreateInstanceMetaRequest) returns (BoolReply);
    rpc DeregisterServiceByClientId (ClientIdRequest) returns (EmptyReply);
    rpc DeregisterServiceByInstanceId (InstanceIdRequest) returns (EmptyReply);
    rpc HasOwnedServiceByServiceId (ServiceIdRequest) returns (BoolReply);
    rpc HasOwnedServiceInstanceByClientId (ClientIdRequest) returns (BoolReply);
    rpc HasOwnedServiceInstanceByInstanceId (InstanceIdRequest) returns (BoolReply);
    rpc HasOwnedServiceClient (ClientIdRequest) returns (BoolReply);
    rpc CountRegisteredService (EmptyRequest) returns (CountReply);
}

message RegisterServiceRequest { int64 clientId = 1; string serviceId = 2; string deployId = 3; }
message RegisterServiceReply { string instanceId = 1; }
message CreateInstanceMetaRequest { string instanceGuid = 1; }
message ClientIdRequest { int64 clientId = 1; }
message InstanceIdRequest { string instanceId = 1; }
message ServiceIdRequest { string serviceId = 1; }
message BoolReply { bool value = 1; }
message CountReply { int32 value = 1; }
message EmptyRequest {}
message EmptyReply {}

================================================
FILE: Hydra/hydra-lib-grpc-service-sdk/src/main/proto/service_meta.proto
================================================
syntax = "proto3";

option java_multiple_files = true;
option java_package = "com.pinecone.hydra.service.registry.grpc.server.meta";
option java_outer_classname = "ServiceMetaProto";

package pinecone.meta;

service ServiceMeta {
    rpc FetchServiceInsMetaByClientId (ClientIdRequest) returns (ServiceMetaDTOListReply);
    rpc FetchServiceInsMetaByServiceId (ServiceIdRequest) returns (ServiceMetaDTOListReply);
    rpc QueryServiceMetaByPath (PathRequest) returns (ServiceMetaDTOReply);
    rpc QueryServiceMetaByGuid (GuidRequest) returns (ServiceMetaDTOReply);
    rpc EvalCreationStatement (EvalRequest) returns (StringReply);
    rpc CreateNewService (CreateNewServiceRequest) returns (StringReply);
}

/* ================= DTO ================= */
message ServiceMetaDTO {
    string guid = 1;
    string name = 2;
    string type = 3;
    string displayName = 4;
    string description = 5;
    string fullName = 6;
    string groupNamespace = 7;
    string groupName = 8;
    string scenario = 9;
    string primaryImplLang = 10;
    string extraInformation = 11;
    string level = 12;
}

/* ================= Request / Reply ================= */
message ClientIdRequest { int64 clientId = 1; }
message ServiceIdRequest { string serviceId = 1; }
message PathRequest { string path = 1; }
message GuidRequest { string guid = 1; }
message EvalRequest { string jsonStatement = 1; }
message CreateNewServiceRequest { string parentAppPath = 1; ServiceMetaDTO meta = 2; }
message ServiceMetaDTOReply { ServiceMetaDTO meta = 1; }
message ServiceMetaDTOListReply { repeated ServiceMetaDTO metas = 1; }
message StringReply { string value = 1; }
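[Editorial example] The three proto files above define the registry's wire contract. As a hedged illustration only: the class name ServiceMetaQuerySketch, the endpoint localhost:9090, and the query path are placeholders, and the ServiceMetaGrpc stub is the class that the standard grpc-java plugin would generate for the ServiceMeta service (it is not shown in this extract). A blocking metadata query might look like:

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply;
import com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaGrpc; // assumed grpc-java generated stub

public class ServiceMetaQuerySketch {
    public static void main( String[] args ) {
        // Plaintext channel to an assumed local registry endpoint.
        ManagedChannel channel = ManagedChannelBuilder.forAddress( "localhost", 9090 )
                .usePlaintext()
                .build();
        try {
            ServiceMetaGrpc.ServiceMetaBlockingStub stub = ServiceMetaGrpc.newBlockingStub( channel );
            // QueryServiceMetaByPath takes a PathRequest and returns a ServiceMetaDTOReply.
            ServiceMetaDTOReply reply = stub.queryServiceMetaByPath(
                    PathRequest.newBuilder().setPath( "root/apps/demo" ).build() );
            System.out.println( reply.getMeta().getName() );
        }
        finally {
            channel.shutdownNow();
        }
    }
}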
================================================
FILE: Hydra/hydra-lib-thrift-sdk/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.pinecone.hydra.sdk.thrift</groupId>
    <artifactId>hydra-lib-thrift-sdk</artifactId>
    <version>1.2.1</version>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>3.8.1</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libthrift</artifactId>
            <version>0.18.0</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/GenericThriftServiceRegistry.java
================================================
package com.pinecone.hydra.thrift;

public class GenericThriftServiceRegistry implements ThriftServiceRegistry {
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/MCConnectionArguments.java
================================================
package com.pinecone.hydra.thrift;

import com.pinecone.framework.system.prototype.Pinenut;

public interface MCConnectionArguments extends Pinenut {
    String getHost();
    void setHost( String host );

    short getPort();
    void setPort( short port );

    int getKeepAliveTimeout();
    void setKeepAliveTimeout( int keepAliveTimeout );

    int getSocketTimeout();
    void setSocketTimeout( int socketTimeout );
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/SharedConnectionArguments.java
================================================
package com.pinecone.hydra.thrift;

import com.pinecone.framework.util.json.JSONObject;

public abstract class SharedConnectionArguments implements MCConnectionArguments {
    protected String mszHost;
    protected short  mnPort;
    protected int    mnKeepAliveTimeout;
    protected int    mnSocketTimeout;

    public SharedConnectionArguments( JSONObject args ) {
        this.mszHost            = args.optString( "host", null );
        this.mnPort             = (short) args.optInt( "port", -1 );
        this.mnKeepAliveTimeout = args.optInt( "KeepAliveTimeout" );
        this.mnSocketTimeout    = args.optInt( "SocketTimeout", 800 );
    }

    @Override
    public String getHost() { return this.mszHost; }

    @Override
    public void setHost( String host ) { this.mszHost = host; }

    @Override
    public short getPort() { return this.mnPort; }

    @Override
    public void setPort( short port ) { this.mnPort = port; }

    @Override
    public int getKeepAliveTimeout() { return this.mnKeepAliveTimeout; }

    @Override
    public void setKeepAliveTimeout( int keepAliveTimeout ) { this.mnKeepAliveTimeout = keepAliveTimeout; }

    @Override
    public int getSocketTimeout() { return this.mnSocketTimeout; }

    @Override
    public void setSocketTimeout( int socketTimeout ) { this.mnSocketTimeout = socketTimeout; }
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/ThriftServiceRegistry.java
================================================
package com.pinecone.hydra.thrift;

public interface ThriftServiceRegistry {
}
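[Editorial example] As orientation for the connection-argument interfaces above, here is a hedged sketch of the JSON keys SharedConnectionArguments actually reads. The concrete ServerConnectionArguments subclass appears further below in this module; wrapping a plain Map in JSONMaptron mirrors what MultiplexedServer.asConfig does. The class name and all values are placeholders.

import java.util.HashMap;
import java.util.Map;

import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.hydra.thrift.server.ServerConnectionArguments;

public class ConnectionArgumentsSketch {
    public static void main( String[] args ) {
        Map<String, Object> conf = new HashMap<>();
        conf.put( "host", "127.0.0.1" );      // read via optString( "host", null )
        conf.put( "port", 8001 );             // read via optInt( "port", -1 ), narrowed to short
        conf.put( "KeepAliveTimeout", 60000 );
        conf.put( "SocketTimeout", 800 );     // defaults to 800 when absent
        conf.put( "MaximumClients", 0 );      // <= 0 means unlimited clients

        ServerConnectionArguments sca = new ServerConnectionArguments( new JSONMaptron( conf, true ) );
        System.out.println( sca.getHost() + ":" + sca.getPort() );
    }
}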
================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/GenericMultiplexedThriftClient.java
================================================
package com.pinecone.hydra.thrift.client;

import org.apache.thrift.TException;
import org.apache.thrift.TServiceClient;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TMultiplexedProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;

import java.lang.reflect.Constructor;

public class GenericMultiplexedThriftClient implements MultiplexedThriftClient {
    private String          host;
    private int             port;
    private TTransport      transport;
    private TBinaryProtocol protocol;

    public GenericMultiplexedThriftClient( String host, int port ) throws TTransportException {
        this.host      = host;
        this.port      = port;
        this.transport = new TSocket( this.host, this.port );
        this.transport.open();
        this.protocol  = new TBinaryProtocol( this.transport );
    }

    @Override
    public <T extends TServiceClient> T getClient( String serviceName, Class<T> clientClass ) throws TException {
        // Create the multiplexed protocol for the named service.
        TMultiplexedProtocol multiplexedProtocol = new TMultiplexedProtocol( this.protocol, serviceName );
        try {
            // Look up the Client(TProtocol) constructor of the generated stub.
            Constructor<T> constructor = clientClass.getConstructor( TProtocol.class );
            // Instantiate the client against the multiplexed protocol.
            return constructor.newInstance( multiplexedProtocol );
        }
        catch ( Exception e ) {
            throw new TException( "Failed to create client for service: " + serviceName, e );
        }
    }
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/GenericThriftClient.java
================================================
package com.pinecone.hydra.thrift.client;

import org.apache.thrift.TException;
import org.apache.thrift.TServiceClient;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class GenericThriftClient<T extends TServiceClient> implements ThriftClient<T> {
    private String     host;
    private int        port;
    private int        outTime;
    private TTransport transport;
    private T          client;

    public GenericThriftClient( String host, int port, int outTime, Class<T> clientClass ) {
        this.host    = host;
        this.port    = port;
        this.outTime = outTime;
        try {
            // Create the transport and protocol.
            this.transport = new TSocket( this.host, this.port, this.outTime );
            TProtocol protocol = new TBinaryProtocol( this.transport );
            // Instantiate the generated client stub reflectively.
            this.client = clientClass.getConstructor( TProtocol.class ).newInstance( protocol );
        }
        catch ( Exception e ) {
            throw new RuntimeException( "Failed to initialize the client", e );
        }
    }

    @Override
    public T getClient() throws TException {
        if ( !transport.isOpen() ) {
            transport.open();
        }
        return client;
    }

    @Override
    public void close() {
        if ( transport != null && transport.isOpen() ) {
            transport.close();
        }
    }
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/MultiplexedThriftClient.java
================================================
package com.pinecone.hydra.thrift.client;

import org.apache.thrift.TException;
import org.apache.thrift.TServiceClient;

public interface MultiplexedThriftClient {
    <T extends TServiceClient> T getClient( String serviceName, Class<T> clientClass ) throws TException;
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/ThriftClient.java
================================================
package com.pinecone.hydra.thrift.client;

import org.apache.thrift.TException;
import org.apache.thrift.TServiceClient;

public interface ThriftClient<T extends TServiceClient> {
    T getClient() throws TException;
    void close();
}
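[Editorial example] A hedged usage sketch for the two client flavours above, using the HelloWorldService stub defined later in this module; the class name, host, port, and payload are placeholders. Note that with the multiplexed client the service name must match the name registered on the server side (see MultiplexedServer.registerProcessor below).

import org.apache.thrift.TException;

import com.pinecone.hydra.thrift.client.GenericMultiplexedThriftClient;
import com.pinecone.hydra.thrift.client.GenericThriftClient;
import com.pinecone.hydra.thrift.service.HelloWorldService;

public class ThriftClientSketch {
    public static void main( String[] args ) throws TException {
        // Plain client: one service per socket.
        GenericThriftClient<HelloWorldService.Client> plain =
                new GenericThriftClient<>( "localhost", 8001, 30000, HelloWorldService.Client.class );
        try {
            System.out.println( plain.getClient().sayHello( "Hydra" ) );
        }
        finally {
            plain.close();
        }

        // Multiplexed client: several services can share one socket, keyed by service name.
        GenericMultiplexedThriftClient mux = new GenericMultiplexedThriftClient( "localhost", 8001 );
        HelloWorldService.Client hello = mux.getClient( "HelloWorldService", HelloWorldService.Client.class );
        System.out.println( hello.sayHello( "Hydra" ) );
    }
}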
================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/GenericThriftServer.java
================================================
package com.pinecone.hydra.thrift.server;

import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TSimpleServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TTransportException;

import java.util.Map;

public class GenericThriftServer<T extends TProcessor> implements ThriftServer {
    private final T   processor;
    private final int port;

    public GenericThriftServer( T processor, int port ) {
        this.processor = processor;
        this.port = port;
    }

    @Override
    public void start() {
        try {
            System.out.println( "Thrift server starting...." );
            // Create the server transport.
            TServerSocket serverTransport = new TServerSocket( port );
            // Build the server arguments.
            TSimpleServer.Args tArgs = new TSimpleServer.Args( serverTransport );
            tArgs.processor( processor );
            tArgs.protocolFactory( new TBinaryProtocol.Factory() );
            // Create and start the server.
            TServer server = new TSimpleServer( tArgs );
            server.serve();
        }
        catch ( TTransportException e ) {
            e.printStackTrace();
        }
    }

    @Override
    public void close() {
    }

    @Override
    public ServerConnectArguments getConnectionArguments() {
        return null;
    }

    @Override
    public ThriftServer apply( Map<String, Object> conf ) {
        return null;
    }
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/MultiplexedServer.java
================================================
package com.pinecone.hydra.thrift.server;

import com.pinecone.framework.util.StringUtils;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;
import org.apache.thrift.TMultiplexedProcessor;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TSimpleServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TTransportException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.InetSocketAddress;
import java.util.Map;

public class MultiplexedServer implements ThriftServer {
    protected Logger logger = LoggerFactory.getLogger( MultiplexedServer.class );

    protected JSONObject                  mjoSectionConf;
    protected ServerConnectArguments      connectionArguments;
    protected InetSocketAddress           primaryBindAddress;
    protected final TMultiplexedProcessor multiplexedProcessor;
    protected TServer                     server;

    public MultiplexedServer( Map<String, Object> conf ) {
        this( conf, null );
    }

    public MultiplexedServer( Map<String, Object> conf, ServerConnectArguments arguments ) {
        this.multiplexedProcessor = new TMultiplexedProcessor();
        this.mjoSectionConf       = MultiplexedServer.asConfig( conf );
        this.connectionArguments  = arguments;
        if ( this.connectionArguments == null ) {
            this.connectionArguments = new ServerConnectionArguments( this.mjoSectionConf );
        }
    }

    protected static JSONObject asConfig( Map<String, Object> joConf ) {
        if ( joConf instanceof JSONObject ) {
            return (JSONObject) joConf;
        }
        else {
            return new JSONMaptron( joConf, true );
        }
    }

    @Override
    public ThriftServer apply( Map<String, Object> conf ) {
        this.mjoSectionConf = MultiplexedServer.asConfig( conf );
        JSONObject joConf = this.getSectionConf();
        this.connectionArguments = new ServerConnectionArguments( joConf );
        // this.mChannelPool = new PassiveRegisterChannelPool<>(
        //     this, new UlfIdleFirstBalanceStrategy(), joConf.optInt( "MaximumConnections", (int)1e7 )
        // );
        return this;
    }

    public void registerProcessor( TProcessor processor ) {
        // Derive the service name from the enclosing generated class, e.g.
        // "HelloWorldService" from "...HelloWorldService$Processor".
        String name = processor.getClass().getName();
        String[] parts = name.split( "[.$]" );
        name = parts[parts.length - 2];
        this.registerProcessor( name, processor );
    }

    public void registerProcessor( String serviceName, TProcessor processor ) {
        this.multiplexedProcessor.registerProcessor( serviceName, processor );
    }

    @Override
    public void start() {
        try {
            String szHost = this.getConnectionArguments().getHost();
            short  nPort  = this.getConnectionArguments().getPort();
            if ( StringUtils.isEmpty( szHost ) ) {
                this.primaryBindAddress = new InetSocketAddress( nPort );
            }
            else {
                this.primaryBindAddress = new InetSocketAddress( szHost, nPort );
            }

            TServerSocket serverTransport = new TServerSocket( this.primaryBindAddress );
            TSimpleServer.Args tArgs = new TSimpleServer.Args( serverTransport );
            tArgs.processor( this.multiplexedProcessor );
            tArgs.protocolFactory( new TBinaryProtocol.Factory() );
            this.server = new TSimpleServer( tArgs );
            // Log before serve(): TSimpleServer.serve() blocks until shutdown.
            this.logger.info( "ThriftServer started at " + this.primaryBindAddress );
            this.server.serve();
        }
        catch ( TTransportException e ) {
            e.printStackTrace();
        }
    }

    public JSONObject getSectionConf() {
        return this.mjoSectionConf;
    }

    @Override
    public ServerConnectArguments getConnectionArguments() {
        return this.connectionArguments;
    }

    @Override
    public void close() {
    }
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/ServerConnectArguments.java
================================================
package com.pinecone.hydra.thrift.server;

import com.pinecone.hydra.thrift.MCConnectionArguments;

public interface ServerConnectArguments extends MCConnectionArguments {
    int getMaximumClients();
    void setMaximumClients( int mnMaximumClients );
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/ServerConnectionArguments.java
================================================
package com.pinecone.hydra.thrift.server;

import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.hydra.thrift.SharedConnectionArguments;

public class ServerConnectionArguments extends SharedConnectionArguments implements ServerConnectArguments {
    protected int mnMaximumClients; // <= 0 for unlimited clients

    public ServerConnectionArguments( JSONObject args ) {
        super( args );
        this.mnMaximumClients = args.optInt( "MaximumClients", 0 );
    }

    @Override
    public int getMaximumClients() {
        return this.mnMaximumClients;
    }

    @Override
    public void setMaximumClients( int mnMaximumClients ) {
        this.mnMaximumClients = mnMaximumClients;
    }
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/ThriftServer.java
================================================
package com.pinecone.hydra.thrift.server;

import java.util.Map;

public interface ThriftServer {
    void start();
    void close();
    ServerConnectArguments getConnectionArguments();
    ThriftServer apply( Map<String, Object> conf );
}

================================================
FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/service/HelloWorldService.java
================================================
/**
 * Autogenerated by Thrift Compiler (0.18.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package com.pinecone.hydra.thrift.service;

import org.apache.thrift.async.TAsyncMethodCall;

@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.18.0)", date = "2025-01-24")
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public class HelloWorldService { public interface Iface { public java.lang.String sayHello(java.lang.String name) throws org.apache.thrift.TException; } public interface AsyncIface { public void sayHello(java.lang.String name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException; } public static class Client extends org.apache.thrift.TServiceClient implements Iface { public static class Factory implements org.apache.thrift.TServiceClientFactory { public Factory() {} @Override public Client getClient(org.apache.thrift.protocol.TProtocol prot) { return new Client(prot); } @Override public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) { return new Client(iprot, oprot); } } public Client(org.apache.thrift.protocol.TProtocol prot) { super(prot, prot); } public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) { super(iprot, oprot); } @Override public java.lang.String sayHello(java.lang.String name) throws org.apache.thrift.TException { send_sayHello(name); return recv_sayHello(); } public void send_sayHello(java.lang.String name) throws org.apache.thrift.TException { sayHello_args args = new sayHello_args(); args.setName(name); sendBase("sayHello", args); } public java.lang.String recv_sayHello() throws org.apache.thrift.TException { sayHello_result result = new sayHello_result(); receiveBase(result, "sayHello"); if (result.isSetSuccess()) { return result.success; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "sayHello failed: unknown result"); } } public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface { public static class Factory implements org.apache.thrift.async.TAsyncClientFactory { private org.apache.thrift.async.TAsyncClientManager clientManager; private org.apache.thrift.protocol.TProtocolFactory protocolFactory; public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) { this.clientManager = clientManager; this.protocolFactory = protocolFactory; } @Override public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) { return new AsyncClient(protocolFactory, clientManager, transport); } } public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) { super(protocolFactory, clientManager, transport); } @Override public void sayHello(java.lang.String name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { checkReady(); sayHello_call method_call = new sayHello_call(name, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class sayHello_call extends TAsyncMethodCall { private java.lang.String name; public sayHello_call(java.lang.String name, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.name = name; } @Override 
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("sayHello", org.apache.thrift.protocol.TMessageType.CALL, 0)); sayHello_args args = new sayHello_args(); args.setName(name); args.write(prot); prot.writeMessageEnd(); } @Override public java.lang.String getResult() throws org.apache.thrift.TException { if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { throw new java.lang.IllegalStateException("Method call not finished!"); } org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); return (new Client(prot)).recv_sayHello(); } } } public static class Processor extends org.apache.thrift.TBaseProcessor implements org.apache.thrift.TProcessor { private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(Processor.class.getName()); public Processor(I iface) { super(iface, getProcessMap(new java.util.HashMap>())); } protected Processor(I iface, java.util.Map> processMap) { super(iface, getProcessMap(processMap)); } private static java.util.Map> getProcessMap(java.util.Map> processMap) { processMap.put("sayHello", new sayHello()); return processMap; } public static class sayHello extends org.apache.thrift.ProcessFunction { public sayHello() { super("sayHello"); } @Override public sayHello_args getEmptyArgsInstance() { return new sayHello_args(); } @Override protected boolean isOneway() { return false; } @Override protected boolean rethrowUnhandledExceptions() { return false; } @Override public sayHello_result getResult(I iface, sayHello_args args) throws org.apache.thrift.TException { sayHello_result result = new sayHello_result(); result.success = iface.sayHello(args.name); return result; } } } public static class AsyncProcessor extends org.apache.thrift.TBaseAsyncProcessor { private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(AsyncProcessor.class.getName()); public AsyncProcessor(I iface) { super(iface, getProcessMap(new java.util.HashMap>())); } protected AsyncProcessor(I iface, java.util.Map> processMap) { super(iface, getProcessMap(processMap)); } private static java.util.Map> getProcessMap(java.util.Map> processMap) { processMap.put("sayHello", new sayHello()); return processMap; } public static class sayHello extends org.apache.thrift.AsyncProcessFunction { public sayHello() { super("sayHello"); } @Override public sayHello_args getEmptyArgsInstance() { return new sayHello_args(); } @Override public org.apache.thrift.async.AsyncMethodCallback getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) { final org.apache.thrift.AsyncProcessFunction fcall = this; return new org.apache.thrift.async.AsyncMethodCallback() { @Override public void onComplete(java.lang.String o) { sayHello_result result = new sayHello_result(); result.success = o; try { fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); } catch (org.apache.thrift.transport.TTransportException e) { _LOGGER.error("TTransportException writing to internal frame buffer", e); fb.close(); } catch (java.lang.Exception e) { _LOGGER.error("Exception writing to internal frame buffer", e); onError(e); } } @Override public void onError(java.lang.Exception e) { byte msgType = 
org.apache.thrift.protocol.TMessageType.REPLY; org.apache.thrift.TSerializable msg; sayHello_result result = new sayHello_result(); if (e instanceof org.apache.thrift.transport.TTransportException) { _LOGGER.error("TTransportException inside handler", e); fb.close(); return; } else if (e instanceof org.apache.thrift.TApplicationException) { _LOGGER.error("TApplicationException inside handler", e); msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; msg = (org.apache.thrift.TApplicationException)e; } else { _LOGGER.error("Exception inside handler", e); msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); } try { fcall.sendResponse(fb,msg,msgType,seqid); } catch (java.lang.Exception ex) { _LOGGER.error("Exception writing to internal frame buffer", ex); fb.close(); } } }; } @Override protected boolean isOneway() { return false; } @Override public void start(I iface, sayHello_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { iface.sayHello(args.name,resultHandler); } } } @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public static class sayHello_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("sayHello_args"); private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new sayHello_argsStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new sayHello_argsTupleSchemeFactory(); public @org.apache.thrift.annotation.Nullable java.lang.String name; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { NAME((short)1, "name"); private static final java.util.Map byName = new java.util.HashMap(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // NAME return NAME; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } @Override public short getThriftFieldId() { return _thriftId; } @Override public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(sayHello_args.class, metaDataMap); } public sayHello_args() { } public sayHello_args( java.lang.String name) { this(); this.name = name; } /** * Performs a deep copy on other. */ public sayHello_args(sayHello_args other) { if (other.isSetName()) { this.name = other.name; } } @Override public sayHello_args deepCopy() { return new sayHello_args(this); } @Override public void clear() { this.name = null; } @org.apache.thrift.annotation.Nullable public java.lang.String getName() { return this.name; } public sayHello_args setName(@org.apache.thrift.annotation.Nullable java.lang.String name) { this.name = name; return this; } public void unsetName() { this.name = null; } /** Returns true if field name is set (has been assigned a value) and false otherwise */ public boolean isSetName() { return this.name != null; } public void setNameIsSet(boolean value) { if (!value) { this.name = null; } } @Override public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case NAME: if (value == null) { unsetName(); } else { setName((java.lang.String)value); } break; } } @org.apache.thrift.annotation.Nullable @Override public java.lang.Object getFieldValue(_Fields field) { switch (field) { case NAME: return getName(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ @Override public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case NAME: return isSetName(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof sayHello_args) return this.equals((sayHello_args)that); return false; } public boolean equals(sayHello_args that) { if (that == null) return false; if (this == that) return true; boolean this_present_name = true && this.isSetName(); boolean that_present_name = true && that.isSetName(); if (this_present_name || that_present_name) { if (!(this_present_name && that_present_name)) return false; if (!this.name.equals(that.name)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetName()) ? 
131071 : 524287); if (isSetName()) hashCode = hashCode * 8191 + name.hashCode(); return hashCode; } @Override public int compareTo(sayHello_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetName(), other.isSetName()); if (lastComparison != 0) { return lastComparison; } if (isSetName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable @Override public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } @Override public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("sayHello_args("); boolean first = true; sb.append("name:"); if (this.name == null) { sb.append("null"); } else { sb.append(this.name); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class sayHello_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public sayHello_argsStandardScheme getScheme() { return new sayHello_argsStandardScheme(); } } private static class sayHello_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme { @Override public void read(org.apache.thrift.protocol.TProtocol iprot, sayHello_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.name = iprot.readString(); struct.setNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot, sayHello_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.name != null) { oprot.writeFieldBegin(NAME_FIELD_DESC); oprot.writeString(struct.name); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private 
static class sayHello_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public sayHello_argsTupleScheme getScheme() { return new sayHello_argsTupleScheme(); } } private static class sayHello_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme { @Override public void write(org.apache.thrift.protocol.TProtocol prot, sayHello_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetName()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.isSetName()) { oprot.writeString(struct.name); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, sayHello_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.name = iprot.readString(); struct.setNameIsSet(true); } } } private static S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } } @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public static class sayHello_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("sayHello_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new sayHello_resultStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new sayHello_resultTupleSchemeFactory(); public @org.apache.thrift.annotation.Nullable java.lang.String success; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"); private static final java.util.Map byName = new java.util.HashMap(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS return SUCCESS; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } @Override public short getThriftFieldId() { return _thriftId; } @Override public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(sayHello_result.class, metaDataMap); } public sayHello_result() { } public sayHello_result( java.lang.String success) { this(); this.success = success; } /** * Performs a deep copy on other. */ public sayHello_result(sayHello_result other) { if (other.isSetSuccess()) { this.success = other.success; } } @Override public sayHello_result deepCopy() { return new sayHello_result(this); } @Override public void clear() { this.success = null; } @org.apache.thrift.annotation.Nullable public java.lang.String getSuccess() { return this.success; } public sayHello_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) { this.success = success; return this; } public void unsetSuccess() { this.success = null; } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean isSetSuccess() { return this.success != null; } public void setSuccessIsSet(boolean value) { if (!value) { this.success = null; } } @Override public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case SUCCESS: if (value == null) { unsetSuccess(); } else { setSuccess((java.lang.String)value); } break; } } @org.apache.thrift.annotation.Nullable @Override public java.lang.Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return getSuccess(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ @Override public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case SUCCESS: return isSetSuccess(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof sayHello_result) return this.equals((sayHello_result)that); return false; } public boolean equals(sayHello_result that) { if (that == null) return false; if (this == that) return true; boolean this_present_success = true && this.isSetSuccess(); boolean that_present_success = true && that.isSetSuccess(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetSuccess()) ? 
131071 : 524287); if (isSetSuccess()) hashCode = hashCode * 8191 + success.hashCode(); return hashCode; } @Override public int compareTo(sayHello_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetSuccess(), other.isSetSuccess()); if (lastComparison != 0) { return lastComparison; } if (isSetSuccess()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable @Override public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } @Override public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("sayHello_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class sayHello_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public sayHello_resultStandardScheme getScheme() { return new sayHello_resultStandardScheme(); } } private static class sayHello_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme { @Override public void read(org.apache.thrift.protocol.TProtocol iprot, sayHello_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.success = iprot.readString(); struct.setSuccessIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot, sayHello_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); oprot.writeString(struct.success); oprot.writeFieldEnd(); } 
oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class sayHello_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public sayHello_resultTupleScheme getScheme() { return new sayHello_resultTupleScheme(); } } private static class sayHello_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme { @Override public void write(org.apache.thrift.protocol.TProtocol prot, sayHello_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetSuccess()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.isSetSuccess()) { oprot.writeString(struct.success); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, sayHello_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.success = iprot.readString(); struct.setSuccessIsSet(true); } } } private static S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } } } ================================================ FILE: Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/service/impl/HelloWorldServiceImpl.java ================================================ package com.pinecone.hydra.thrift.service.impl; import com.pinecone.hydra.thrift.service.HelloWorldService; import org.apache.thrift.TException; public class HelloWorldServiceImpl implements HelloWorldService.Iface { @Override public String sayHello(String name) throws TException { System.out.println(name); try { return "Hello, " + name; } catch (Exception e) { e.printStackTrace(); throw new TException("Error in sayHello: " + e.getMessage()); } } } ================================================ FILE: Hydra/hydra-lib-thrift-sdk/src/main/resources/thrift/hellow.thrift ================================================ namespace java com.example.thrift service HelloWorldService { string sayHello(1: string name) } ================================================ FILE: Hydra/hydra-lib-thrift-sdk/src/test/java/com/thrift/TestThriftClient.java ================================================ package com.thrift; import com.pinecone.hydra.thrift.client.GenericThriftClient; import com.pinecone.hydra.thrift.service.HelloWorldService; import org.apache.thrift.TException; public class TestThriftClient { public static void main(String[] args) throws TException { GenericThriftClient client = new GenericThriftClient<>("localhost", 8001, 30000, HelloWorldService.Client.class); HelloWorldService.Client clientClient = client.getClient(); clientClient.sayHello("你好"); } } ================================================ FILE: Hydra/hydra-lib-thrift-sdk/src/test/java/com/thrift/TestThriftService.java ================================================ package com.thrift; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.thrift.client.GenericMultiplexedThriftClient; import com.pinecone.hydra.thrift.server.MultiplexedServer; import com.pinecone.hydra.thrift.service.HelloWorldService; import com.pinecone.hydra.thrift.service.impl.HelloWorldServiceImpl; public class TestThriftService { public static void main(String[] 
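/* The sayHello_args/sayHello_result classes above are standard Apache Thrift codegen output for hellow.thrift; assuming a stock `thrift` compiler on the PATH, they can be regenerated with `thrift --gen java -out src/main/java src/main/resources/thrift/hellow.thrift`. Note the IDL declares `namespace java com.example.thrift`, while the hand-written code lives in `com.pinecone.hydra.thrift.service`, so the namespace would need updating before regenerating. A minimal blocking server for HelloWorldService.Iface using only stock libthrift classes (the port is illustrative, this is a sketch, not the repository's own server wrapper):

    TServerTransport transport = new TServerSocket( 8001 );
    TServer server = new TSimpleServer( new TServer.Args( transport )
            .processor( new HelloWorldService.Processor<>( new HelloWorldServiceImpl() ) ) );
    server.serve(); // blocks; call server.stop() from another thread to shut down
*/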
args) throws Exception { Thread thread = new Thread(()->{ HelloWorldService.Iface hello = new HelloWorldServiceImpl(); // GenericThriftServer> server = new GenericThriftServer<>(new HelloWorldService.Processor<>(hello), 8001); // server.start(); MultiplexedServer multiplexedServer = new MultiplexedServer( new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 16701, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); multiplexedServer.registerProcessor( new HelloWorldService.Processor<>(hello) ); multiplexedServer.start(); }); thread.start(); Thread.sleep( 1000 ); // GenericThriftClient client = new GenericThriftClient<>("localhost", 8001, 30000, HelloWorldService.Client.class); // HelloWorldService.Client clientClient = client.getClient(); // clientClient.sayHello("你好"); GenericMultiplexedThriftClient thriftClient = new GenericMultiplexedThriftClient("localhost", 16701); HelloWorldService.Client hello = thriftClient.getClient("HelloWorldService", HelloWorldService.Client.class); hello.sayHello("你好"); } } ================================================ FILE: Hydra/hydra-lib-thrift-sdk/src/test/java/org/example/AppTest.java ================================================ package org.example; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; /** * Unit test for simple App. */ public class AppTest extends TestCase { /** * Create the test case * * @param testName name of the test case */ public AppTest( String testName ) { super( testName ); } /** * @return the suite of tests being tested */ public static Test suite() { return new TestSuite( AppTest.class ); } /** * Rigourous Test :-) */ public void testApp() { assertTrue( true ); } } ================================================ FILE: Hydra/hydra-lib-uofs-cache/pom.xml ================================================ hydra com.pinecone.hydra 2.5.1 4.0.0 com.pinecone.hydra.storage.uofs.cache hydra-lib-uofs-cache 1.2.1 11 11 UTF-8 com.pinecone.hydra.kernel hydra-framework-storage 2.1.0 compile junit junit 3.8.1 test com.pinecone.slime.jelly jelly 2.1.0 compile ================================================ FILE: Hydra/hydra-lib-uofs-cache/src/main/java/com/pinecone/hydra/storage/file/UOFSCacheComponentor.java ================================================ package com.pinecone.hydra.storage.file; import com.pinecone.hydra.storage.file.builder.Feature; import com.pinecone.hydra.storage.file.builder.UOFSComponentor; import com.pinecone.hydra.storage.file.cache.FileSystemCacheConfig; import com.pinecone.slime.jelly.source.redis.GenericRedisMasterManipulator; import com.pinecone.slime.map.indexable.IndexableMapQuerier; import com.pinecone.slime.source.indexable.GenericIndexableTargetScopeMeta; import com.pinecone.slime.source.indexable.IndexableIterableManipulator; import com.pinecone.slime.source.indexable.IndexableTargetScopeMeta; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; public class UOFSCacheComponentor implements UOFSComponentor { private FileSystemCacheConfig cacheConfig; private String redisHost; private int redisPort; private int redisTimeOut; private String redisPassword; private int redisDatabase; public UOFSCacheComponentor(FileSystemCacheConfig cacheConfig){ this.cacheConfig = cacheConfig; this.redisHost = this.cacheConfig.getRedisHost(); this.redisPort = this.cacheConfig.getRedisPort(); this.redisTimeOut = this.cacheConfig.getRedisTimeOut(); this.redisPassword = 
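/* Sketch of wiring this componentor; FileSystemCacheConfig construction is assumed, since only its getters are visible here:

    FileSystemCacheConfig conf = ...;                        // must supply redis host/port/timeout/password/database
    UOFSComponentor cacheFeature = new UOFSCacheComponentor( conf );
    cacheFeature.apply( fs );                                // fs must actually be a UniformObjectFileSystem
*/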
this.cacheConfig.getRedisPassword(); this.redisDatabase = this.cacheConfig.getRedisDatabase(); } @Override public Feature getFeature() { return Feature.EnableGlobalCache; } @Override public void apply( KOMFileSystem fs ) { UniformObjectFileSystem uofs = (UniformObjectFileSystem) fs; JedisPoolConfig poolConfig = new JedisPoolConfig(); JedisPool jedisPool = new JedisPool( poolConfig, this.redisHost, this.redisPort, this.redisTimeOut, this.redisPassword, this.redisDatabase ); Jedis jedis = jedisPool.getResource(); jedis.auth( this.redisPassword ); IndexableIterableManipulator manipulator = new GenericRedisMasterManipulator<>( jedis ); IndexableTargetScopeMeta meta = new GenericIndexableTargetScopeMeta( "0", "", Object.class, manipulator ); IndexableMapQuerier querier = new IndexableMapQuerier<>( meta, true ); uofs.apply( querier ); } } ================================================ FILE: Hydra/hydra-lib-uofs-cache/src/test/java/org/example/AppTest.java ================================================ package org.example; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; /** * Unit test for simple App. */ public class AppTest extends TestCase { /** * Create the test case * * @param testName name of the test case */ public AppTest( String testName ) { super( testName ); } /** * @return the suite of tests being tested */ public static Test suite() { return new TestSuite( AppTest.class ); } /** * Rigourous Test :-) */ public void testApp() { assertTrue( true ); } } ================================================ FILE: Hydra/hydra-message-broadcast/pom.xml ================================================ hydra com.pinecone.hydra 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.pinecone.hydra.kernel hydra-message-broadcast 2.1.0 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-architecture 2.1.0 compile com.pinecone.hydra.kernel hydra-message-control 2.1.0 compile com.pinecone.ulf ulfhedinn 1.2.1 compile io.netty netty-all 4.1.80.Final com.rabbitmq amqp-client 5.14.2 org.apache.rocketmq rocketmq-client 4.9.1 org.apache.kafka kafka-clients 3.9.1 ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBBytesDecoder.java ================================================ package com.pinecone.hydra.umb; import java.io.IOException; import com.pinecone.hydra.umc.msg.GenericEMCBytesDecoder; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public class UMBBytesDecoder extends GenericEMCBytesDecoder { @Override public UMCHead decode( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException { UMCHead head = super.decode( buf, extraHeadCoder ); if ( head != null ) { return head; } if ( this.isQualified( buf, UMBPHeadV1.ProtocolSignature ) ) { return UMBPHeadV1.decode( buf, UMBPHeadV1.ProtocolSignature, extraHeadCoder ); } return null; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBClientException.java ================================================ package com.pinecone.hydra.umb; import com.pinecone.hydra.umct.ServiceException; public class UMBClientException extends ServiceException { public UMBClientException() { super(); } public UMBClientException( String message ) { super(message); } public UMBClientException( String message, Throwable cause ) { super(message, cause); } public UMBClientException( 
Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBHead.java ================================================ package com.pinecone.hydra.umb; import com.pinecone.hydra.umc.msg.UMCHead; /** * Pinecone Ursus For Java UMB [ Uniform Message Broadcast Control Transmit ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ********************************************************** * Uniform Message Control Transmission Protocol - Broadcast [UMC-T-B] * (Uniform Message Broadcast Control Transmission Protocol) * For: MQ / Kafka * ********************************************************** */ public interface UMBHead extends UMCHead { } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBPHeadV1.java ================================================ package com.pinecone.hydra.umb; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Arrays; import java.util.Map; import com.pinecone.framework.system.prototype.ObjectiveBean; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.datetime.compact.CompactTimeUnit; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.msg.AbstractUMCHead; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.Status; import com.pinecone.hydra.umc.msg.StreamTerminateException; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.hydra.umc.msg.UMCHeadV1; import com.pinecone.hydra.umc.msg.UMCMethod; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; /** * Pinecone Ursus For Java UMB [ Uniform Message Broadcast Control Transmit - Package ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ********************************************************** * Uniform Message Control Transmission Protocol - Broadcast Package [UMC-T-BP] * (Uniform Message Broadcast Control Transmission Protocol, Small-Package Sub-protocol) * For: Simplified Message Small-Package [ minimal compressed postal small-package ] * ********************************************************** * Given MQ characteristics, in practice, if a message is received, its status should in principle be 'OK'.
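 * Head layout, as implemented by encode()/decode() below:
 *   [ signature "UMC-BP/1.1" ][ int32 extra-head length ][ byte extra-encode ][ extra-head bytes ]
 * hence HeadBlockSize == ProtocolSignature.length() + Integer.BYTES + Byte.BYTES.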
* ********************************************************** */ public class UMBPHeadV1 extends AbstractUMCHead implements UMBHead { public static final String ProtocolVersion = "1.1"; public static final String ProtocolSignature = "UMC-BP/" + UMBPHeadV1.ProtocolVersion; public static final int StructBlockSize = Integer.BYTES + Byte.BYTES; public static final int HeadBlockSize = UMBPHeadV1.ProtocolSignature.length() + UMBPHeadV1.StructBlockSize; public static final ByteOrder BinByteOrder = UMCHeadV1.BinByteOrder; public static final int HeadFieldsSize = 3; protected String szSignature ; // :0 protected int nExtraHeadLength = 2 ; // :1 sizeof( int32 ) = 4 protected ExtraEncode extraEncode = ExtraEncode.Undefined ; // :2 sizeof( ExtraEncode/byte ) = 1 protected byte[] extraHead = {} ; protected Object dyExtraHead ; protected ExtraHeadCoder extraHeadCoder ; public UMBPHeadV1( ) { this.szSignature = UMBPHeadV1.ProtocolSignature; this.dyExtraHead = new LinkedTreeMap<>(); } @Override public int sizeof() { return UMBPHeadV1.HeadBlockSize; } @Override public int fieldsSize() { return UMBPHeadV1.HeadFieldsSize; } @Override protected void setSignature ( String signature ) { this.szSignature = signature; } @Override protected void setBodyLength ( long length ) { } @Override public void setKeepAlive ( int nKeepAliveMills ) { } @Override public void setKeepAlive ( int nKeepAlive, CompactTimeUnit timeUnit ) { } @Override protected void setMethod ( UMCMethod umcMethod ) { } @Override protected void setExtraEncode ( ExtraEncode encode ) { this.extraEncode = encode; } @Override public void setControlBits ( int controlBits ) { } @Override public void setSessionId ( long sessionId ) { } @Override public void setIdentityId ( long identityId ) { } @Override protected void setExtraHead ( JSONObject jo ) { this.dyExtraHead = jo.getMap(); } @Override protected void setExtraHead ( Map jo ) { this.dyExtraHead = jo; } @Override protected void setExtraHead ( Object o ) { this.dyExtraHead = o; if( o == null ) { this.nExtraHeadLength = 0; } } @Override protected void transApplyExHead ( ) { if ( this.dyExtraHead != null ) { this.extraHead = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead ); this.nExtraHeadLength = this.extraHead.length; } else { if( this.extraEncode == ExtraEncode.JSONString ) { this.extraHead = "{}".getBytes(); } else if( this.extraEncode == ExtraEncode.Prototype ) { this.extraHead = null; this.nExtraHeadLength = 0; return; } else if( this.extraEncode == ExtraEncode.Iussum ) { this.extraHead = new byte[ 0 ]; this.nExtraHeadLength = 0; return; } else { this.dyExtraHead = this.extraHeadCoder.newExtraHead(); this.extraHead = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead ); } } this.nExtraHeadLength = this.extraHead.length; } @Override protected void applyExtraHeadCoder ( ExtraHeadCoder coder ) { this.extraHeadCoder = coder; if( this.extraEncode == ExtraEncode.Undefined ) { this.extraEncode = coder.getDefaultEncode(); } } @Override public void setStatus ( Status status ) { } @Override public ExtraHeadCoder getExtraHeadCoder() { return this.extraHeadCoder; } @Override public String getSignature() { return this.szSignature; } @Override public int getSignatureLength() { return this.getSignature().length(); } @Override public UMCMethod getMethod() { return UMCMethod.INFORM; } @Override public int getExtraHeadLength() { return this.nExtraHeadLength; } @Override public long getBodyLength() { return 0L; } @Override public long getKeepAlive() {
return -1L; } @Override public int getCompactKeepAlive() { return -1; } @Override public long getSessionId() { return -1L; } @Override public Status getStatus() { return Status.OK; } @Override public ExtraEncode getExtraEncode() { return this.extraEncode; } @Override public int getControlBits() { return 0; } @Override public long getIdentityId() { return 0; } @Override public byte[] getExtraHeadBytes() { return this.extraHead ; } @Override @SuppressWarnings( "unchecked" ) public Map evalMapExtraHead() { if( this.dyExtraHead instanceof Map ) { return (Map) this.dyExtraHead; } return ( new ObjectiveBean( this.dyExtraHead ) ).toMap(); } @Override @SuppressWarnings( "unchecked" ) public Map getMapExtraHead() { if( this.dyExtraHead instanceof Map ) { return (Map) this.dyExtraHead; } return null; } @Override public Object getExtraHead() { return this.dyExtraHead; } @Override public void putExHeaderVal( String key, Object val ) throws IllegalArgumentException { if( this.dyExtraHead instanceof Map ) { this.getMapExtraHead().put( key, val ); } else { ReflectionUtils.beanSet( this.dyExtraHead, key, val ); } } @Override public Object getExHeaderVal( String key ) { if( this.dyExtraHead instanceof Map ) { return this.getMapExtraHead().get( key ); } else { return ReflectionUtils.beanGet( this.dyExtraHead, key ); } } @Override protected UMCHead applyExHead( Map jo ) { if( !( this.dyExtraHead instanceof Map ) && this.dyExtraHead != null ) { throw new IllegalArgumentException( "Current extra headed is not dynamic." ); } if( this.getMapExtraHead() == null || this.getMapExtraHead().size() == 0 ) { this.setExtraHead( jo ); } else { if( jo.size() > this.getMapExtraHead().size() ) { jo.putAll( this.getMapExtraHead() ); this.setExtraHead( jo ); } else { this.getMapExtraHead().putAll( jo ); } } return this; } public UMCHead receiveSet( Map joExtraHead ) { this.dyExtraHead = joExtraHead; return this; } @Override public void release() { // Help GC this.dyExtraHead = null; } @Override public EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) { return UMBPHeadV1.encode( this, extraHeadCoder ); } public static EncodePair encode( UMCHead umcHead, ExtraHeadCoder extraHeadCoder ) { UMBPHeadV1 head = (UMBPHeadV1) umcHead; head.applyExtraHeadCoder( extraHeadCoder ); head.transApplyExHead(); ByteBuffer byteBuffer = ByteBuffer.allocate( UMCHeadV1.ReadBufferSize + head.getExtraHeadLength() ); byteBuffer.order( BinByteOrder ); byteBuffer.put( head.getSignature().getBytes() ); int nBufLength = head.getSignatureLength(); byteBuffer.putInt( head.nExtraHeadLength ); nBufLength += Integer.BYTES; byteBuffer.put( head.extraEncode.getByteValue() ); nBufLength += Byte.BYTES; if( head.extraHead == null ) { byteBuffer.put( Bytes.Empty ); } else { byteBuffer.put( head.extraHead ); } nBufLength += head.getExtraHeadLength(); return new EncodePair( byteBuffer, nBufLength ); } public static UMCHead decode( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException { int nBufSize = szSignature.length() + UMBPHeadV1.StructBlockSize; if ( buf.length < nBufSize ) { throw new StreamTerminateException( "StreamEndException:[UMBPProtocol] Stream is ended." ); } int nReadAt = szSignature.length(); if ( !Arrays.equals( buf, 0, szSignature.length(), szSignature.getBytes(), 0, szSignature.length() ) ) { throw new IOException( "[UMBPProtocol] Illegal protocol signature." 
); } UMBPHeadV1 head = new UMBPHeadV1(); head.applyExtraHeadCoder( extraHeadCoder ); head.nExtraHeadLength = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( BinByteOrder ).getInt(); nReadAt += Integer.BYTES; head.extraEncode = ExtraEncode.asValue( ByteBuffer.wrap( buf, nReadAt, Byte.BYTES ).order( BinByteOrder ).get() ); nReadAt += Byte.BYTES; return head; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBServiceException.java ================================================ package com.pinecone.hydra.umb; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.ServiceException; public class UMBServiceException extends ServiceException implements Pinenut { public UMBServiceException() { super(); } public UMBServiceException( String message ) { super(message); } public UMBServiceException( String message, Throwable cause ) { super(message, cause); } public UMBServiceException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMCPackageMessageEncoder.java ================================================ package com.pinecone.hydra.umb; import java.io.IOException; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.UMCMessage; public interface UMCPackageMessageEncoder extends Pinenut { byte[] encode( UMCMessage message ) throws IOException; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UlfMBInformMessage.java ================================================ package com.pinecone.hydra.umb; import com.pinecone.hydra.umc.msg.ArchUMCMessage; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.InformMessage; import com.pinecone.hydra.umc.msg.UMCCHeadV1; import com.pinecone.hydra.umc.msg.UMCHead; import java.util.Map; public class UlfMBInformMessage extends ArchUMCMessage implements InformMessage { public static UMCHead newUMCHead( Object exHead ) { UMBPHeadV1 head = new UMBPHeadV1(); head.setExtraHead( exHead ); head.setExtraEncode( ExtraEncode.Prototype ); return head; } public static UMCHead newUMCHead( Map joExHead ) { UMBPHeadV1 head = new UMBPHeadV1(); head.applyExHead( joExHead ); return head; } public static UMCHead newUMCHead( Object exHead, int controlBits ) { UMCCHeadV1 head = UlfMBInformMessage.newUMCHead( controlBits ); head.setExtraHead( exHead ); head.setExtraEncode( ExtraEncode.Prototype ); return head; } public static UMCHead newUMCHead( Map joExHead, int controlBits ) { UMCCHeadV1 head = UlfMBInformMessage.newUMCHead( controlBits ); head.applyExHead( joExHead ); return head; } public static UMCCHeadV1 newUMCHead( int controlBits ) { UMCCHeadV1 head = new UMCCHeadV1(); head.setControlBits( controlBits ); return head; } public UlfMBInformMessage( UMCHead head ) { super( head ); } public UlfMBInformMessage( Map joExHead ) { this( UlfMBInformMessage.newUMCHead( joExHead ) ); } public UlfMBInformMessage( Object protoExHead ) { this( UlfMBInformMessage.newUMCHead( protoExHead ) ); } public UlfMBInformMessage( Map joExHead, int controlBits ) { this( UlfMBInformMessage.newUMCHead( joExHead, controlBits ) ); } public UlfMBInformMessage( Object protoExHead, int controlBits ) { this( UlfMBInformMessage.newUMCHead( protoExHead, controlBits ) ); } public UlfMBInformMessage( int controlBits ) { this( UlfMBInformMessage.newUMCHead( 
controlBits ) ); } @Override public long getMessageLength(){ if ( this.mHead instanceof UMBPHeadV1 ) { return UMBPHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength(); } return UMCCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength(); } @Override public UMCHead getHead() { return super.getHead(); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UlfPackageMessageEncoder.java ================================================ package com.pinecone.hydra.umb; import java.io.IOException; import com.pinecone.framework.util.UnitHelper; import com.pinecone.hydra.umc.msg.ArchBytesTransferMessage; import com.pinecone.hydra.umc.msg.ArchStreamTransferMessage; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public class UlfPackageMessageEncoder implements UMCPackageMessageEncoder { protected ExtraHeadCoder mExtraHeadCoder ; public UlfPackageMessageEncoder ( ExtraHeadCoder extraHeadCoder ) { this.mExtraHeadCoder = extraHeadCoder; } @Override public byte[] encode( UMCMessage message ) throws IOException { if ( message.evinceTransferMessage() != null ) { UMCHead.EncodePair pair = message.getHead().bytesEncode( this.mExtraHeadCoder ); byte[] headBuf = pair.getBytes(); if ( message instanceof ArchStreamTransferMessage ) { ArchStreamTransferMessage transferMessage = (ArchStreamTransferMessage) message; byte[] bytes = transferMessage.getBody().readAllBytes(); return (byte[]) UnitHelper.mergeArr( headBuf, bytes ); } else if ( message instanceof ArchBytesTransferMessage ) { ArchBytesTransferMessage transferMessage = (ArchBytesTransferMessage) message; byte[] bytes = transferMessage.getBody(); return (byte[]) UnitHelper.mergeArr( headBuf, bytes ); } } else if ( message.evinceInformMessage() != null ) { UMCHead.EncodePair pair = message.getHead().bytesEncode( this.mExtraHeadCoder ); return pair.getBytes(); } throw new IllegalArgumentException( "Type of `UMCMessage` [ " + message.getClass().getSimpleName() + " ] is not supported." 
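/* Usage sketch for this encoder; the coder and extra-head key/value are illustrative:

    UlfPackageMessageEncoder encoder = new UlfPackageMessageEncoder( extraHeadCoder );
    UMCMessage msg = new UlfMBInformMessage( Map.of( "route", "demo" ) );
    byte[] packet = encoder.encode( msg ); // UMBP head only for inform messages; head + body for transfer messages
*/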
); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UlfPackageMessageHandler.java ================================================ package com.pinecone.hydra.umb; import com.pinecone.framework.system.prototype.Pinenut; public interface UlfPackageMessageHandler extends Pinenut { default void onSuccessfulMsgReceived ( byte[] body, Object[] args ) throws Exception { } default void onErrorMsgReceived ( byte[] body, Object[] args ) throws Exception { } default void onError ( Object data, Throwable cause ) { } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/ArchUnidirectionalMCProtocol.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCHeadV1; import com.pinecone.hydra.umc.msg.UMCProtocol; public class ArchUnidirectionalMCProtocol implements UMCProtocol { protected String mszVersion = UMCHeadV1.ProtocolVersion; protected String mszSignature = UMCHeadV1.ProtocolSignature; protected Medium mMessageSource ; public ArchUnidirectionalMCProtocol( Medium messageSource ) { this.mMessageSource = messageSource; this.applyMessageSource( messageSource ); } @Override public UMCProtocol applyMessageSource( Medium medium ) { this.mMessageSource = medium; return this; } @Override public Medium getMessageSource() { return this.mMessageSource; } @Override public String getVersion(){ return this.mszVersion; } @Override public String getSignature() { return this.mszSignature; } @Override public void release() { } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastConsumer.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.UlfPackageMessageHandler; public interface BroadcastConsumer extends Pinenut { void close(); void start( UlfPackageMessageHandler handler ) throws UMBServiceException; boolean isClosed(); String topic(); String tag(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlAgent.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery; public interface BroadcastControlAgent extends Pinenut { MCTContextMachinery getMCTTransformer(); InterfacialCompiler getInterfacialCompiler(); ClassDigest queryClassDigest( String name ); MethodDigest queryMethodDigest( String name ); void addClassDigest( ClassDigest that ); void addMethodDigest( MethodDigest that ); ClassDigest compile( Class clazz, boolean bAsIface ); BroadcastControlNode broadcastControlNode(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlConsumer.java ================================================ package com.pinecone.hydra.umb.broadcast; import 
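/* Both callbacks of UlfPackageMessageHandler (above) default to no-ops, so a consumer only overrides the hooks it needs, e.g.:

    UlfPackageMessageHandler handler = new UlfPackageMessageHandler() {
        @Override public void onSuccessfulMsgReceived( byte[] body, Object[] args ) {
            System.out.println( new String( body ) );
        }
        @Override public void onError( Object data, Throwable cause ) { cause.printStackTrace(); }
    };
*/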
com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umct.UMCTExpressHandler; public interface BroadcastControlConsumer extends BroadcastControlAgent { void start() throws UMBServiceException; void start( UMCTExpressHandler handler ) throws UMBServiceException; void close(); void registerInstance( String deliverName, Object instance, Class iface ) ; void registerInstance( Object instance, Class iface ) ; void registerController( String deliverName, Object instance, Class controllerType ) ; void registerController( Object instance, Class controllerType ) ; default void registerController( Object instance ) { this.registerController( instance, instance.getClass() ); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlNode.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.UMCTNode; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery; import com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; public interface BroadcastControlNode extends UMCBroadcastNode, UMCTNode { UMCBroadcastNode getUMCBroadcastNode(); RouteDispatcher getRouteDispatcher(); MCTContextMachinery getMCTTransformer(); InterfacialCompiler getInterfacialCompiler(); ClassDigest queryClassDigest( String name ); MethodDigest queryMethodDigest( String name ); void addClassDigest( ClassDigest that ); void addMethodDigest( MethodDigest that ); ClassDigest compile( Class clazz, boolean bAsIface ); void registerInstance( String deliverName, Object instance, Class iface ) ; void registerInstance( Object instance, Class iface ) ; void registerController( String deliverName, Object instance, Class controllerType ) ; void registerController( Object instance, Class controllerType ) ; default void registerController( Object instance ) { this.registerController( instance, instance.getClass() ); } void applyMCTContextMachinery( MCTContextMachinery mctContextMachinery ) ; void applyRouteDispatcher( RouteDispatcher routeDispatcher ); UMCTExpress createUMCTExpress( String name, Class expressType ); BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent, RouteDispatcher routeDispatcher ) ; BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent ) ; BroadcastControlConsumer createBroadcastControlConsumer( UNT unt ) ; BroadcastControlConsumer createBroadcastControlConsumer( String topic, String ns ) ; BroadcastControlConsumer createBroadcastControlConsumer( String topic ) ; BroadcastControlProducer createBroadcastControlProducer( UMCBroadcastProducer workAgent ) ; BroadcastControlProducer createBroadcastControlProducer() ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlProducer.java ================================================ package com.pinecone.hydra.umb.broadcast; import java.io.IOException; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; public interface BroadcastControlProducer extends BroadcastControlAgent { void issueInform( UNT unt, String name, MethodPrototype method, 
Object[] args ) throws IOException ; void issueInform( String topic, String ns, String name, MethodPrototype method, Object[] args ) throws IOException ; void issueInform( String topic, MethodPrototype method, Object[] args ) throws IOException ; void issueInform( String topic, String szMethodAddress, Object... args ) throws IOException ; T getIface( Class iface, String topic, String ns, String name ); default T getIface( Class iface, String topic ){ return this.getIface( iface, topic, "", BroadcastNode.DefaultEntityName ); } void close(); void start() throws UMBServiceException; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastNode.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umc.msg.MessageNodus; public interface BroadcastNode extends MessageNodus { String DefaultEntityName = "__DEFAULT__"; void close(); void register( BroadcastProducer producer ); void register( BroadcastConsumer consumer ); void deregister( BroadcastProducer producer ) ; void deregister( BroadcastConsumer consumer ) ; BroadcastProducer createProducer() ; BroadcastConsumer createConsumer( String topic, String ns ) ; BroadcastConsumer createConsumer( String topic ); BroadcastConsumer createConsumer( UNT unt ) ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastPollConsumer.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.UlfPackageMessageHandler; import java.util.List; public interface BroadcastPollConsumer extends BroadcastConsumer { void close(); void start( UlfPackageMessageHandler handler ) throws UMBServiceException; List startPull(long mils ); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastProducer.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umb.UMBClientException; import com.pinecone.hydra.umb.UMBServiceException; public interface BroadcastProducer extends Pinenut { void close(); void start() throws UMBServiceException; boolean isClosed(); void sendMessage( String topic, String ns, String name, byte[] body ) throws UMBClientException ; void sendMessage( String topic, byte[] body ) throws UMBClientException ; void sendMessage( UNT unt, String name, byte[] body ) throws UMBClientException ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/DistributedConsumer.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.framework.system.prototype.Pinenut; public interface DistributedConsumer extends Pinenut { DistributedConsumer parentConsumer(); BroadcastConsumer mainConsumer(); String mainTopic(); String routerPath(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/GenericUNT.java ================================================ package com.pinecone.hydra.umb.broadcast; public class GenericUNT implements UNT { protected String mszTopic; protected String mszNamespace; protected 
String[] mNameSegments; public GenericUNT ( String topic, String ns, String[] segs ) { this.mszTopic = topic; this.mszNamespace = ns; this.mNameSegments = segs; } @Override public String getTopic() { return this.mszTopic; } @Override public String getNamespace() { return this.mszNamespace; } @Override public String[] getNameSegments() { return this.mNameSegments; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/PollResult.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.framework.system.prototype.Pinenut; public interface PollResult extends Pinenut { Object getName(); Object getValue(); byte[] getBytesValue(); Object[] getArgs(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/PushConsumer.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.UlfPackageMessageHandler; public interface PushConsumer extends BroadcastConsumer { void start( UlfPackageMessageHandler handler ) throws UMBServiceException ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UMCBroadcastConsumer.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umct.UMCTExpressHandler; public interface UMCBroadcastConsumer extends BroadcastConsumer { void start( UMCTExpressHandler handler ) throws UMBServiceException ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UMCBroadcastNode.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public interface UMCBroadcastNode extends BroadcastNode { ExtraHeadCoder getExtraHeadCoder(); UMCBroadcastProducer createUlfProducer() ; UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) ; UMCBroadcastConsumer createUlfConsumer( String topic ) ; UMCBroadcastConsumer createUlfConsumer( UNT unt ) ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UMCBroadcastProducer.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.hydra.umb.UMBClientException; import com.pinecone.hydra.umc.msg.UMCMessage; public interface UMCBroadcastProducer extends BroadcastProducer { void sendMessage( String topic, String ns, String name, UMCMessage message ) throws UMBClientException ; void sendMessage( String topic, UMCMessage message ) throws UMBClientException ; void sendMessage( UNT unt, String name, UMCMessage message ) throws UMBClientException ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UNT.java ================================================ package com.pinecone.hydra.umb.broadcast; import com.pinecone.framework.system.prototype.Pinenut; /** * Pinecone Ursus For Java [ Uniform Namespaced Topic ] * Author: Harald.E (Dragon King), Ken * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
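 * Example, constructed via GenericUNT above (values illustrative, not taken from this repository):
 *   UNT unt = new GenericUNT( "hydra-events", "eu-west", new String[]{ "orders", "created" } );
 * carries topic "hydra-events", namespace "eu-west" and name segments { "orders", "created" }.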
* ***************************************************************************************** * Topic + Namespace * ***************************************************************************************** */ public interface UNT extends Pinenut { String getTopic(); String getNamespace(); String[] getNameSegments(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/converter/GenericResultBytesConverter.java ================================================ package com.pinecone.hydra.umb.broadcast.converter; import com.pinecone.framework.util.Bytes; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; public class GenericResultBytesConverter implements ResultBytesConverter { @Override public byte[] convert( V value ) { if ( value == null ) { return Bytes.Empty; } else if ( value instanceof byte[] ) { return (byte[]) value; } else if ( value instanceof String ) { return ( (String) value ).getBytes(); } try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)) { objectOutputStream.writeObject(value); objectOutputStream.flush(); return byteArrayOutputStream.toByteArray(); } catch ( IOException e ) { return Bytes.Empty; } } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/converter/ResultBytesConverter.java ================================================ package com.pinecone.hydra.umb.broadcast.converter; import com.pinecone.framework.system.prototype.Pinenut; public interface ResultBytesConverter extends Pinenut { byte[] convert( V value ); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/proxy/GenericIfaceProxyFactory.java ================================================ package com.pinecone.hydra.umb.broadcast.proxy; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; import com.pinecone.hydra.umct.proxy.UMCTHub; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype; import com.pinecone.hydra.umct.stereotype.IfaceUtils; import org.springframework.cglib.proxy.Enhancer; import org.springframework.cglib.proxy.MethodInterceptor; import org.springframework.cglib.proxy.MethodProxy; import java.lang.reflect.Method; import java.util.concurrent.ConcurrentHashMap; public class GenericIfaceProxyFactory implements IfaceProxyFactory { protected final ConcurrentHashMap, Enhancer> mEnhancerCache = new ConcurrentHashMap<>(); protected BroadcastControlProducer mProducer; public GenericIfaceProxyFactory( BroadcastControlProducer producer ) { this.mProducer = producer; } @Override public T createProxy( BroadcastControlProducer producer, ClassDigest classDigest, Class iface, String topic, String ns, String name ) { // if (!iface.isInterface()) { // throw new IllegalArgumentException("The provided class must be an interface."); // } Enhancer enhancer = this.mEnhancerCache.computeIfAbsent(iface, clazz -> { Enhancer e = new Enhancer(); e.setSuperclass(UMCTHub.class); e.setInterfaces( new Class[]{iface} ); e.setCallback(new MethodInterceptor() { @Override public Object intercept( Object obj, Method method, Object[] 
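/* Usage sketch for this factory; the service interface and topic are hypothetical:

    IfaceProxyFactory factory = new GenericIfaceProxyFactory( producer );
    GreeterIface greeter = factory.createProxy( GreeterIface.class, "demo-topic", "", "greeter" );
    greeter.greet( "world" ); // intercepted below and rewritten into producer.issueInform(...)
*/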
args, MethodProxy proxy ) throws Throwable { String methodName = IfaceUtils.getIfaceMethodName( method ); MethodPrototype methodPrototype = (DynamicMethodPrototype) producer.queryMethodDigest( classDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + methodName ); producer.issueInform( topic, ns, name, methodPrototype, args ); return null; } }); return e; }); return iface.cast( enhancer.create() ); } @Override public T createProxy( BroadcastControlProducer producer, Class iface, String topic, String ns, String name ) { ClassDigest classDigest = producer.queryClassDigest( IfaceUtils.queryIfaceClassNameAddress( iface ) ); return this.createProxy( producer, classDigest, iface, topic, ns, name ); } @Override public T createProxy( Class iface, String topic, String ns, String name ) { return this.createProxy( this.mProducer, iface, topic, ns, name ); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/proxy/IfaceProxyFactory.java ================================================ package com.pinecone.hydra.umb.broadcast.proxy; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; public interface IfaceProxyFactory extends Pinenut { T createProxy( BroadcastControlProducer producer, ClassDigest classDigest, Class iface, String topic, String ns, String name ) ; T createProxy( BroadcastControlProducer producer, Class iface, String topic, String ns, String name ) ; T createProxy( Class iface, String topic, String ns, String name ); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KBroadcastPollConsumer.java ================================================ package com.pinecone.hydra.umb.kafka; import com.pinecone.hydra.umb.broadcast.BroadcastPollConsumer; public interface KBroadcastPollConsumer extends BroadcastPollConsumer { } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KBroadcastProducer.java ================================================ package com.pinecone.hydra.umb.kafka; import com.pinecone.hydra.umb.UMBClientException; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; public interface KBroadcastProducer extends BroadcastProducer { void sendPrototypeMessage( String topic, String ns, K name, V body ) throws UMBClientException ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KClient.java ================================================ package com.pinecone.hydra.umb.kafka; import com.pinecone.hydra.umb.broadcast.BroadcastConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastNode; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; import com.pinecone.hydra.umb.broadcast.converter.ResultBytesConverter; import com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit; import java.util.Collection; import java.util.Properties; public interface KClient extends BroadcastNode { @Override default ErrorMessageAudit getErrorMessageAudit() { return null; } @Override default void setErrorMessageAudit( ErrorMessageAudit audit ){ } KConfig getKafkaConfig(); KBroadcastProducer createPrototypeProducer( Properties properties ) ; default KBroadcastProducer createProducer( Properties properties ) { return this.createPrototypeProducer( properties ); 
} KBroadcastPollConsumer createPrototypeConsumer( String topic, String ns, Properties properties ) ; default KBroadcastPollConsumer createConsumer( String topic, String ns, Properties properties ) { return this.createPrototypeConsumer( topic, ns, properties ); } ResultBytesConverter getDafaultResultBytesConverter(); Collection viewProducerRegister(); Collection viewConsumerRegister(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KConfig.java ================================================ package com.pinecone.hydra.umb.kafka; import com.pinecone.hydra.umc.msg.MsgNodeConfig; public interface KConfig extends MsgNodeConfig { String getMszServer(); String getMszAutoOffsetReset(); long getMnDefaultPollHandleMillis(); @Override default long getSyncWaitingMillis() { return this.getMnDefaultPollHandleMillis(); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaClient.java ================================================ package com.pinecone.hydra.umb.kafka; import com.pinecone.hydra.umb.broadcast.BroadcastConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import com.pinecone.hydra.umb.broadcast.converter.GenericResultBytesConverter; import com.pinecone.hydra.umb.broadcast.converter.ResultBytesConverter; import com.pinecone.hydra.umc.msg.Messagus; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; public class KafkaClient implements KClient { protected Map producerRegister; protected Map consumerRegister; private static final Object PRESENT = new Object(); protected KConfig kafkaConfig; protected long nodeId; protected ExecutorService pollConsumerThreadPool; protected ResultBytesConverter resultBytesConverter; public KafkaClient( long nodeId, KConfig config ) { this.kafkaConfig = config; this.producerRegister = new ConcurrentHashMap<>(); this.consumerRegister = new ConcurrentHashMap<>(); this.nodeId = nodeId; this.resultBytesConverter = new GenericResultBytesConverter<>(); this.pollConsumerThreadPool = Executors.newCachedThreadPool(); } public KafkaClient( long nodeId, String server ) { this( nodeId, new KafkaConfig( server ) ); } public KafkaClient( String server ) { this( Messagus.nextLocalId(), server ); } public KafkaClient( Map config ){ this( Messagus.nextLocalId(), new KafkaConfig( config ) ); } @Override public void close() { for( Map.Entry kv : this.consumerRegister.entrySet() ) { kv.getKey().close(); } for( Map.Entry kv : this.producerRegister.entrySet() ) { kv.getKey().close(); } this.consumerRegister.clear(); this.producerRegister.clear(); this.pollConsumerThreadPool.shutdown(); } @Override public void register( BroadcastProducer producer ) { this.producerRegister.put( producer, PRESENT ); } @Override public void register( BroadcastConsumer consumer ) { this.consumerRegister.put( consumer, PRESENT ); } @Override public void deregister( BroadcastProducer producer ) { this.producerRegister.remove( producer ); } @Override public void deregister( BroadcastConsumer consumer ) { this.consumerRegister.remove( consumer ); } @Override public BroadcastProducer createProducer() { UlfBroadcastProducer ulfBroadcastProducer = 
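/* End-to-end usage sketch for KafkaClient; the broker address is illustrative:

    KClient node = new KafkaClient( "localhost:9092" );
    BroadcastProducer producer = node.createProducer();
    producer.start();
    producer.sendMessage( "demo-topic", "hello".getBytes() );
    BroadcastConsumer consumer = node.createConsumer( "demo-topic" );
    node.close(); // closes every registered producer/consumer and shuts down the poll thread pool
*/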
new UlfBroadcastProducer<>(this); this.register(ulfBroadcastProducer); return ulfBroadcastProducer; } @Override public BroadcastConsumer createConsumer( String topic, String ns ) { UlfBroadcastPollConsumer kafkaBroadcastConsumer = new UlfBroadcastPollConsumer<>(this, topic, ns); this.register(kafkaBroadcastConsumer); return kafkaBroadcastConsumer; } @Override public KBroadcastProducer createPrototypeProducer( Properties properties ) { UlfBroadcastProducer ulfBroadcastProducer = new UlfBroadcastProducer<> ( this, properties ); this.register(ulfBroadcastProducer); return ulfBroadcastProducer; } @Override public KBroadcastPollConsumer createPrototypeConsumer( String topic, String ns, Properties properties ) { UlfBroadcastPollConsumer kafkaBroadcastConsumer = new UlfBroadcastPollConsumer<>( this, topic, ns, properties ); this.register(kafkaBroadcastConsumer); return kafkaBroadcastConsumer; } @Override public BroadcastConsumer createConsumer( String topic ) { return this.createConsumer(topic, ""); } @Override public BroadcastConsumer createConsumer( UNT unt ) { return this.createConsumer( unt.getTopic(), unt.getNamespace() ); } @Override public KConfig getKafkaConfig() { return this.kafkaConfig; } @Override public KConfig getMessageNodeConfig() { return this.getKafkaConfig(); } @Override public ResultBytesConverter getDafaultResultBytesConverter() { return this.resultBytesConverter; } @Override public long getMessageNodeId() { return this.nodeId; } @Override public ExtraHeadCoder getExtraHeadCoder() { return null; } protected ExecutorService getPollConsumerThreadPool() { return this.pollConsumerThreadPool; } @Override public Collection viewConsumerRegister() { return Collections.unmodifiableSet(this.consumerRegister.keySet()); } @Override public Collection viewProducerRegister() { return Collections.unmodifiableSet(this.producerRegister.keySet()); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaConfig.java ================================================ package com.pinecone.hydra.umb.kafka; import java.util.Map; public class KafkaConfig implements KConfig { protected String mszServer; protected String mszAutoOffsetReset; protected long mnDefaultPollHandleMillis; public KafkaConfig ( Map conf ) { this.mszServer = (String) conf.get( "server" ); this.mszAutoOffsetReset = (String) conf.getOrDefault( "AutoOffsetReset", KafkaConstants.DefaultAutoOffsetReset ); this.mnDefaultPollHandleMillis = ( (Number)conf.getOrDefault( "DefaultPollHandleMillis", KafkaConstants.DefaultPollHandleMillis ) ).longValue(); } public KafkaConfig( String szServer, String szAutoOffsetReset, long nDefaultPollHandleMillis ){ this.mszServer = szServer; this.mszAutoOffsetReset = szAutoOffsetReset; this.mnDefaultPollHandleMillis = nDefaultPollHandleMillis; } public KafkaConfig( String szServer ){ this( szServer, KafkaConstants.DefaultAutoOffsetReset, KafkaConstants.DefaultPollHandleMillis ); } @Override public String getMszServer() { return this.mszServer; } @Override public String getMszAutoOffsetReset() { return this.mszAutoOffsetReset; } @Override public long getMnDefaultPollHandleMillis() { return this.mnDefaultPollHandleMillis; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaConstants.java ================================================ package com.pinecone.hydra.umb.kafka; public final class KafkaConstants { public static final String 
DefaultAutoOffsetReset  = "earliest";
    public static final Long   DefaultPollHandleMillis = 100L;
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaMedium.java
================================================
package com.pinecone.hydra.umb.kafka;

import com.pinecone.framework.system.NotImplementedException;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.MessageNodus;

import java.io.InputStream;
import java.io.OutputStream;

public class KafkaMedium implements Medium {
    protected MessageNodus mMessageNode;

    public KafkaMedium( MessageNodus medium ) {
        this.mMessageNode = medium;
    }

    @Override
    public Object getNativeMessageSource() {
        throw new NotImplementedException();
    }

    @Override
    public OutputStream getOutputStream() {
        throw new NotImplementedException();
    }

    @Override
    public InputStream getInputStream() {
        throw new NotImplementedException();
    }

    @Override
    public String sourceName() {
        return "Kafka";
    }

    @Override
    public void release() {
    }

    @Override
    public MessageNodus getMessageNode() {
        return this.mMessageNode;
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaPollResult.java
================================================
package com.pinecone.hydra.umb.kafka;

import com.pinecone.hydra.umb.broadcast.PollResult;

public class KafkaPollResult implements PollResult {
    protected Object   name;
    protected Object   value;
    protected byte[]   bytesValue;
    protected Object[] args;

    public KafkaPollResult( Object name, Object value, byte[] bytesValue, Object[] args ) {
        this( name, value, bytesValue );
        this.args = args;
    }

    public KafkaPollResult( Object name, Object value, byte[] bytesValue ) {
        this.name       = name;
        this.value      = value;
        this.bytesValue = bytesValue;
    }

    @Override
    public Object getName() {
        return this.name;
    }

    @Override
    public Object getValue() {
        // Fixed: previously returned null, discarding the value supplied to the constructor.
        return this.value;
    }

    @Override
    public byte[] getBytesValue() {
        // Fixed: previously returned an empty array, discarding the converted bytes.
        return this.bytesValue;
    }

    @Override
    public Object[] getArgs() {
        // Fixed: previously returned an empty array, discarding the record metadata.
        return this.args;
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaReceiver.java
================================================
package com.pinecone.hydra.umb.kafka;

import com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.msg.UMCReceiver;

import java.io.IOException;

public class KafkaReceiver extends ArchUnidirectionalMCProtocol implements UMCReceiver {
    // Broadcast transports are unidirectional; these receiver endpoints are inert by design.
    public KafkaReceiver( Medium messageSource ) {
        super( messageSource );
    }

    @Override
    public Object readInformMsg() throws IOException {
        return null;
    }

    @Override
    public UMCMessage readTransferMsg() throws IOException {
        return null;
    }

    @Override
    public UMCMessage readTransferMsgBytes() throws IOException {
        return null;
    }

    @Override
    public UMCMessage readMsg() throws IOException {
        return null;
    }

    @Override
    public UMCMessage readMsgBytes() throws IOException {
        return null;
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaTransmit.java
================================================
package com.pinecone.hydra.umb.kafka;

import com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.Status;
import com.pinecone.hydra.umc.msg.UMCMessage;
import
com.pinecone.hydra.umc.msg.UMCTransmit; import java.io.IOException; import java.io.InputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class KafkaTransmit extends ArchUnidirectionalMCProtocol implements UMCTransmit { protected Logger logger = LoggerFactory.getLogger( this.getClass() ); public KafkaTransmit( Medium messageSource ){ super(messageSource); } @Override public void sendInformMsg( Object msg ) throws IOException { } @Override public void sendInformMsg( Object msg, Status status ) throws IOException { if ( status != Status.OK ) { this.logger.warn( "IllegalTransmitResponse for broadcast message nodes. what => {}, {}", msg, status ); } } @Override public void sendTransferMsg( Object msg, byte[] bytes ) throws IOException { } @Override public void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException { if ( status != Status.OK ) { this.logger.warn( "IllegalTransmitResponse for broadcast message nodes. what => {}, {}", msg, status ); } } @Override public void sendTransferMsg( Object msg, InputStream is ) throws IOException { } @Override public void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException { if ( msg.getHead().getStatus() != Status.OK ) { this.logger.warn( "IllegalTransmitResponse for broadcast message nodes. what => {}", msg ); } } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/UlfBroadcastPollConsumer.java ================================================ package com.pinecone.hydra.umb.kafka; import com.pinecone.framework.system.IrrationalProvokedException; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.UlfPackageMessageHandler; import com.pinecone.hydra.umb.broadcast.PollResult; import com.pinecone.hydra.umb.broadcast.converter.ResultBytesConverter; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.ByteArrayDeserializer; import org.apache.kafka.common.serialization.StringDeserializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; public class UlfBroadcastPollConsumer implements KBroadcastPollConsumer { protected static Properties newDefaultProperties( KConfig kafkaConfig, String group ) { Properties properties = new Properties(); properties.put( "bootstrap.servers", kafkaConfig.getMszServer() ); properties.put( "group.id", group ); properties.put( "key.deserializer", StringDeserializer.class.getName() ); properties.put( "value.deserializer", ByteArrayDeserializer.class.getName() ); properties.put( "auto.offset.reset", kafkaConfig.getMszAutoOffsetReset() ); return properties; } protected KClient kafkaClient; protected Properties properties; protected String topic; protected String group; protected KafkaConsumer wrappedConsumer; protected AtomicBoolean pollConsumerCloseSignal; protected ResultBytesConverter resultBytesConverter; protected ExecutorService pollConsumerThreadPool; protected Thread privatePollConsumerThread; protected Logger log = LoggerFactory.getLogger( this.getClass() ); public UlfBroadcastPollConsumer( KClient kafkaClient, String topic, String group, Properties properties, 
ResultBytesConverter resultBytesConverter ) {
        this.kafkaClient = kafkaClient;
        this.properties = properties;
        this.topic = topic;
        this.group = group;
        this.pollConsumerCloseSignal = new AtomicBoolean( false );
        this.resultBytesConverter = resultBytesConverter;
        try {
            this.pollConsumerThreadPool = ((KafkaClient)this.getKafkaClient()).getPollConsumerThreadPool();
        }
        catch ( ClassCastException ignore ) {
            // Ignore: the shared poll thread pool is only available on KafkaClient instances.
        }
    }

    @SuppressWarnings( "unchecked" )
    public UlfBroadcastPollConsumer( KClient kafkaClient, String topic, String group, Properties properties ) {
        this( kafkaClient, topic, group, properties, (ResultBytesConverter) kafkaClient.getDafaultResultBytesConverter() );
    }

    public UlfBroadcastPollConsumer( KClient kafkaClient, String topic, String group ) {
        this( kafkaClient, topic, group, UlfBroadcastPollConsumer.newDefaultProperties( kafkaClient.getKafkaConfig(), group ) );
    }

    @Override
    public void close() {
        if ( this.wrappedConsumer != null ) {
            this.wrappedConsumer.close();
            this.kafkaClient.deregister( this );
            this.wrappedConsumer = null;
            if ( this.pollConsumerThreadPool != null ) {
                this.pollConsumerCloseSignal.compareAndSet( false, true );
            }
        }
    }

    @Override
    public void start( UlfPackageMessageHandler handler ) throws UMBServiceException {
        try {
            this.close();
            this.wrappedConsumer = this.newBytesConsumer( handler );
        }
        catch ( Exception e ) {
            throw new UMBServiceException( e );
        }
    }

    @Override
    public boolean isClosed() {
        return this.wrappedConsumer == null;
    }

    @Override
    public String topic() {
        return this.topic;
    }

    @Override
    public String tag() {
        return this.group;
    }

    @Override
    public List startPull( long millis ) {
        this.close();
        KafkaConsumer kafkaConsumer = new KafkaConsumer<>( this.properties );
        try {
            kafkaConsumer.subscribe( Collections.singletonList( this.topic ) );
            ConsumerRecords records = kafkaConsumer.poll( Duration.ofMillis( millis ) );
            ArrayList pollResults = new ArrayList<>();
            for ( ConsumerRecord record : records ) {
                KafkaPollResult kafkaPollResult = new KafkaPollResult(
                        record.key(), record.value(), this.resultBytesConverter.convert( record.value() ),
                        new Object[] { record.headers(), record.topic(), record.offset() }
                );
                pollResults.add( kafkaPollResult );
            }
            return pollResults;
        }
        finally {
            // Fixed: the one-shot consumer was previously never closed, leaking its connection.
            kafkaConsumer.close();
        }
    }

    protected KafkaConsumer newBytesConsumer( UlfPackageMessageHandler handler ) {
        KafkaConsumer kafkaConsumer = new KafkaConsumer<>( this.properties );
        kafkaConsumer.subscribe( Collections.singletonList( this.topic ) );
        long pollMills = this.kafkaClient.getKafkaConfig().getMnDefaultPollHandleMillis();
        // Fixed: reset the close signal, otherwise a consumer restarted after close()
        // would see the stale signal and terminate its poll loop immediately.
        this.pollConsumerCloseSignal.set( false );
        Runnable runnable = new Runnable() {
            @Override
            public void run() {
                while ( true ) {
                    ConsumerRecords records = kafkaConsumer.poll( Duration.ofMillis( pollMills ) );
                    for ( ConsumerRecord record : records ) {
                        try {
                            handler.onSuccessfulMsgReceived(
                                    UlfBroadcastPollConsumer.this.resultBytesConverter.convert( record.value() ),
                                    new Object[] { record.key(), record.headers() }
                            );
                        }
                        catch ( Exception e ) {
                            log.warn( "Warning, unexpected exception while processing Kafka consumer messages, what => '{}'", e.getMessage(), e );
                            //throw new IrrationalProvokedException( e ); // It would kill the Kafka polling loop thread.
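                            // Note: handler exceptions are logged and swallowed so that one bad record
                            // cannot terminate this long-running poll loop; the close signal checked
                            // below is the only cooperative way to stop it.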
                        }
                    }
                    if ( UlfBroadcastPollConsumer.this.pollConsumerCloseSignal.get() ) {
                        break;
                    }
                }
            }
        };
        if ( this.pollConsumerThreadPool != null ) {
            this.pollConsumerThreadPool.execute( runnable );
        }
        else {
            this.privatePollConsumerThread = new Thread( runnable );
            this.privatePollConsumerThread.start();
        }
        return kafkaConsumer;
    }

    public KClient getKafkaClient() {
        return this.kafkaClient;
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/UlfBroadcastProducer.java
================================================
package com.pinecone.hydra.umb.kafka;

import com.pinecone.hydra.umb.UMBClientException;
import com.pinecone.hydra.umb.UMBServiceException;
import com.pinecone.hydra.umb.broadcast.BroadcastNode;
import com.pinecone.hydra.umb.broadcast.UNT;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class UlfBroadcastProducer<K, V> implements KBroadcastProducer {
    protected static Properties newDefaultProperties( KConfig kafkaConfig ) {
        Properties properties = new Properties();
        properties.put( "bootstrap.servers", kafkaConfig.getMszServer() );
        properties.put( "key.serializer",   StringSerializer.class.getName() );
        properties.put( "value.serializer", ByteArraySerializer.class.getName() );
        return properties;
    }

    protected String              server;
    protected KClient             kafkaClient;
    protected KafkaProducer<K, V> kafkaProducer;
    protected Properties          properties;

    public UlfBroadcastProducer( KClient kafkaClient, Properties properties ) {
        this.kafkaClient = kafkaClient;
        KConfig kafkaConfig = kafkaClient.getKafkaConfig();
        this.server = kafkaConfig.getMszServer();
        this.properties = properties;
    }

    public UlfBroadcastProducer( KClient kafkaClient ) {
        this( kafkaClient, UlfBroadcastProducer.newDefaultProperties( kafkaClient.getKafkaConfig() ) );
    }

    public KClient getKafkaClient() {
        return this.kafkaClient;
    }

    @Override
    public void close() {
        if ( this.kafkaProducer != null ) {
            this.kafkaProducer.close();
            this.kafkaClient.deregister( this );
            this.kafkaProducer = null;
        }
    }

    @Override
    public void start() throws UMBServiceException {
        this.close();
        this.kafkaProducer = new KafkaProducer<>( this.properties );
    }

    @Override
    public boolean isClosed() {
        return this.kafkaProducer == null;
    }

    @Override
    public void sendPrototypeMessage( String topic, String ns, K name, V body ) throws UMBClientException {
        // Note: Kafka itself has no namespace concept, so `ns` is accepted for
        // interface compatibility but is not mapped onto the record.
        ProducerRecord<K, V> producerRecord = new ProducerRecord<>( topic, name, body );
        this.kafkaProducer.send( producerRecord );
    }

    @SuppressWarnings( "unchecked" )
    @Override
    public void sendMessage( String topic, String ns, String name, byte[] body ) throws UMBClientException {
        ProducerRecord<String, byte[]> producerRecord = new ProducerRecord<>( topic, name, body );
        this.kafkaProducer.send( (ProducerRecord) producerRecord );
    }

    @Override
    public void sendMessage( String topic, byte[] body ) throws UMBClientException {
        this.sendMessage( topic, "", BroadcastNode.DefaultEntityName, body );
    }

    @Override
    public void sendMessage( UNT unt, String name, byte[] body ) throws UMBClientException {
        this.sendMessage( unt.getTopic(), unt.getNamespace(), name, body );
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/UlfKafkaClient.java
================================================
package com.pinecone.hydra.umb.kafka;

import
com.pinecone.hydra.umb.broadcast.UMCBroadcastNode;
import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;

public interface UlfKafkaClient extends KClient, UMCBroadcastNode {
    UMCBroadcastProducer createUlfProducer();
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/WolfKafkaConsumer.java
================================================
package com.pinecone.hydra.umb.kafka;

import com.pinecone.framework.system.Nullable;
import com.pinecone.hydra.umb.UMBBytesDecoder;
import com.pinecone.hydra.umb.UMBServiceException;
import com.pinecone.hydra.umb.UlfMBInformMessage;
import com.pinecone.hydra.umb.UlfPackageMessageHandler;
import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;
import com.pinecone.hydra.umc.msg.EMCBytesDecoder;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.UMCHead;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.msg.UMCMethod;
import com.pinecone.hydra.umc.msg.UMCReceiver;
import com.pinecone.hydra.umc.msg.UMCTransmit;
import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;
import com.pinecone.hydra.umc.wolf.UlfBytesTransferMessage;
import com.pinecone.hydra.umct.UMCTExpressHandler;

import java.io.IOException;

public class WolfKafkaConsumer extends UlfBroadcastPollConsumer implements UMCBroadcastConsumer {
    protected EMCBytesDecoder mEMCBytesDecoder;
    protected ExtraHeadCoder  mExtraHeadCoder;
    protected Medium          mMedium;
    protected UMCTransmit     mUMCTransmit;
    protected UMCReceiver     mUMCReceiver;

    public WolfKafkaConsumer( KClient client, String topic, String group, @Nullable ExtraHeadCoder extraHeadCoder ) {
        super( client, topic, group );
        this.mExtraHeadCoder = extraHeadCoder;
        if ( this.mExtraHeadCoder == null ) {
            this.mExtraHeadCoder = client.getExtraHeadCoder();
        }
        this.mEMCBytesDecoder = new UMBBytesDecoder(); // Dummy [ MQ is based on unidirectional communication. ]
        this.mMedium          = new KafkaMedium( this.getKafkaClient() );
        this.mUMCReceiver     = new KafkaReceiver( this.mMedium );
        this.mUMCTransmit     = new KafkaTransmit( this.mMedium );
    }

    public WolfKafkaConsumer( KClient client, String topic, String group ) {
        this( client, topic, group, null );
    }

    protected UMCMessage decodeMessage( byte[] raw ) throws IOException {
        UMCHead head = WolfKafkaConsumer.this.mEMCBytesDecoder.decodeIntegrated( raw, WolfKafkaConsumer.this.mExtraHeadCoder );
        if ( head.getMethod() == UMCMethod.TRANSFER ) {
            int bodyLen = (int)head.getBodyLength();
            byte[] bodyBuf = new byte[ bodyLen ];
            int headSize = head.sizeof() + head.getExtraHeadLength();
            System.arraycopy( raw, headSize, bodyBuf, 0, bodyLen );
            return new UlfBytesTransferMessage( head, bodyBuf );
        }
        return new UlfMBInformMessage( head );
    }

    @Override
    public void start( UMCTExpressHandler handler ) throws UMBServiceException {
        super.start( new UlfPackageMessageHandler() {
            @Override
            public void onSuccessfulMsgReceived( byte[] raw, Object[] args ) throws Exception {
                UMCMessage message = WolfKafkaConsumer.this.decodeMessage( raw );
                handler.onSuccessfulMsgReceived(
                        WolfKafkaConsumer.this.mMedium, WolfKafkaConsumer.this.mUMCTransmit,
                        WolfKafkaConsumer.this.mUMCReceiver, message, args
                );
            }

            @Override
            public void onErrorMsgReceived( byte[] raw, Object[] args ) throws Exception {
                UMCMessage message = WolfKafkaConsumer.this.decodeMessage( raw );
                handler.onErrorMsgReceived(
                        WolfKafkaConsumer.this.mMedium, WolfKafkaConsumer.this.mUMCTransmit,
                        WolfKafkaConsumer.this.mUMCReceiver, message, args
                );
            }

            @Override
            public void onError( Object data, Throwable cause ) {
                handler.onError( data, cause );
            }
        } );
    }

    @Override
    public UlfKafkaClient getKafkaClient() {
        return (UlfKafkaClient) this.kafkaClient;
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/WolfKafkaProducer.java
================================================
package com.pinecone.hydra.umb.kafka;

import com.pinecone.framework.system.Nullable;
import com.pinecone.hydra.umb.UMBClientException;
import com.pinecone.hydra.umb.UMCPackageMessageEncoder;
import com.pinecone.hydra.umb.UlfPackageMessageEncoder;
import com.pinecone.hydra.umb.broadcast.BroadcastNode;
import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;
import com.pinecone.hydra.umb.broadcast.UNT;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;

import java.io.IOException;

public class WolfKafkaProducer extends UlfBroadcastProducer implements UMCBroadcastProducer {
    protected UMCPackageMessageEncoder mUMCPackageMessageEncoder;
    protected ExtraHeadCoder           mExtraHeadCoder;

    public WolfKafkaProducer( UlfKafkaClient client, @Nullable ExtraHeadCoder extraHeadCoder ) {
        super( client );
        this.mExtraHeadCoder = extraHeadCoder;
        if ( this.mExtraHeadCoder == null ) {
            this.mExtraHeadCoder = client.getExtraHeadCoder();
        }
        this.mUMCPackageMessageEncoder = new UlfPackageMessageEncoder( this.mExtraHeadCoder );
    }

    public WolfKafkaProducer( UlfKafkaClient client ) {
        this( client, null );
    }

    @Override
    public UlfKafkaClient getKafkaClient() {
        return (UlfKafkaClient) this.kafkaClient;
    }

    @Override
    public void sendMessage( String topic, String ns, String name, UMCMessage message ) throws UMBClientException {
        try {
            this.sendMessage( topic, ns, name, this.mUMCPackageMessageEncoder.encode( message ) );
        }
        catch ( IOException e ) {
            throw new UMBClientException( e );
        }
    }

    @Override
    public void sendMessage( String topic, UMCMessage message )
throws UMBClientException { this.sendMessage( topic, "", BroadcastNode.DefaultEntityName, message ); } @Override public void sendMessage( UNT unt, String name, UMCMessage message ) throws UMBClientException { this.sendMessage( unt.getTopic(), unt.getNamespace(), name, message ); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/WolfMCKafkaClient.java ================================================ package com.pinecone.hydra.umb.kafka; import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import com.pinecone.hydra.umc.msg.Messagus; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder; import com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit; import com.pinecone.hydra.umc.msg.handler.GenericErrorMessageAudit; import java.util.Map; public class WolfMCKafkaClient extends KafkaClient implements UlfKafkaClient{ protected ExtraHeadCoder mExtraHeadCoder; protected ErrorMessageAudit mErrorMessageAudit; public WolfMCKafkaClient( long nodeId, KafkaConfig config, ExtraHeadCoder extraHeadCoder ) { super( nodeId, config ); this.mExtraHeadCoder = extraHeadCoder; this.mErrorMessageAudit = new GenericErrorMessageAudit( this ); } public WolfMCKafkaClient( long nodeId, String nameSrvAddr, ExtraHeadCoder extraHeadCoder ) { this( nodeId, new KafkaConfig( nameSrvAddr ), extraHeadCoder ); } public WolfMCKafkaClient( String nameSrvAddr ) { this( Messagus.nextLocalId(), nameSrvAddr, new GenericExtraHeadCoder() ); } public WolfMCKafkaClient( long nodeId, Map config, ExtraHeadCoder extraHeadCoder ){ this( nodeId, new KafkaConfig( config ), extraHeadCoder ); } public WolfMCKafkaClient( Map config, ExtraHeadCoder extraHeadCoder ){ this( Messagus.nextLocalId(), config, extraHeadCoder ); } @Override public ErrorMessageAudit getErrorMessageAudit() { return this.mErrorMessageAudit; } @Override public void setErrorMessageAudit( ErrorMessageAudit audit ){ this.mErrorMessageAudit = audit; } @Override public ExtraHeadCoder getExtraHeadCoder() { return this.mExtraHeadCoder; } @Override public UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) { WolfKafkaConsumer consumer = new WolfKafkaConsumer( this,topic,ns ); this.register( consumer ); return consumer; } @Override public UMCBroadcastConsumer createUlfConsumer( String topic ) { return this.createUlfConsumer( topic,"" ); } @Override public UMCBroadcastConsumer createUlfConsumer( UNT unt ) { return this.createUlfConsumer( unt.getTopic(), unt.getNamespace() ); } @Override public UMCBroadcastProducer createUlfProducer() { WolfKafkaProducer wolfKafkaProducer = new WolfKafkaProducer(this, this.mExtraHeadCoder); this.register( wolfKafkaProducer ); return wolfKafkaProducer; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rabbit/RabbitMQClient.java ================================================ package com.pinecone.hydra.umb.rabbit; import com.pinecone.hydra.system.Hydrogen; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Connection; import com.rabbitmq.client.ConnectionFactory; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSONObject; import java.io.IOException; import java.util.concurrent.TimeoutException; public class RabbitMQClient implements Pinenut { protected 
Hydrogen mSystem;
    protected JSONObject mjoSectionConf;

    protected String mszHost;
    protected short  mnPort;
    protected int    mnKeepAliveTimeout;
    protected int    mnSocketTimeout;
    protected String mszUsername;
    protected String mszPassword;
    protected String mszVHost = "/wolf";

    public RabbitMQClient( Hydrogen system, JSONObject joConf ) {
        this.mSystem = system;
        this.mjoSectionConf = joConf;
        this.apply( joConf );
    }

    public RabbitMQClient apply( JSONObject joConf ) {
        this.mjoSectionConf = joConf;
        this.mszHost     = this.mjoSectionConf.optString( "host" );
        this.mnPort      = (short) this.mjoSectionConf.optInt( "port" );
        this.mszPassword = this.mjoSectionConf.optString( "password" );
        this.mszUsername = this.mjoSectionConf.optString( "username" );
        return this;
    }

    public void toListen() throws IOException, TimeoutException {
        ConnectionFactory connectionFactory = new ConnectionFactory();
        connectionFactory.setHost( this.mszHost );
        connectionFactory.setPort( this.mnPort );
        connectionFactory.setUsername( this.mszUsername );
        connectionFactory.setPassword( this.mszPassword );
        connectionFactory.setVirtualHost( this.mszVHost );

        Connection connection = connectionFactory.newConnection();
        Channel channel = connection.createChannel();
        // Smoke-test publish; the queue name and payload are placeholder demo strings.
        channel.queueDeclare( "wolf-demo", false, false, false, null );
        for ( int i = 0; i < 1e3; i++ ) {
            channel.basicPublish( "", "wolf-demo", null, "Hello, RabbitMQ!".getBytes() );
        }
        channel.close();
        connection.close();
    }

    @Override
    public String toString() {
        return String.format( "[object %s(0x%s)<\uD83D\uDC07>]", this.className(), Integer.toHexString( this.hashCode() ) );
    }

    @Override
    public String toJSONString() {
        return "\"" + this.toString() + "\"";
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rabbit/RabbitMedium.java
================================================
package com.pinecone.hydra.umb.rabbit;

public class RabbitMedium {
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/ArchMQConsumer.java
================================================
package com.pinecone.hydra.umb.rocket;

import com.pinecone.hydra.umb.broadcast.PushConsumer;

public abstract class ArchMQConsumer implements PushConsumer {
    protected String mszNameServerAddr;
    protected String mszGroupName;
    protected String mszTopic;
    protected String mszTag;

    public ArchMQConsumer( String nameSrvAddr, String groupName, String topic, String tag ) {
        this.mszNameServerAddr = nameSrvAddr;
        this.mszGroupName      = groupName;
        this.mszTopic          = topic;
        this.mszTag            = tag;
    }

    @Override
    public String topic() {
        return this.mszTopic;
    }

    @Override
    public String tag() {
        return this.mszTag;
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketClient.java
================================================
package com.pinecone.hydra.umb.rocket;

import java.util.function.Supplier;

import org.apache.rocketmq.client.producer.DefaultMQProducer;

import com.pinecone.hydra.umb.broadcast.BroadcastNode;
import com.pinecone.hydra.umb.broadcast.BroadcastProducer;
import com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;

public interface RocketClient extends BroadcastNode {
    @Override
    default ErrorMessageAudit getErrorMessageAudit() {
        return null;
    }

    @Override
    default void setErrorMessageAudit( ErrorMessageAudit audit ) {
    }

    RocketConfig getRocketConfig();

    BroadcastProducer createProducer( Supplier<DefaultMQProducer> producerSupplier );
}
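A minimal usage sketch of the Kafka broadcast API defined above. The broker address, topic, group, and the wrapper class name are illustrative assumptions, not values taken from this repository; the 1000 ms poll window simply mirrors the order of magnitude of KafkaConstants.DefaultPollHandleMillis.

import java.util.List;

import com.pinecone.hydra.umb.broadcast.BroadcastConsumer;
import com.pinecone.hydra.umb.broadcast.BroadcastProducer;
import com.pinecone.hydra.umb.kafka.KBroadcastPollConsumer;
import com.pinecone.hydra.umb.kafka.KafkaClient;

public class KafkaBroadcastSketch { // Hypothetical demo class, not part of the repository.
    public static void main( String[] args ) throws Exception {
        KafkaClient client = new KafkaClient( "localhost:9092" ); // Assumed local broker.

        BroadcastProducer producer = client.createProducer();
        producer.start();
        producer.sendMessage( "demo-topic", "hello".getBytes() ); // Illustrative topic/payload.

        // One-shot pull through the poll consumer defined above; createConsumer
        // returns an UlfBroadcastPollConsumer, hence the interface cast.
        BroadcastConsumer consumer = client.createConsumer( "demo-topic", "demo-group" );
        List results = ( (KBroadcastPollConsumer) consumer ).startPull( 1000L );
        System.out.println( results.size() + " record(s) received." );

        client.close(); // Closes all registered producers/consumers and the poll thread pool.
    }
}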
================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketConfig.java ================================================ package com.pinecone.hydra.umb.rocket; import com.pinecone.hydra.umc.msg.MsgNodeConfig; public interface RocketConfig extends MsgNodeConfig { String getNameServerAddr(); String getGroupName(); int getMaxMessageSize(); int getSendMsgTimeout(); int getRetryTimesWhenSendFailed(); @Override default long getSyncWaitingMillis() { return this.getSendMsgTimeout(); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketConstants.java ================================================ package com.pinecone.hydra.umb.rocket; public final class RocketConstants { public static Integer DefaultMaxMessageSize = 4096; public static Integer DefaultSendMsgTimeout = 8000; public static Integer DefaultRetryTimesWhenSendFailed = 2; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketMQClient.java ================================================ package com.pinecone.hydra.umb.rocket; import com.pinecone.hydra.umb.broadcast.BroadcastConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import com.pinecone.hydra.umc.msg.Messagus; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import org.apache.rocketmq.client.producer.DefaultMQProducer; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Supplier; public class RocketMQClient implements RocketClient { protected Map mProducerRegister; protected Map mConsumerRegister; protected RocketConfig mRocketConfig; protected long mnNodeId; private static final Object PRESENT = new Object(); public RocketMQClient( long nodeId, RocketConfig config ) { this.mRocketConfig = config; this.mProducerRegister = new ConcurrentHashMap<>(); this.mConsumerRegister = new ConcurrentHashMap<>(); this.mnNodeId = nodeId; } public RocketMQClient( long nodeId, String nameSrvAddr, String groupName ) { this( nodeId, new RocketMQConfig( nameSrvAddr, groupName, RocketConstants.DefaultMaxMessageSize, RocketConstants.DefaultSendMsgTimeout, RocketConstants.DefaultRetryTimesWhenSendFailed ) ); } public RocketMQClient( String nameSrvAddr, String groupName ) { this( Messagus.nextLocalId(), nameSrvAddr, groupName ); } public RocketMQClient( long nodeId, Map config ){ this( nodeId, new RocketMQConfig( config ) ); } public RocketMQClient( Map config ){ this( Messagus.nextLocalId(), config ); } @Override public ExtraHeadCoder getExtraHeadCoder() { return null; } @Override public long getMessageNodeId() { return this.mnNodeId; } @Override public RocketConfig getRocketConfig() { return this.mRocketConfig; } @Override public RocketConfig getMessageNodeConfig() { return this.getRocketConfig(); } @Override public void close() { for( Map.Entry kv : this.mConsumerRegister.entrySet() ) { kv.getKey().close(); } for( Map.Entry kv : this.mProducerRegister.entrySet() ) { kv.getKey().close(); } this.mConsumerRegister.clear(); this.mProducerRegister.clear(); } @Override public void register( BroadcastProducer producer ) { this.mProducerRegister.put( producer, PRESENT ); } @Override public void register( BroadcastConsumer consumer ) { this.mConsumerRegister.put( consumer, PRESENT ); } @Override public void deregister( BroadcastProducer producer ) { 
this.mProducerRegister.remove( producer ); } @Override public void deregister( BroadcastConsumer consumer ) { this.mConsumerRegister.remove( consumer ); } @Override public BroadcastProducer createProducer( Supplier producerSupplier ) { BroadcastProducer producer = new UlfBroadcastProducer( this, producerSupplier ); this.register( producer ); return producer; } @Override public BroadcastProducer createProducer() { return this.createProducer( DefaultMQProducer::new ); } @Override public BroadcastConsumer createConsumer( String topic, String ns ) { BroadcastConsumer consumer = new UlfPushConsumer( this, topic, ns ); this.register( consumer ); return consumer; } @Override public BroadcastConsumer createConsumer( String topic ) { return this.createConsumer( topic, "" ); } @Override public BroadcastConsumer createConsumer( UNT unt ) { return this.createConsumer( unt.getTopic(), unt.getNamespace() ); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketMQConfig.java ================================================ package com.pinecone.hydra.umb.rocket; import java.util.Map; public class RocketMQConfig implements RocketConfig { protected String mszNameServerAddr; protected String mszGroupName; protected int mnMaxMessageSize; protected int mnSendMsgTimeout; protected int mnRetryTimesWhenSendFailed; public RocketMQConfig( Map conf ){ this.mszNameServerAddr = (String) conf.get( "NameServerAddr" ); this.mszGroupName = (String) conf.get( "GroupName" ); this.mnMaxMessageSize = ( (Number) conf.get( "MaxMessageSize" ) ).intValue(); this.mnSendMsgTimeout = ( (Number) conf.get( "SendMsgTimeout" ) ).intValue(); this.mnRetryTimesWhenSendFailed = ( (Number) conf.get( "RetryTimesWhenSendFailed" ) ).intValue(); } public RocketMQConfig( String nameServerAddr, String groupName, int maxMessageSize, int sendMsgTimeout, int retryTimesWhenSendFailed ) { this.mszNameServerAddr = nameServerAddr; this.mszGroupName = groupName; this.mnMaxMessageSize = maxMessageSize; this.mnSendMsgTimeout = sendMsgTimeout; this.mnRetryTimesWhenSendFailed = retryTimesWhenSendFailed; } @Override public String getNameServerAddr() { return this.mszNameServerAddr; } @Override public String getGroupName() { return this.mszGroupName; } @Override public int getMaxMessageSize() { return this.mnMaxMessageSize; } @Override public int getSendMsgTimeout() { return this.mnSendMsgTimeout; } @Override public int getRetryTimesWhenSendFailed() { return this.mnRetryTimesWhenSendFailed; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketMedium.java ================================================ package com.pinecone.hydra.umb.rocket; import java.io.InputStream; import java.io.OutputStream; import com.pinecone.framework.system.NotImplementedException; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.MessageNodus; public class RocketMedium implements Medium { protected MessageNodus mMessageNode; public RocketMedium( MessageNodus medium ) { this.mMessageNode = medium; } @Override public OutputStream getOutputStream(){ throw new NotImplementedException(); } @Override public InputStream getInputStream(){ throw new NotImplementedException(); } @Override public Object getNativeMessageSource(){ throw new NotImplementedException(); } @Override public String sourceName(){ return "RocketMQ"; } @Override public MessageNodus getMessageNode() { return this.mMessageNode; } 
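    // Like KafkaMedium, this medium is a stub: MQ transports are unidirectional,
    // so the stream accessors above intentionally throw NotImplementedException.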
@Override public void release() { } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketReceiver.java ================================================ package com.pinecone.hydra.umb.rocket; import java.io.IOException; import com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; public class RocketReceiver extends ArchUnidirectionalMCProtocol implements UMCReceiver { public RocketReceiver( Medium messageSource ) { super( messageSource ); } @Override public Object readInformMsg() throws IOException { return null; } @Override public UMCMessage readTransferMsg() throws IOException { return null; } @Override public UMCMessage readTransferMsgBytes() throws IOException { return null; } @Override public UMCMessage readMsg() throws IOException { return null; } @Override public UMCMessage readMsgBytes() throws IOException { return null; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketTransmit.java ================================================ package com.pinecone.hydra.umb.rocket; import java.io.IOException; import java.io.InputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.Status; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCTransmit; public class RocketTransmit extends ArchUnidirectionalMCProtocol implements UMCTransmit { protected Logger logger = LoggerFactory.getLogger( this.getClass() ); public RocketTransmit( Medium messageSource ) { super( messageSource ); } @Override public void sendInformMsg( Object msg ) throws IOException { } @Override public void sendInformMsg( Object msg, Status status ) throws IOException { if ( status != Status.OK ) { this.logger.warn( "IllegalTransmitResponse for broadcast message nodes. what => {}, {}", msg, status ); } } @Override public void sendTransferMsg( Object msg, byte[] bytes ) throws IOException { } @Override public void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException { if ( status != Status.OK ) { this.logger.warn( "IllegalTransmitResponse for broadcast message nodes. what => {}, {}", msg, status ); } } @Override public void sendTransferMsg( Object msg, InputStream is ) throws IOException { } @Override public void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException { if ( msg.getHead().getStatus() != Status.OK ) { this.logger.warn( "IllegalTransmitResponse for broadcast message nodes. 
what => {}", msg ); } } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/UlfBroadcastProducer.java ================================================ package com.pinecone.hydra.umb.rocket; import com.pinecone.hydra.umb.UMBClientException; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.broadcast.BroadcastNode; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import org.apache.rocketmq.client.exception.MQBrokerException; import org.apache.rocketmq.client.exception.MQClientException; import org.apache.rocketmq.client.producer.DefaultMQProducer; import org.apache.rocketmq.client.producer.MQProducer; import org.apache.rocketmq.common.message.Message; import org.apache.rocketmq.remoting.exception.RemotingException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; public class UlfBroadcastProducer implements BroadcastProducer { protected String mszNameServerAddr; protected String mszGroupName; protected int mnMaxMessageSize ; protected int mnSendMsgTimeout; protected int mnRetryTimesWhenSendFailed; protected MQProducer mWrappedProducer; protected RocketClient mRocketClient; protected AtomicBoolean mStart = new AtomicBoolean( false ); public UlfBroadcastProducer( RocketClient client, Supplier producerSupplier ) { this.mRocketClient = client; RocketConfig config = client.getRocketConfig(); this.mszNameServerAddr = config.getNameServerAddr(); this.mszGroupName = config.getGroupName(); this.mnMaxMessageSize = config.getMaxMessageSize(); this.mnSendMsgTimeout = config.getSendMsgTimeout(); this.mnRetryTimesWhenSendFailed = config.getRetryTimesWhenSendFailed(); DefaultMQProducer producer = producerSupplier.get(); producer.setProducerGroup(this.mszGroupName); producer.setNamesrvAddr(this.mszNameServerAddr); producer.setMaxMessageSize(this.mnMaxMessageSize); producer.setSendMsgTimeout(this.mnSendMsgTimeout); producer.setRetryTimesWhenSendFailed(this.mnRetryTimesWhenSendFailed); this.mWrappedProducer = producer; } public UlfBroadcastProducer( RocketClient client ) { this( client, DefaultMQProducer::new ); } public RocketClient getRocketClient() { return this.mRocketClient; } @Override public void sendMessage( String topic, String ns, String name, byte[] body ) throws UMBClientException { Message msg = new Message( topic, ns, name, body ); try { this.mWrappedProducer.send( msg ); } catch ( MQClientException | RemotingException | MQBrokerException | InterruptedException e ) { throw new UMBClientException( e ); } } @Override public void sendMessage( String topic, byte[] body ) throws UMBClientException { this.sendMessage( topic, "", BroadcastNode.DefaultEntityName, body ); } @Override public void sendMessage( UNT unt, String name, byte[] body ) throws UMBClientException { this.sendMessage( unt.getTopic(), unt.getNamespace(), name, body ); } @Override public void close() { this.mWrappedProducer.shutdown(); this.mRocketClient.deregister( this ); this.mStart.compareAndSet( true, false ); } @Override public void start() throws UMBServiceException { try { this.mWrappedProducer.start(); this.mStart.compareAndSet( false, true ); } catch ( MQClientException e ) { throw new UMBServiceException( e ); } } @Override public boolean isClosed() { return !this.mStart.get(); } } ================================================ FILE: 
Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/UlfPushConsumer.java ================================================ package com.pinecone.hydra.umb.rocket; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.UlfPackageMessageHandler; import com.pinecone.hydra.umb.broadcast.PushConsumer; import org.apache.rocketmq.client.consumer.DefaultMQPushConsumer; import org.apache.rocketmq.client.consumer.MQPushConsumer; import org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyContext; import org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyStatus; import org.apache.rocketmq.client.consumer.listener.MessageListenerConcurrently; import org.apache.rocketmq.client.exception.MQClientException; import org.apache.rocketmq.common.message.MessageExt; import java.util.List; public class UlfPushConsumer extends ArchMQConsumer implements PushConsumer { protected MQPushConsumer wrappedConsumer; protected RocketClient mRocketClient; public UlfPushConsumer( RocketClient client, String topic, String tag ) { super( client.getRocketConfig().getNameServerAddr(), client.getRocketConfig().getGroupName(), topic, tag ); this.mRocketClient = client; } public MQPushConsumer newMQPushConsumer( UlfPackageMessageHandler handler ) throws UMBServiceException { DefaultMQPushConsumer consumer = new DefaultMQPushConsumer( this.mszGroupName ); consumer.setNamesrvAddr( this.mszNameServerAddr ); try { consumer.subscribe( this.mszTopic, this.mszTag ); consumer.registerMessageListener(new MessageListenerConcurrently() { @Override public ConsumeConcurrentlyStatus consumeMessage( List msgs, ConsumeConcurrentlyContext context ) { for ( MessageExt msg : msgs ) { try{ handler.onSuccessfulMsgReceived( msg.getBody(), new Object[] { msg, msgs, context } ); } catch ( Exception e ) { handler.onError( msg.getBody(), e ); return ConsumeConcurrentlyStatus.RECONSUME_LATER; } } return ConsumeConcurrentlyStatus.CONSUME_SUCCESS; } }); //consumer.start(); } catch ( MQClientException e ) { throw new UMBServiceException( e ); } return consumer; } @Override public void start( UlfPackageMessageHandler handler ) throws UMBServiceException { MQPushConsumer consumer = this.newMQPushConsumer( handler ); if ( this.wrappedConsumer == null ) { this.wrappedConsumer = consumer; } try{ consumer.start(); } catch ( MQClientException e ) { throw new UMBServiceException( e ); } } @Override public void close() { if ( this.wrappedConsumer != null ) { this.wrappedConsumer.shutdown(); this.mRocketClient.deregister( this ); this.wrappedConsumer = null; } } @Override public boolean isClosed() { return this.wrappedConsumer == null; } public RocketClient getRocketClient() { return this.mRocketClient; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/UlfRocketClient.java ================================================ package com.pinecone.hydra.umb.rocket; import java.util.function.Supplier; import org.apache.rocketmq.client.producer.DefaultMQProducer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastNode; import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; public interface UlfRocketClient extends RocketClient, UMCBroadcastNode { UMCBroadcastProducer createUlfProducer( Supplier producerSupplier ) ; } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/WolfBroadcastProducer.java ================================================ 
package com.pinecone.hydra.umb.rocket; import com.pinecone.framework.system.Nullable; import com.pinecone.hydra.umb.UMBClientException; import com.pinecone.hydra.umb.UMCPackageMessageEncoder; import com.pinecone.hydra.umb.UlfPackageMessageEncoder; import com.pinecone.hydra.umb.broadcast.BroadcastNode; import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import org.apache.rocketmq.client.producer.DefaultMQProducer; import java.io.IOException; import java.util.function.Supplier; public class WolfBroadcastProducer extends UlfBroadcastProducer implements UMCBroadcastProducer { protected UMCPackageMessageEncoder mUMCPackageMessageEncoder; protected ExtraHeadCoder mExtraHeadCoder; public WolfBroadcastProducer( UlfRocketClient client, Supplier producerSupplier, @Nullable ExtraHeadCoder extraHeadCoder ) { super( client, producerSupplier ); this.mExtraHeadCoder = extraHeadCoder; if ( this.mExtraHeadCoder == null ) { this.mExtraHeadCoder = client.getExtraHeadCoder(); } this.mUMCPackageMessageEncoder = new UlfPackageMessageEncoder( this.mExtraHeadCoder ); } public WolfBroadcastProducer( UlfRocketClient client ) { this( client, DefaultMQProducer::new, null ); } @Override public UlfRocketClient getRocketClient() { return (UlfRocketClient)this.mRocketClient; } @Override public void sendMessage( String topic, String ns, String name, UMCMessage message ) throws UMBClientException { try{ this.sendMessage( topic, ns, name, this.mUMCPackageMessageEncoder.encode( message ) ); } catch ( IOException e ) { throw new UMBClientException( e ); } } @Override public void sendMessage( String topic, UMCMessage message ) throws UMBClientException { this.sendMessage( topic, "", BroadcastNode.DefaultEntityName, message ); } @Override public void sendMessage( UNT unt, String name, UMCMessage message ) throws UMBClientException { this.sendMessage( unt.getTopic(), unt.getNamespace(), name, message ); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/WolfMCRocketClient.java ================================================ package com.pinecone.hydra.umb.rocket; import java.util.Map; import java.util.function.Supplier; import org.apache.rocketmq.client.producer.DefaultMQProducer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import com.pinecone.hydra.umc.msg.Messagus; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder; import com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit; import com.pinecone.hydra.umc.msg.handler.GenericErrorMessageAudit; /** * Pinecone Ursus For Java Wolf-UMC-RocketMQ [ Wolf, Uniform Message Control Protocol Client ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
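 *
 * Editorial usage sketch (the name server address and group name are hypothetical):
 *     WolfMCRocketClient client = new WolfMCRocketClient( "127.0.0.1:9876", "demo-group" );
 *     UMCBroadcastProducer producer = client.createUlfProducer();
 *     producer.start();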
* **********************************************************
 * Uniform Message Control Protocol for RocketMQ Client
 * Uniform Message Broadcast Control Client (RocketMQ edition)
 * **********************************************************
 */
public class WolfMCRocketClient extends RocketMQClient implements UlfRocketClient {
    protected ExtraHeadCoder    mExtraHeadCoder;
    protected ErrorMessageAudit mErrorMessageAudit;

    public WolfMCRocketClient( long nodeId, RocketConfig config, ExtraHeadCoder extraHeadCoder ) {
        super( nodeId, config );
        this.mExtraHeadCoder = extraHeadCoder;
        this.mErrorMessageAudit = new GenericErrorMessageAudit( this );
    }

    public WolfMCRocketClient( long nodeId, String nameSrvAddr, String groupName, ExtraHeadCoder extraHeadCoder ) {
        this( nodeId, new RocketMQConfig(
                nameSrvAddr, groupName,
                RocketConstants.DefaultMaxMessageSize, RocketConstants.DefaultSendMsgTimeout,
                RocketConstants.DefaultRetryTimesWhenSendFailed
        ), extraHeadCoder );
    }

    public WolfMCRocketClient( String nameSrvAddr, String groupName, ExtraHeadCoder extraHeadCoder ) {
        this( Messagus.nextLocalId(), nameSrvAddr, groupName, extraHeadCoder );
    }

    public WolfMCRocketClient( String nameSrvAddr, String groupName ) {
        this( Messagus.nextLocalId(), nameSrvAddr, groupName, new GenericExtraHeadCoder() );
    }

    public WolfMCRocketClient( long nodeId, Map config, ExtraHeadCoder extraHeadCoder ) {
        this( nodeId, new RocketMQConfig( config ), extraHeadCoder );
    }

    public WolfMCRocketClient( Map config, ExtraHeadCoder extraHeadCoder ) {
        this( Messagus.nextLocalId(), config, extraHeadCoder );
    }

    @Override
    public ErrorMessageAudit getErrorMessageAudit() {
        return this.mErrorMessageAudit;
    }

    @Override
    public void setErrorMessageAudit( ErrorMessageAudit audit ) {
        this.mErrorMessageAudit = audit;
    }

    @Override
    public ExtraHeadCoder getExtraHeadCoder() {
        return this.mExtraHeadCoder;
    }

    @Override
    public UMCBroadcastProducer createUlfProducer( Supplier<DefaultMQProducer> producerSupplier ) {
        UMCBroadcastProducer producer = new WolfBroadcastProducer( this, producerSupplier, this.mExtraHeadCoder );
        this.register( producer );
        return producer;
    }

    @Override
    public UMCBroadcastProducer createUlfProducer() {
        return this.createUlfProducer( DefaultMQProducer::new );
    }

    @Override
    public UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) {
        UMCBroadcastConsumer consumer = new WolfPushConsumer( this, topic, ns, this.mExtraHeadCoder );
        this.register( consumer );
        return consumer;
    }

    @Override
    public UMCBroadcastConsumer createUlfConsumer( String topic ) {
        return this.createUlfConsumer( topic, "" );
    }

    @Override
    public UMCBroadcastConsumer createUlfConsumer( UNT unt ) {
        return this.createUlfConsumer( unt.getTopic(), unt.getNamespace() );
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/WolfPushConsumer.java
================================================
package com.pinecone.hydra.umb.rocket;

import java.io.IOException;

import com.pinecone.framework.system.Nullable;
import com.pinecone.hydra.umb.UMBBytesDecoder;
import com.pinecone.hydra.umb.UMBServiceException;
import com.pinecone.hydra.umb.UlfMBInformMessage;
import com.pinecone.hydra.umb.UlfPackageMessageHandler;
import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;
import com.pinecone.hydra.umc.msg.EMCBytesDecoder;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.UMCHead;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.msg.UMCMethod;
import com.pinecone.hydra.umc.msg.UMCReceiver;
import
com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umc.wolf.UlfBytesTransferMessage; import com.pinecone.hydra.umct.UMCTExpressHandler; public class WolfPushConsumer extends UlfPushConsumer implements UMCBroadcastConsumer { protected EMCBytesDecoder mEMCBytesDecoder; protected ExtraHeadCoder mExtraHeadCoder; protected Medium mMedium; protected UMCTransmit mUMCTransmit; protected UMCReceiver mUMCReceiver; public WolfPushConsumer( UlfRocketClient client, String topic, String tag, @Nullable ExtraHeadCoder extraHeadCoder ) { super( client, topic, tag ); this.mExtraHeadCoder = extraHeadCoder; if ( this.mExtraHeadCoder == null ) { this.mExtraHeadCoder = client.getExtraHeadCoder(); } this.mEMCBytesDecoder = new UMBBytesDecoder(); // Dummy [ MQ is base on unidirectional communication. ] this.mMedium = new RocketMedium( this.getRocketClient() ); this.mUMCReceiver = new RocketReceiver( this.mMedium ); this.mUMCTransmit = new RocketTransmit( this.mMedium ); } public WolfPushConsumer( UlfRocketClient client, String topic, String tag ) { this( client, topic, tag, null ); } @Override public UlfRocketClient getRocketClient() { return (UlfRocketClient)this.mRocketClient; } protected UMCMessage decodeMessage( byte[] raw ) throws IOException { UMCHead head = WolfPushConsumer.this.mEMCBytesDecoder.decodeIntegrated( raw, WolfPushConsumer.this.mExtraHeadCoder ); if ( head.getMethod() == UMCMethod.TRANSFER ) { int bodyLen = (int)head.getBodyLength(); byte[] bodyBuf = new byte[ bodyLen ]; int headSize = head.sizeof() + head.getExtraHeadLength(); System.arraycopy( raw, headSize, bodyBuf, 0, bodyLen ); return new UlfBytesTransferMessage( head, bodyBuf ); } return new UlfMBInformMessage( head ); } @Override public void start( UMCTExpressHandler handler ) throws UMBServiceException { super.start(new UlfPackageMessageHandler() { @Override public void onSuccessfulMsgReceived( byte[] raw, Object[] args ) throws Exception { UMCMessage message = WolfPushConsumer.this.decodeMessage( raw ); handler.onSuccessfulMsgReceived( WolfPushConsumer.this.mMedium, WolfPushConsumer.this.mUMCTransmit, WolfPushConsumer.this.mUMCReceiver, message, args ); } @Override public void onErrorMsgReceived( byte[] raw, Object[] args ) throws Exception { UMCMessage message = WolfPushConsumer.this.decodeMessage( raw ); handler.onErrorMsgReceived( WolfPushConsumer.this.mMedium, WolfPushConsumer.this.mUMCTransmit, WolfPushConsumer.this.mUMCReceiver, message, args ); } @Override public void onError( Object data, Throwable cause ) { handler.onError( data, cause ); } }); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/ArchBroadcastControlAgent.java ================================================ package com.pinecone.hydra.umb.wolf; import com.pinecone.hydra.umb.broadcast.BroadcastControlAgent; import com.pinecone.hydra.umb.broadcast.BroadcastControlNode; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery; public abstract class ArchBroadcastControlAgent implements BroadcastControlAgent { protected MCTContextMachinery mMCTContextMachinery; protected BroadcastControlNode mBroadcastControlNode; public ArchBroadcastControlAgent( BroadcastControlNode controlNode ) { this.mBroadcastControlNode = controlNode; 
this.mMCTContextMachinery = controlNode.getMCTTransformer();
    }

    @Override
    public InterfacialCompiler getInterfacialCompiler() {
        return this.mBroadcastControlNode.getInterfacialCompiler();
    }

    @Override
    public MCTContextMachinery getMCTTransformer() {
        return this.mMCTContextMachinery;
    }

    @Override
    public BroadcastControlNode broadcastControlNode() {
        return this.mBroadcastControlNode;
    }

    @Override
    public ClassDigest queryClassDigest( String name ) {
        return this.mBroadcastControlNode.queryClassDigest( name );
    }

    @Override
    public MethodDigest queryMethodDigest( String name ) {
        return this.mBroadcastControlNode.queryMethodDigest( name );
    }

    @Override
    public void addClassDigest( ClassDigest that ) {
        this.mBroadcastControlNode.addClassDigest( that );
    }

    @Override
    public void addMethodDigest( MethodDigest that ) {
        this.mBroadcastControlNode.addMethodDigest( that );
    }

    @Override
    public ClassDigest compile( Class clazz, boolean bAsIface ) {
        return this.mBroadcastControlNode.compile( clazz, bAsIface );
    }
}

================================================
FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/ArchUlfBroadcastControlAgent.java
================================================
package com.pinecone.hydra.umb.wolf;

import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import com.pinecone.framework.lang.field.FieldEntity;
import com.pinecone.hydra.umb.broadcast.BroadcastControlNode;
import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;
import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;
import com.pinecone.hydra.umct.husky.compiler.MethodPrototype;
import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;
import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;
import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;
import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;

public abstract class ArchUlfBroadcastControlAgent extends ArchBroadcastControlAgent implements UlfBroadcastControlAgent {
    public ArchUlfBroadcastControlAgent( BroadcastControlNode controlNode ) {
        super( controlNode );
    }

    @Override
    public ProtoInterfacialCompiler getInterfacialCompiler() {
        return this.broadcastControlNode().getInterfacialCompiler();
    }

    @Override
    public PMCTContextMachinery getMCTTransformer() {
        return (PMCTContextMachinery) this.mMCTContextMachinery;
    }

    @Override
    public FieldProtobufEncoder getFieldProtobufEncoder() {
        return this.broadcastControlNode().getFieldProtobufEncoder();
    }

    @Override
    public FieldProtobufDecoder getFieldProtobufDecoder() {
        return this.broadcastControlNode().getFieldProtobufDecoder();
    }

    @Override
    public UlfBroadcastControlNode broadcastControlNode() {
        return (UlfBroadcastControlNode) super.broadcastControlNode();
    }

    protected CompilerEncoder getCompilerEncoder() {
        return this.getInterfacialCompiler().getCompilerEncoder();
    }

    protected DynamicMessage reinterpretMsg( MethodPrototype prototype, Object[] args ) {
        FieldProtobufEncoder encoder = this.getFieldProtobufEncoder();
        Descriptors.Descriptor descriptor = prototype.getArgumentsDescriptor();
        FieldEntity[] types = prototype.getArgumentTemplate().getSegments();
        for ( int i = 0; i < args.length; ++i ) {
            Object v = args[ i ]; // TODO: deduplicate this argument-encoding logic.
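            // Arguments are mapped onto template segments starting at index 1;
            // segment 0 of the argument template is skipped here.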
types[ i + 1 ].setValue( v ); } return encoder.encode( descriptor, types, this.getCompilerEncoder().getExceptedKeys(), this.getCompilerEncoder().getOptions() ); } protected DynamicMethodPrototype queryMethodPrototype(String szMethodAddress ) { DynamicMethodPrototype method = (DynamicMethodPrototype) this.queryMethodDigest( szMethodAddress ); if ( method == null ) { throw new IllegalArgumentException( "Method address: `" + szMethodAddress + "` is invalid." ); } return method; } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/UlfBroadcastControlAgent.java ================================================ package com.pinecone.hydra.umb.wolf; import com.pinecone.hydra.umb.broadcast.BroadcastControlAgent; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; public interface UlfBroadcastControlAgent extends BroadcastControlAgent { @Override PMCTContextMachinery getMCTTransformer(); @Override ProtoInterfacialCompiler getInterfacialCompiler(); default FieldProtobufEncoder getFieldProtobufEncoder() { return this.getInterfacialCompiler().getCompilerEncoder().getEncoder(); } FieldProtobufDecoder getFieldProtobufDecoder(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/UlfBroadcastControlNode.java ================================================ package com.pinecone.hydra.umb.wolf; import com.pinecone.hydra.umb.broadcast.BroadcastControlNode; import com.pinecone.hydra.umb.broadcast.UMCBroadcastNode; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery; import com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; public interface UlfBroadcastControlNode extends BroadcastControlNode { UMCTExpress createUlfExpress( String name ) ; RouteDispatcher createHuskyRoute() ; RouteDispatcher createHuskyRoute( UMCTExpress express ) ; @Override PMCTContextMachinery getMCTTransformer(); @Override ProtoInterfacialCompiler getInterfacialCompiler(); default FieldProtobufEncoder getFieldProtobufEncoder() { return this.getInterfacialCompiler().getCompilerEncoder().getEncoder(); } FieldProtobufDecoder getFieldProtobufDecoder(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/UlfBroadcastControlProducer.java ================================================ package com.pinecone.hydra.umb.wolf; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery; public interface UlfBroadcastControlProducer extends BroadcastControlProducer { @Override PMCTContextMachinery getMCTTransformer(); @Override ProtoInterfacialCompiler getInterfacialCompiler(); } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/WolfMCBClient.java ================================================ package com.pinecone.hydra.umb.wolf; import 
java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import org.slf4j.Logger; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.servgram.ArchServgramium; import com.pinecone.hydra.umb.broadcast.BroadcastConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umb.broadcast.BroadcastNode; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastNode; import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import com.pinecone.hydra.umc.msg.MsgNodeConfig; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit; import com.pinecone.hydra.umct.MessageJunction; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.WolfMCExpress; import com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.husky.machinery.HuskyContextMachinery; import com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher; import com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcherFabricator; import com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery; import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery; import com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; import com.pinecone.hydra.umct.mapping.BytecodeControllerInspector; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder; import javassist.ClassPool; /** * Pinecone Ursus For Java Wolf-UMCT-B [ Uniform Message Broadcast Control Transmit ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
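 *
 * Illustrative wiring (a minimal sketch, not from the sources; `broadcastNode`,
 * `parentProcess`, `MyService` and `MyServiceImpl` are hypothetical placeholders):
 *
 *   WolfMCBClient node = new WolfMCBClient( broadcastNode, "my-gram", parentProcess, WolfMCExpress.class );
 *   node.registerInstance( new MyServiceImpl(), MyService.class );
 *   node.createBroadcastControlConsumer( "my-topic" ).start();
 *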
* **********************************************************
* Uniform Message Control Transmission Protocol - Broadcast [UMC-T-B]
* Uniform Message Broadcast Transmission Control Protocol
* **********************************************************
*/
public class WolfMCBClient extends ArchServgramium implements UlfBroadcastControlNode { protected PMCTContextMachinery mPMCTContextMachinery; protected RouteDispatcher mRouteDispatcher; protected UMCBroadcastNode mUMCBroadcastNode; public WolfMCBClient( UMCBroadcastNode broadcastNode, @Nullable RouteDispatcher routeDispatcher, PMCTContextMachinery machinery, String szGramName, Processum parent ) { super( szGramName, parent ); this.mPMCTContextMachinery = machinery; this.mRouteDispatcher = routeDispatcher; this.mUMCBroadcastNode = broadcastNode; } public WolfMCBClient( UMCBroadcastNode broadcastNode, PMCTContextMachinery machinery, String szGramName, Processum parent ) { this( broadcastNode, null, machinery, szGramName, parent ); this.mRouteDispatcher = this.createHuskyRoute(); } public WolfMCBClient( UMCBroadcastNode broadcastNode, PMCTContextMachinery machinery, String szGramName, Processum parent, Class expressType ) { this( broadcastNode, null, machinery, szGramName, parent ); UMCTExpress express = this.createUMCTExpress( BroadcastNode.DefaultEntityName, expressType ); this.mRouteDispatcher = this.createHuskyRoute( express ); HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express ); } public WolfMCBClient( UMCBroadcastNode broadcastNode, String szGramName, Processum parent, Class expressType ) { this( broadcastNode, null, new HuskyContextMachinery( new BytecodeIfaceCompiler( ClassPool.getDefault(), parent.getTaskManager().getClassLoader() ), new BytecodeControllerInspector( ClassPool.getDefault(), parent.getTaskManager().getClassLoader() ), new GenericFieldProtobufDecoder() ), szGramName, parent ); UMCTExpress express = this.createUMCTExpress( BroadcastNode.DefaultEntityName, expressType ); this.mRouteDispatcher = this.createHuskyRoute( express ); HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express ); } @Override public long getMessageNodeId() { return this.mUMCBroadcastNode.getMessageNodeId(); } @Override public ErrorMessageAudit getErrorMessageAudit() { return this.mUMCBroadcastNode.getErrorMessageAudit(); } @Override public void setErrorMessageAudit( ErrorMessageAudit audit ) { this.mUMCBroadcastNode.setErrorMessageAudit( audit ); } @Override public MsgNodeConfig getMessageNodeConfig() { return this.mUMCBroadcastNode.getMessageNodeConfig(); } @Override public void applyMCTContextMachinery( MCTContextMachinery mctContextMachinery ) { this.mPMCTContextMachinery = (PMCTContextMachinery) mctContextMachinery; } @Override public void applyRouteDispatcher( RouteDispatcher routeDispatcher ) { this.mRouteDispatcher = routeDispatcher; } @Override public UMCTExpress createUMCTExpress( String name, Class expressType ) { try{ Constructor constructor = expressType.getConstructor( String.class, MessageJunction.class, Logger.class ); return (UMCTExpress) constructor.newInstance( name, this, this.getLogger() ); } catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { throw new IllegalArgumentException( "`" + expressType.getSimpleName() + "` is not a qualified UMCTExpress type."
); } } @Override public UMCTExpress createUlfExpress( String name ) { return this.createUMCTExpress( name, WolfMCExpress.class ); } @Override public RouteDispatcher createHuskyRoute() { return new HuskyRouteDispatcher( this.getTaskManager().getClassLoader(), true ); } @Override public RouteDispatcher createHuskyRoute( UMCTExpress express ) { RouteDispatcher dispatcher = this.createHuskyRoute(); dispatcher.setUMCTExpress( express ); return dispatcher; } @Override public ProtoInterfacialCompiler getInterfacialCompiler() { return this.mPMCTContextMachinery.getInterfacialCompiler(); } @Override public PMCTContextMachinery getMCTTransformer() { return this.mPMCTContextMachinery; } @Override public RouteDispatcher getRouteDispatcher() { return this.mRouteDispatcher; } @Override public FieldProtobufEncoder getFieldProtobufEncoder() { return this.mPMCTContextMachinery.getFieldProtobufEncoder(); } @Override public FieldProtobufDecoder getFieldProtobufDecoder() { return this.mPMCTContextMachinery.getFieldProtobufDecoder(); } @Override public ClassDigest queryClassDigest( String name ) { return this.mPMCTContextMachinery.queryClassDigest( name ); } @Override public MethodDigest queryMethodDigest( String name ) { return this.mPMCTContextMachinery.queryMethodDigest( name ); } @Override public void addClassDigest( ClassDigest that ) { this.mPMCTContextMachinery.addClassDigest( that ); } @Override public void addMethodDigest( MethodDigest that ) { this.mPMCTContextMachinery.addMethodDigest( that ); } @Override public ClassDigest compile( Class clazz, boolean bAsIface ) { return this.mPMCTContextMachinery.compile( clazz, bAsIface ); } @Override public void registerInstance( String deliverName, Object instance, Class iface ) { this.mRouteDispatcher.registerInstance( deliverName, instance, iface ); } @Override public void registerInstance( Object instance, Class iface ) { this.mRouteDispatcher.registerInstance( instance, iface ); } @Override public void registerController( String deliverName, Object instance, Class controllerType ) { this.mRouteDispatcher.registerController( deliverName, instance, controllerType ); } @Override public void registerController( Object instance, Class controllerType ) { this.mRouteDispatcher.registerController( instance, controllerType ); } @Override public BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent, RouteDispatcher routeDispatcher ) { return new WolfMCBConsumer( this, routeDispatcher, workAgent ); } @Override public BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent ) { return this.createBroadcastControlConsumer( workAgent, this.getRouteDispatcher() ); } @Override public BroadcastControlConsumer createBroadcastControlConsumer( UNT unt ) { return this.createBroadcastControlConsumer( this.createUlfConsumer( unt ), this.getRouteDispatcher() ); } @Override public BroadcastControlConsumer createBroadcastControlConsumer( String topic, String ns ) { return this.createBroadcastControlConsumer( this.createUlfConsumer( topic, ns ), this.getRouteDispatcher() ); } @Override public BroadcastControlConsumer createBroadcastControlConsumer( String topic ) { return this.createBroadcastControlConsumer( this.createUlfConsumer( topic ), this.getRouteDispatcher() ); } @Override public BroadcastControlProducer createBroadcastControlProducer( UMCBroadcastProducer workAgent ) { return new WolfMCBProducer( this, workAgent ); } @Override public BroadcastControlProducer createBroadcastControlProducer() { return 
this.createBroadcastControlProducer( this.createUlfProducer() ); } @Override public UMCBroadcastNode getUMCBroadcastNode() { return this.mUMCBroadcastNode; } @Override public ExtraHeadCoder getExtraHeadCoder() { return this.mUMCBroadcastNode.getExtraHeadCoder(); } @Override public UMCBroadcastProducer createUlfProducer() { return this.mUMCBroadcastNode.createUlfProducer(); } @Override public UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) { return this.mUMCBroadcastNode.createUlfConsumer( topic, ns ); } @Override public UMCBroadcastConsumer createUlfConsumer( String topic ) { return this.mUMCBroadcastNode.createUlfConsumer( topic ); } @Override public UMCBroadcastConsumer createUlfConsumer( UNT unt ) { return this.mUMCBroadcastNode.createUlfConsumer( unt ); } @Override public void register( BroadcastProducer producer ) { this.mUMCBroadcastNode.register( producer ); } @Override public void register( BroadcastConsumer consumer ) { this.mUMCBroadcastNode.register( consumer ); } @Override public void deregister( BroadcastProducer producer ) { this.mUMCBroadcastNode.deregister( producer ); } @Override public void deregister( BroadcastConsumer consumer ) { this.mUMCBroadcastNode.deregister( consumer ); } @Override public BroadcastProducer createProducer() { return this.mUMCBroadcastNode.createUlfProducer(); } @Override public BroadcastConsumer createConsumer( String topic, String ns ) { return this.mUMCBroadcastNode.createUlfConsumer( topic, ns ); } @Override public BroadcastConsumer createConsumer( String topic ) { return this.mUMCBroadcastNode.createUlfConsumer( topic ); } @Override public BroadcastConsumer createConsumer( UNT unt ) { return this.mUMCBroadcastNode.createUlfConsumer( unt ); } @Override public void execute() throws Exception { } @Override public void close() { this.mUMCBroadcastNode.close(); } @Override public void terminate() { this.close(); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/WolfMCBConsumer.java ================================================ package com.pinecone.hydra.umb.wolf; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlNode; import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer; import com.pinecone.hydra.umct.UMCTExpressHandler; import com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; public class WolfMCBConsumer extends ArchBroadcastControlAgent implements BroadcastControlConsumer { protected RouteDispatcher mRouteDispatcher; protected UMCBroadcastConsumer mBroadcastConsumer; public WolfMCBConsumer ( BroadcastControlNode controlNode, RouteDispatcher routeDispatcher, UMCBroadcastConsumer broadcastConsumer ) { super( controlNode ); this.mRouteDispatcher = routeDispatcher; this.mBroadcastConsumer = broadcastConsumer; } public WolfMCBConsumer ( BroadcastControlNode controlNode, UMCBroadcastConsumer broadcastConsumer ) { this( controlNode, controlNode.getRouteDispatcher(), broadcastConsumer ); } @Override public void start() throws UMBServiceException { this.start( this.mRouteDispatcher.getUMCTExpress() ); } @Override public void start( UMCTExpressHandler handler ) throws UMBServiceException { this.mBroadcastConsumer.start( handler ); } @Override public void close() { this.mBroadcastConsumer.close(); } @Override public void registerInstance( String deliverName, Object instance, Class iface ) { 
this.mRouteDispatcher.registerInstance( deliverName, instance, iface ); } @Override public void registerInstance( Object instance, Class iface ) { this.mRouteDispatcher.registerInstance( instance, iface ); } @Override public void registerController( String deliverName, Object instance, Class controllerType ) { this.mRouteDispatcher.registerController( deliverName, instance, controllerType ); } @Override public void registerController( Object instance, Class controllerType ) { this.mRouteDispatcher.registerController( instance, controllerType ); } } ================================================ FILE: Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/WolfMCBProducer.java ================================================ package com.pinecone.hydra.umb.wolf; import java.io.IOException; import com.pinecone.hydra.umb.broadcast.proxy.GenericIfaceProxyFactory; import com.pinecone.hydra.umb.broadcast.proxy.IfaceProxyFactory; import com.google.protobuf.DynamicMessage; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.UlfMBInformMessage; import com.pinecone.hydra.umb.broadcast.BroadcastControlNode; import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; import com.pinecone.hydra.umb.broadcast.UNT; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; public class WolfMCBProducer extends ArchUlfBroadcastControlAgent implements UlfBroadcastControlProducer { protected UMCBroadcastProducer mBroadcastProducer; protected IfaceProxyFactory mIfaceProxyFactory; public WolfMCBProducer ( BroadcastControlNode controlNode, UMCBroadcastProducer broadcastProducer ) { super( controlNode ); this.mBroadcastProducer = broadcastProducer; this.mIfaceProxyFactory = new GenericIfaceProxyFactory( this ); } @Override public void issueInform( UNT unt, String name, MethodPrototype method, Object[] args ) throws IOException { DynamicMessage message = this.reinterpretMsg( method, args ); this.mBroadcastProducer.sendMessage( unt, name, new UlfMBInformMessage( message.toByteArray() ) ); } @Override public void issueInform( String topic, String ns, String name, MethodPrototype method, Object[] args ) throws IOException { DynamicMessage message = this.reinterpretMsg( method, args ); this.mBroadcastProducer.sendMessage( topic, ns, name, new UlfMBInformMessage( message.toByteArray() ) ); } @Override public void issueInform( String topic, MethodPrototype method, Object[] args ) throws IOException { DynamicMessage message = this.reinterpretMsg( method, args ); this.mBroadcastProducer.sendMessage( topic, new UlfMBInformMessage( message.toByteArray() ) ); } @Override public void issueInform( String topic, String szMethodAddress, Object... 
args ) throws IOException { this.issueInform( topic, this.queryMethodPrototype( szMethodAddress ), args ); } @Override public <T> T getIface( Class<T> iface, String topic, String ns, String name ) { return this.mIfaceProxyFactory.createProxy( iface, topic, ns, name ); } @Override public void close() { this.mBroadcastProducer.close(); } @Override public void start() throws UMBServiceException { this.mBroadcastProducer.start(); } }
================================================ FILE: Hydra/hydra-message-control/pom.xml ================================================
<project>
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId>
    <artifactId>hydra-message-control</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-architecture</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-framework-runtime</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.ulf</groupId>
            <artifactId>ulfhedinn</artifactId>
            <version>1.2.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-all</artifactId>
            <version>4.1.80.Final</version>
        </dependency>
        <dependency>
            <groupId>org.javassist</groupId>
            <artifactId>javassist</artifactId>
            <version>3.29.0-GA</version>
        </dependency>
    </dependencies>
</project>
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/appoints/AppointNodus.java ================================================
package com.pinecone.hydra.appoints; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.hydra.umc.msg.Messagus; public interface AppointNodus extends Messagus { String getName(); PatriarchalConfig getConfig(); void close() ; void execute() throws Exception ; }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ActingDuplexExpress.java ================================================
package com.pinecone.hydra.uma; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.umct.MessageDeliver; import com.pinecone.hydra.umct.MessageExpress; import com.pinecone.hydra.umct.MessageJunction; public abstract class ActingDuplexExpress extends ArchDuplexExpress { @Override public String getName() { return null; } @Override public MessageJunction getJunction() { return null; } @Override public MessageDeliver recruit( String szName ) { return null; } @Override public MessageExpress register( Deliver deliver ) { return null; } @Override public MessageExpress fired( Deliver deliver ) { return null; } @Override public MessageDeliver getDeliver( String szName ) { return null; } @Override public boolean hasOwnDeliver( Deliver deliver ) { return false; } @Override public boolean hasOwnDeliver( String deliverName ) { return false; } @Override public int size() { return 0; } }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AppointClient.java ================================================
package com.pinecone.hydra.uma; import java.io.IOException; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umct.IlleagalResponseException; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; public interface AppointClient extends AppointNode { UMCMessage sendSyncMsg( UMCMessage request ) throws IOException; UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException ; void sendAsynMsg( UMCMessage request ) throws IOException ; void sendAsynMsg( UMCMessage request, AsynMsgHandler handler ) throws IOException ; void invokeInformAsyn( MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException; void invokeInformAsyn( MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException ;
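// A hedged call-site sketch, not from the sources (`client` and `Calculator` are
// hypothetical placeholders): the address-based overloads below resolve a method
// digest by name before dispatch, e.g.
//   Object sum = client.invokeInform( "Calculator" + Namespace.DEFAULT_SEPARATOR + "add", 1, 2 );
// while getIface( Calculator.class ) returns a generated proxy that routes each
// interface call through the same invokeInform path.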
Object invokeInform( MethodPrototype method, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ; Object invokeInform( MethodPrototype method, Object... args ) throws IlleagalResponseException, IOException ; void invokeInformAsyn( String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException ; void invokeInformAsyn( String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException ; Object invokeInform( String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ; Object invokeInform( String szMethodAddress, Object... args ) throws IlleagalResponseException, IOException ; <T> T getIface( Class<T> iface ); }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AppointNode.java ================================================
package com.pinecone.hydra.uma; import com.pinecone.hydra.appoints.AppointNodus; import com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umct.UMCTNode; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery; public interface AppointNode extends UMCTNode, AppointNodus { MessageNode getMessageNode(); default long getMessageNodeId() { return getMessageNode().getMessageNodeId(); } MCTContextMachinery getMCTTransformer(); InterfacialCompiler getInterfacialCompiler(); ClassDigest queryClassDigest( String name ); MethodDigest queryMethodDigest( String name ); void addClassDigest( ClassDigest that ); void addMethodDigest( MethodDigest that ); ClassDigest compile( Class clazz, boolean bAsIface ); void close(); }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AppointServer.java ================================================
package com.pinecone.hydra.uma; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.umct.MessageDeliver; import com.pinecone.hydra.umct.MessageExpress; import com.pinecone.hydra.umct.UMCTExpress; public interface AppointServer extends AppointNode { String DefaultEntityName = "__DEFAULT__"; AppointServer apply( UMCTExpress handler ); UMCTExpress getUMCTExpress(); MessageExpress register ( Deliver deliver ) ; MessageExpress fired ( Deliver deliver ) ; MessageDeliver getDeliver ( String name ); MessageDeliver getDefaultDeliver (); void registerInstance( String deliverName, Object instance, Class iface ) ; void registerInstance( Object instance, Class iface ) ; void registerController( String deliverName, Object instance, Class controllerType ) ; void registerController( Object instance, Class controllerType ) ; default void registerController( Object instance ) { this.registerController( instance, instance.getClass() ); } }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ArchAppointNode.java ================================================
package com.pinecone.hydra.uma; import com.pinecone.framework.system.GenericMasterTaskManager; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.servgram.ArchServgramium; import com.pinecone.hydra.servgram.Servgram; import com.pinecone.hydra.servgram.Servgramium; import
com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umct.ServiceException; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery; public abstract class ArchAppointNode extends ArchServgramium implements AppointNode { protected MCTContextMachinery mMCTContextMachinery; protected ArchAppointNode( Servgramium sharded ) { super( sharded, true ); this.mAffiliateThread = sharded.getAffiliateThread(); } protected ArchAppointNode( Servgramium sharded, MCTContextMachinery machinery ) { this( sharded ); this.mMCTContextMachinery = machinery; } public abstract MessageNode getMessageNode(); @Override public InterfacialCompiler getInterfacialCompiler() { return this.mMCTContextMachinery.getInterfacialCompiler(); } @Override public MCTContextMachinery getMCTTransformer() { return this.mMCTContextMachinery; } @Override public Thread getAffiliateThread() { return this.getMessageNode().getAffiliateThread(); } @Override public ArchProcessum setThreadAffinity( Thread affinity ) { this.getMessageNode().setThreadAffinity( affinity ); return super.setThreadAffinity(affinity); } @Override public boolean isTerminated() { return this.getMessageNode().isTerminated(); } @Override public void interrupt() { this.getMessageNode().interrupt(); } @Override public void kill() { this.getMessageNode().kill(); } @Override public Processum parentExecutum() { return (Processum) this.getMessageNode().parentExecutum(); } @Override public void apoptosis() { this.getMessageNode().apoptosis(); } @Override public GenericMasterTaskManager getTaskManager() { return (GenericMasterTaskManager) this.getMessageNode().getTaskManager(); } @Override public void execute() throws ServiceException { try{ ( (Servgram) this.getMessageNode() ).execute(); } catch ( Exception e ) { throw new ServiceException( e ); } } @Override public ClassDigest queryClassDigest( String name ) { return this.mMCTContextMachinery.queryClassDigest( name ); } @Override public MethodDigest queryMethodDigest( String name ) { return this.mMCTContextMachinery.queryMethodDigest( name ); } @Override public void addClassDigest( ClassDigest that ) { this.mMCTContextMachinery.addClassDigest( that ); } @Override public void addMethodDigest( MethodDigest that ) { this.mMCTContextMachinery.addMethodDigest( that ); } @Override public ClassDigest compile( Class clazz, boolean bAsIface ) { return this.mMCTContextMachinery.compile( clazz, bAsIface ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ArchDuplexExpress.java ================================================ package com.pinecone.hydra.uma; import java.io.IOException; import org.slf4j.Logger; import com.pinecone.hydra.uma.pool.GenericMultiClientChannelRegistry; import com.pinecone.hydra.express.Package; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.umc.msg.ChannelAllocateException; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.FairChannelPool; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.MessageNodus; import com.pinecone.hydra.umc.msg.MultiClientChannelRegistry; import com.pinecone.hydra.umc.msg.RecipientChannelControlBlock; import com.pinecone.hydra.umc.msg.UMCChannel; import 
com.pinecone.hydra.umc.msg.UMCConstants; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfChannelStatus; import com.pinecone.hydra.umc.wolf.UlfMessageNode; import com.pinecone.hydra.umc.wolf.WolfMCStandardConstants; import com.pinecone.hydra.umc.wolf.server.RecipientNettyChannelControlBlock; import com.pinecone.hydra.umct.DuplexExpress; import com.pinecone.hydra.umct.MessageExpress; import com.pinecone.hydra.umct.ServiceInternalException; import com.pinecone.hydra.umct.UMCConnection; import com.pinecone.hydra.umct.UlfConnection; import com.pinecone.hydra.umct.husky.HuskyCTPConstants; import io.netty.channel.Channel; import io.netty.util.AttributeKey; public abstract class ArchDuplexExpress implements DuplexExpress, MessageExpress, Slf4jTraceable { protected Logger mLogger ; protected MultiClientChannelRegistry mMultiClientChannelRegistry; protected ArchDuplexExpress() { this.mMultiClientChannelRegistry = new GenericMultiClientChannelRegistry<>(); } public ArchDuplexExpress( Logger logger ) { this(); this.mLogger = logger; } @Override public Logger getLogger() { return this.mLogger; } protected UMCConnection wrap( Package that ) { return (UMCConnection) that; } @Override public UMCMessage processResponse( UMCMessage request, UMCMessage response ) { if ( request.getHead().getControlBits() == HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_REQUEST ) { response.getHead().setControlBits( HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_RESPONSE ); } return response; } protected abstract void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception ; protected boolean handleDuplexControlMessage( UMCConnection connection, Object[] args ) throws Exception { if ( this.interceptPassiveChannel( connection, args ) ) { return true; } if ( this.interceptHandlePassiveResponse( connection, args ) ) { return true; } return false; } @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { UlfConnection connection = new UlfConnection( medium, msg, transmit, receiver, args ); if ( this.handleDuplexControlMessage( connection, args ) ) { return; } this.onSuccessfulMsgReceived( connection, args ); } protected boolean interceptHandlePassiveResponse( UMCConnection connection, Object[] args ) throws ServiceInternalException { UMCConnection uc = this.wrap( connection ); UMCMessage msg = uc.getMessage(); int controlBits = msg.getHead().getControlBits(); // Notice: // For duplex passive channel, it is necessary to use control-bits markers and explicitly call the `handler`, otherwise it will be intercepted by the `express`. 
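// Rough round-trip, reconstructed from this class (hedged; the tagging of the outbound
// request is assumed to happen upstream): sendAsynMsg( clientId, ... ) parks the handler
// on the recipient channel control block before transmitting; the peer's reply arrives
// tagged HCTP_DUP_CONTROL_PASSIVE_RESPONSE and is routed below to the parked handler
// instead of the regular express dispatch.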
// For a duplex passive link, the request must go through the VIP channel using the
// control-bits marker, and the bound callback must be invoked explicitly; otherwise
// the message would be wrongly intercepted by the bus.
if ( controlBits == HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_RESPONSE ) { RecipientChannelControlBlock cb = (RecipientChannelControlBlock)args[ 0 ]; Channel channel = (Channel)cb.getChannel().getNativeHandle(); long nWaitMillis; MessageNodus nodus = connection.getMessageSource().getMessageNode(); if ( nodus != null ) { nWaitMillis = nodus.getMessageNodeConfig().getSyncWaitingMillis(); } else { nWaitMillis = UMCConstants.DefaultSyncWaitingMillis; } try { UlfAsyncMsgHandleAdapter handle = (UlfAsyncMsgHandleAdapter) channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).get(); if ( handle == null ) { handle = cb.pollMsgHandle( nWaitMillis ); } if ( handle == null ) { throw new ServiceInternalException( "Undefined MsgHandle." ); } try { handle.onSuccessfulMsgReceived( connection.getMessageSource(), connection.getTransmit(), connection.getReceiver(), msg, args ); } catch ( Exception e ) { throw new ServiceInternalException( e ); } } catch ( InterruptedException e ) { throw new ServiceInternalException( e ); } return true; } return false; } protected boolean interceptPassiveChannel( UMCConnection connection, Object[] args ) { UMCConnection uc = this.wrap( connection ); UMCMessage msg = uc.getMessage(); int controlBits = msg.getHead().getControlBits(); if ( controlBits == HuskyCTPConstants.HCTP_DUP_CONTROL_REGISTER ) { this.registerPassiveChannel( uc, connection, args ); return true; } return false; } protected void registerPassiveChannel( UMCConnection uc, UMCConnection connection, Object[] args ) { ChannelControlBlock ccb = (ChannelControlBlock) args[ 0 ]; UMCChannel channel = ccb.getChannel(); long cid = channel.getIdentityID(); this.mMultiClientChannelRegistry.register( cid, ccb ); this.getLogger().info( "[PassiveChannel] [ClientId: {}, ChannelId: {}] <{}>", cid, ccb.getChannel().getChannelID(), "Registered" ); } static void reconnect( ChannelControlBlock block, long mils ) throws IOException { if( block.isShutdown() ) { block.getChannel().reconnect( mils ); ( (UlfMessageNode)block.getParentMessageNode() ).getChannelPool().setIdleChannel( block ); } } RecipientNettyChannelControlBlock nextAsyChannelCB( FairChannelPool pool ) throws IOException { RecipientNettyChannelControlBlock block = (RecipientNettyChannelControlBlock) pool.nextAsynChannel( pool.getMajorWaitTimeout() * 2 ); if( block == null ) { throw new ChannelAllocateException( "Channel allocate failed."
); } reconnect( block, pool.getMajorWaitTimeout() ); return block; } @Override public void afterChannelInactive( ChannelControlBlock controlBlock ) { this.mMultiClientChannelRegistry.deregister( controlBlock.getChannel().getIdentityID(), controlBlock ); } @Override public ChannelPool getPoolByClientId( long clientId ) { return this.mMultiClientChannelRegistry.getPool( clientId ); } @Override public void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException, IllegalArgumentException { try{ ChannelPool pool = this.mMultiClientChannelRegistry.getPool( clientId ); if ( pool == null ) { throw new IllegalArgumentException( "No such client " + clientId ); } FairChannelPool fp = (FairChannelPool) pool; RecipientNettyChannelControlBlock cb = this.nextAsyChannelCB( fp ); if ( handler != null ) { cb.pushMsgHandle( handler ); //cb.getChannel().getNativeHandle().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( handler ); } cb.getChannel().setChannelStatus( UlfChannelStatus.WAITING_PASSIVE_SEND ); cb.getTransmit().sendMsg( request, bNoneBuffered ); cb.getChannel().setChannelStatus( UlfChannelStatus.WAITING_PASSIVE_RECEIVE ); } catch ( ChannelAllocateException e ) { throw new IOException( e ); } } @Override public void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, AsynMsgHandler handler ) throws IOException { this.sendAsynMsg( clientId, request, bNoneBuffered, AsynMsgHandler.wrap( handler ) ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ArchUlfAppointNode.java ================================================ package com.pinecone.hydra.uma; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.hydra.servgram.Servgramium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umct.IlleagalResponseException; import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder; import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery; import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; public abstract class ArchUlfAppointNode extends ArchAppointNode implements UlfAppointNode { protected ArchUlfAppointNode( Servgramium sharded ) { super( sharded ); } protected ArchUlfAppointNode( Servgramium sharded, MCTContextMachinery machinery ) { super( sharded, machinery ); } @Override public ProtoInterfacialCompiler getInterfacialCompiler() { return (ProtoInterfacialCompiler) super.getInterfacialCompiler(); } @Override public PMCTContextMachinery getMCTTransformer() { return (PMCTContextMachinery) super.getMCTTransformer(); } @Override public FieldProtobufEncoder getFieldProtobufEncoder() { return this.getMCTTransformer().getFieldProtobufEncoder(); } @Override public FieldProtobufDecoder getFieldProtobufDecoder() { return this.getMCTTransformer().getFieldProtobufDecoder(); } protected CompilerEncoder getCompilerEncoder() { return this.getInterfacialCompiler().getCompilerEncoder(); } protected 
DynamicMessage reinterpretMsg( MethodPrototype prototype, Object[] args ) { FieldProtobufEncoder encoder = this.getFieldProtobufEncoder(); Descriptors.Descriptor descriptor = prototype.getArgumentsDescriptor(); FieldEntity[] types = prototype.getArgumentTemplate().getSegments(); for ( int i = 0; i < args.length; ++i ) { types[ i + 1 ].setValue( args [ i ] ); } return encoder.encode( descriptor, types, this.getCompilerEncoder().getExceptedKeys(), this.getCompilerEncoder().getOptions() ); } public Object unmarshalResponse( MethodPrototype digest, byte[] raw ) throws IlleagalResponseException { try{ Descriptors.Descriptor retDes = digest.getReturnDescriptor(); if ( retDes == null ) { // undefined response for `void` type-return. if ( digest.getReturnType() == void.class || digest.getReturnType() == Void.class ) { return null; } throw new IlleagalResponseException( "Illegal undefined return type, what => " + digest.getReturnType() ); } DynamicMessage rm = DynamicMessage.parseFrom( retDes, raw ); FieldProtobufDecoder decoder = this.getMCTTransformer().getFieldProtobufDecoder(); return decoder.decode( digest.getReturnType(), digest.getGenericReturnTypeLabel(), retDes, rm, this.getCompilerEncoder().getExceptedKeys(), this.getCompilerEncoder().getOptions() ); } catch ( InvalidProtocolBufferException e ) { throw new IlleagalResponseException( e ); } } public Object unmarshalResponse( MethodPrototype digest, UMCMessage msg ) throws IlleagalResponseException { return this.unmarshalResponse( digest, (byte[]) msg.getHead().getExtraHead() ); } protected DynamicMethodPrototype queryMethodPrototype(String szMethodAddress ) { DynamicMethodPrototype method = (DynamicMethodPrototype) this.queryMethodDigest( szMethodAddress ); if ( method == null ) { throw new IllegalArgumentException( "Method address `" + szMethodAddress + "` is invalid." 
); } return method; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AsynMsgHandler.java ================================================ package com.pinecone.hydra.uma; import java.io.IOException; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import io.netty.channel.ChannelHandlerContext; public interface AsynMsgHandler extends Pinenut { default void onSuccessfulMsgReceived( UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg ) throws Exception { this.onSuccessfulMsgReceived( msg ); } void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception ; default void onErrorMsgReceived( UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg ) throws Exception { this.onErrorMsgReceived( msg ); } void onErrorMsgReceived( UMCMessage msg ) throws Exception ; default void onError( Object data, Throwable cause ) { } static UlfAsyncMsgHandleAdapter wrap( AsynMsgHandler handler ) throws IOException { return new UlfAsyncMsgHandleAdapter() { @Override public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { handler.onSuccessfulMsgReceived( block.getTransmit(), block.getReceiver(), msg ); } @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { handler.onSuccessfulMsgReceived( transmit, receiver, msg ); } @Override public void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { handler.onErrorMsgReceived( block.getTransmit(), block.getReceiver(), msg ); } @Override public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { handler.onErrorMsgReceived( transmit, receiver, msg ); } @Override public void onError( Object data, Throwable cause ) { handler.onError( data, cause ); } }; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AsynReturnHandler.java ================================================ package com.pinecone.hydra.uma; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.UMCMessage; public interface AsynReturnHandler extends Pinenut { void onSuccessfulReturn( Object ret ) throws Exception ; void onErrorMsgReceived( UMCMessage msg ) throws Exception ; default void onError( Object data, Throwable cause ) { } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/DuplexAppointClient.java ================================================ package com.pinecone.hydra.uma; import java.io.IOException; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umct.UMCTExpressHandler; import com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; public interface DuplexAppointClient extends AppointClient, DuplexAppointNode { void createPassiveChannel( int nLine ); void embraces( int nLine, UlfAsyncMsgHandleAdapter handler ) throws IOException; void 
embraces( int nLine, UMCTExpressHandler handler ) throws IOException; void embraces( int nLine ) throws IOException ; RouteDispatcher getRouteDispatcher(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/DuplexAppointNode.java ================================================ package com.pinecone.hydra.uma; public interface DuplexAppointNode extends AppointNode { boolean supportDuplex(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/DuplexAppointServer.java ================================================ package com.pinecone.hydra.uma; import java.io.IOException; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umct.DuplexExpress; import com.pinecone.hydra.umct.IlleagalResponseException; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; public interface DuplexAppointServer extends AppointServer, DuplexAppointNode { @Override DuplexExpress getUMCTExpress(); void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException; void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, AsynMsgHandler handler ) throws IOException; void sendAsynMsg( long clientId, UMCMessage request, AsynMsgHandler handler ) throws IOException; void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException ; void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException ; void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException ; void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException ; Object invokeInform( long clientId, MethodPrototype method, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ; Object invokeInform( long clientId, MethodPrototype method, Object... args ) throws IlleagalResponseException, IOException ; Object invokeInform( long clientId, String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ; Object invokeInform( long clientId, String szMethodAddress, Object... 
args ) throws IlleagalResponseException, IOException ; <T> T getIface( long clientId, Class<T> iface ); }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/HuskyDuplexExpress.java ================================================
package com.pinecone.hydra.uma; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umct.MessageDeliver; import com.pinecone.hydra.umct.MessageJunction; import com.pinecone.hydra.umct.UMCConnection; import com.pinecone.hydra.umct.WolfMCExpress; public class HuskyDuplexExpress extends ArchDuplexExpress { protected WolfMCExpress mFriendExpress; public HuskyDuplexExpress( String name, MessageJunction messagram, Logger logger ) { super(); this.mFriendExpress = new HuskyMCDuplexExpress( name, messagram, logger, this ); this.mLogger = this.mFriendExpress.getLogger(); } public HuskyDuplexExpress( String name, MessageJunction messagram ) { this( name, messagram, LoggerFactory.getLogger( HuskyDuplexExpress.class.getName() ) ); } public HuskyDuplexExpress( MessageJunction messagram ) { this( null, messagram ); } @Override public UMCMessage processResponse( UMCMessage request, UMCMessage response ) { response = this.mFriendExpress.processResponse( request, response ); response = super.processResponse( request, response ); // Maintain the chain-of-responsibility, preserving each layer's processing order.
return response; } @Override protected void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception { // Dummy
} @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { this.mFriendExpress.onSuccessfulMsgReceived( medium, transmit, receiver, msg, args ); } @Override public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { this.mFriendExpress.onErrorMsgReceived( medium, transmit, receiver, msg, args ); } @Override public void onError( Object data, Throwable cause ) { this.mFriendExpress.onError( data, cause ); } @Override public String getName() { return this.mFriendExpress.getName(); } @Override public MessageJunction getJunction() { return this.mFriendExpress.getJunction(); } @Override public Logger getLogger() { return this.mFriendExpress.getLogger(); } @Override public MessageDeliver recruit ( String szName ) { return this.mFriendExpress.recruit( szName ); } @Override public HuskyDuplexExpress register ( Deliver deliver ) { this.mFriendExpress.register( deliver ); return this; } @Override public HuskyDuplexExpress fired ( Deliver deliver ) { this.mFriendExpress.fired( deliver ); return this; } @Override public MessageDeliver getDeliver ( String szName ) { return this.mFriendExpress.getDeliver( szName ); } @Override public boolean hasOwnDeliver( Deliver deliver ) { return this.mFriendExpress.hasOwnDeliver( deliver ); } @Override public boolean hasOwnDeliver( String deliverName ) { return this.mFriendExpress.hasOwnDeliver( deliverName ); } @Override public int size() { return this.mFriendExpress.size(); } static class HuskyMCDuplexExpress extends WolfMCExpress { private HuskyDuplexExpress husky;
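// Design note: the outer HuskyDuplexExpress forwards the ordinary express surface to this
// private "friend" express, while duplex control frames are peeled off first through
// handleDuplexControlMessage; the constructor keeps a back-reference for that callback.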
public HuskyMCDuplexExpress( String name, MessageJunction messagram, Logger logger, HuskyDuplexExpress self ) { super( name, messagram ); this.husky = self; this.mLogger = logger; } @Override protected void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception { boolean isDuplexControlMessage = this.husky.handleDuplexControlMessage( connection, args ); if ( !isDuplexControlMessage ) { super.onSuccessfulMsgReceived( connection, args ); } } } }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfAppointClient.java ================================================
package com.pinecone.hydra.uma; public interface UlfAppointClient extends AppointClient { }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfAppointNode.java ================================================
package com.pinecone.hydra.uma; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; public interface UlfAppointNode extends AppointNode { @Override PMCTContextMachinery getMCTTransformer(); @Override ProtoInterfacialCompiler getInterfacialCompiler(); default FieldProtobufEncoder getFieldProtobufEncoder() { return this.getInterfacialCompiler().getCompilerEncoder().getEncoder(); } FieldProtobufDecoder getFieldProtobufDecoder(); }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfAppointServer.java ================================================
package com.pinecone.hydra.uma; public interface UlfAppointServer extends AppointServer { }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfDuplexAppointClient.java ================================================
package com.pinecone.hydra.uma; public interface UlfDuplexAppointClient extends DuplexAppointClient { }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfDuplexAppointServer.java ================================================
package com.pinecone.hydra.uma; public interface UlfDuplexAppointServer extends DuplexAppointServer { }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/pool/GenericMultiClientChannelRegistry.java ================================================
package com.pinecone.hydra.uma.pool; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.FairChannelPool; import com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umc.msg.MultiClientChannelRegistry; import com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy; import com.pinecone.hydra.umc.wolf.UlfIdleFirstBalanceStrategy; import com.pinecone.hydra.umc.wolf.client.ProactiveParallelFairChannelPool; public class GenericMultiClientChannelRegistry<CID> implements MultiClientChannelRegistry<CID> { protected static final UlfIOLoadBalanceStrategy LoadBalanceStrategy = new UlfIdleFirstBalanceStrategy(); protected Lock mPoolLock;
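// mClientChannelRegistry maps a client identity to its fair channel pool, created lazily
// on first register(); mPoolLock only guards the bulk clear() path, while register and
// deregister rely on ConcurrentHashMap's atomic compute methods.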
protected Map<CID, FairChannelPool> mClientChannelRegistry; public GenericMultiClientChannelRegistry() { this.mClientChannelRegistry = new ConcurrentHashMap<>(); this.mPoolLock = new ReentrantLock(); } @Override public void register( CID id, ChannelControlBlock controlBlock ) { FairChannelPool pool = this.mClientChannelRegistry.computeIfAbsent( id, (k)->{ return new ProactiveParallelFairChannelPool<>( LoadBalanceStrategy ); } ); pool.add( controlBlock ); } @Override public void deregister( CID id, ChannelControlBlock controlBlock ) { FairChannelPool pool = this.mClientChannelRegistry.computeIfPresent( id, (k, v)->{ v.remove( controlBlock ); if ( v.isEmpty() ) { MessageNode messageNode = controlBlock.getParentMessageNode(); if ( messageNode instanceof Slf4jTraceable) { ((Slf4jTraceable) messageNode).getLogger().info( "Client `{}` is detached.", id ); } return null; } return v; } ); } @Override public void deregister( CID id ) { FairChannelPool pool = this.mClientChannelRegistry.remove( id ); if ( pool != null ) { pool.clear(); // All channels should be closed in this method, in principle.
} } @Override public ChannelPool getPool( CID id ) { return this.mClientChannelRegistry.get( id ); } @Override public int size() { return this.mClientChannelRegistry.size(); } @Override public void clear() { this.mPoolLock.lock(); try{ for( FairChannelPool pool : this.mClientChannelRegistry.values() ) { pool.clear(); // All channels should be closed in this method, in principle.
} this.mClientChannelRegistry.clear(); } finally { this.mPoolLock.unlock(); } } @Override public boolean isEmpty() { return this.mClientChannelRegistry.isEmpty(); } }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/GenericIfaceProxyFactory.java ================================================
package com.pinecone.hydra.uma.proxy; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; import com.pinecone.hydra.umct.proxy.UMCTHub; import org.springframework.cglib.proxy.Enhancer; import org.springframework.cglib.proxy.MethodInterceptor; import org.springframework.cglib.proxy.MethodProxy; import java.lang.reflect.Method; import java.util.concurrent.ConcurrentHashMap; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.uma.AppointClient; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype; import com.pinecone.hydra.umct.stereotype.IfaceUtils; public class GenericIfaceProxyFactory implements IfaceProxyFactory { protected final ConcurrentHashMap<Class<?>, Enhancer> mEnhancerCache = new ConcurrentHashMap<>(); protected AppointClient mClient; public GenericIfaceProxyFactory( AppointClient client ) { this.mClient = client; }
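// Proxy mechanics: one CGLIB Enhancer is cached per interface; each call on the proxy is
// resolved via queryMethodDigest( className + Namespace.DEFAULT_SEPARATOR + methodName )
// and dispatched through a blocking invokeInform. A hedged usage sketch, not from the
// sources (`client` and `Calculator` are hypothetical placeholders):
//   IfaceProxyFactory factory = new GenericIfaceProxyFactory( client );
//   Calculator calc = factory.createProxy( Calculator.class );
//   Object r = calc.add( 1, 2 ); // marshalled as a remote invokeInform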
client.invokeInform( methodPrototype, args ); } }); return e; }); return iface.cast( enhancer.create() ); } @Override public T createProxy( AppointClient client, Class iface ) { ClassDigest classDigest = client.queryClassDigest( IfaceUtils.queryIfaceClassNameAddress( iface ) ); return this.createProxy( client, classDigest, iface ); } @Override public T createProxy( Class iface ) { return this.createProxy( this.mClient, iface ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/GenericPassiveClientIfaceProxyFactory.java ================================================ package com.pinecone.hydra.uma.proxy; import java.lang.reflect.Method; import java.util.concurrent.ConcurrentHashMap; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; import com.pinecone.hydra.umct.proxy.UMCTHub; import org.springframework.cglib.proxy.Enhancer; import org.springframework.cglib.proxy.MethodInterceptor; import org.springframework.cglib.proxy.MethodProxy; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.uma.DuplexAppointServer; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype; import com.pinecone.hydra.umct.stereotype.IfaceUtils; public class GenericPassiveClientIfaceProxyFactory implements PassiveClientIfaceProxyFactory { protected final ConcurrentHashMap, Enhancer> mEnhancerCache = new ConcurrentHashMap<>(); protected DuplexAppointServer mServer; public GenericPassiveClientIfaceProxyFactory( DuplexAppointServer server ) { this.mServer = server; } @Override public T createProxy( long clientId, DuplexAppointServer server, ClassDigest classDigest, Class iface ) { Enhancer enhancer = this.mEnhancerCache.computeIfAbsent(iface, clazz -> { Enhancer e = new Enhancer(); e.setSuperclass( UMCTHub.class ); e.setInterfaces( new Class[]{iface} ); e.setCallback(new MethodInterceptor() { @Override public Object intercept( Object obj, Method method, Object[] args, MethodProxy proxy ) throws Throwable { String methodName = IfaceUtils.getIfaceMethodName( method ); MethodPrototype methodPrototype = (DynamicMethodPrototype) server.queryMethodDigest( classDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + methodName ); return server.invokeInform( clientId, methodPrototype, args ); } }); return e; }); return iface.cast( enhancer.create() ); } @Override public T createProxy( long clientId, DuplexAppointServer server, Class iface ) { ClassDigest classDigest = server.queryClassDigest( IfaceUtils.queryIfaceClassNameAddress( iface ) ); return this.createProxy( clientId, server, classDigest, iface ); } @Override public T createProxy( long clientId, Class iface ) { return this.createProxy( clientId, this.mServer, iface ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/IfaceProxyFactory.java ================================================ package com.pinecone.hydra.uma.proxy; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.uma.AppointClient; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; public interface IfaceProxyFactory extends Pinenut { T createProxy( AppointClient client, ClassDigest classDigest, Class iface ) ; T createProxy( AppointClient client, Class iface ) ; T createProxy( Class iface ); } ================================================ FILE: 
================================================
FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/PassiveClientIfaceProxyFactory.java
================================================
package com.pinecone.hydra.uma.proxy;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.hydra.uma.DuplexAppointServer;
import com.pinecone.hydra.umct.husky.compiler.ClassDigest;

public interface PassiveClientIfaceProxyFactory extends Pinenut {
    <T> T createProxy( long clientId, DuplexAppointServer server, ClassDigest classDigest, Class<T> iface );
    <T> T createProxy( long clientId, DuplexAppointServer server, Class<T> iface );
    <T> T createProxy( long clientId, Class<T> iface );
}

================================================
FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolfAppointClient.java
================================================
package com.pinecone.hydra.uma.wolf;

import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import com.google.protobuf.DynamicMessage;
import com.pinecone.framework.system.ProvokeHandleException;
import com.pinecone.hydra.uma.UlfAppointClient;
import com.pinecone.hydra.uma.ArchUlfAppointNode;
import com.pinecone.hydra.uma.AsynMsgHandler;
import com.pinecone.hydra.uma.AsynReturnHandler;
import com.pinecone.hydra.uma.proxy.GenericIfaceProxyFactory;
import com.pinecone.hydra.uma.proxy.IfaceProxyFactory;
import com.pinecone.hydra.servgram.Servgramium;
import com.pinecone.hydra.umc.msg.ChannelControlBlock;
import com.pinecone.hydra.umc.msg.ChannelHandleException;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.MediumTerminationException;
import com.pinecone.hydra.umc.msg.Messenger;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor;
import com.pinecone.hydra.umc.msg.event.ChannelEventHandler;
import com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;
import com.pinecone.hydra.umc.vita.HeartbeatControl;
import com.pinecone.hydra.umc.wolf.UlfInformMessage;
import com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger;
import com.pinecone.hydra.umc.wolf.client.ClientConnectArguments;
import com.pinecone.hydra.umc.wolf.client.UlfAsyncMessengerChannelControlBlock;
import com.pinecone.hydra.umc.wolf.client.UlfClient;
import com.pinecone.hydra.umc.wolf.client.WolfMCClient;
import com.pinecone.hydra.umct.IlleagalResponseException;
import com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;
import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;
import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;
import com.pinecone.hydra.umct.husky.compiler.MethodPrototype;
import com.pinecone.hydra.umct.husky.heartbeat.HuskyHeartbeatControl;
import com.pinecone.hydra.umct.husky.machinery.HuskyContextMachinery;
import com.pinecone.hydra.umct.mapping.BytecodeControllerInspector;
import com.pinecone.hydra.umct.mapping.ControllerInspector;
import com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder;

import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import javassist.ClassPool;

/**
 * Pinecone Ursus For Java WolfAppointClient [ Ulfhedinn Wolf RPC Client ]
 * Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family.
 * Author: Harald.E / JH.W (DragonKing)
 * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.
* ***************************************************************************************** */ public class WolfAppointClient extends ArchUlfAppointNode implements UlfAppointClient { protected UlfClient mMessenger; protected IfaceProxyFactory mIfaceProxyFactory; protected HeartbeatControl mHeartbeatControl; protected boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException { UlfAsyncMessengerChannelControlBlock cb = (UlfAsyncMessengerChannelControlBlock) ccb; Channel channel = cb.getChannel().getNativeHandle(); WolfAppointClient.this.getLogger().info( "Proactive channel ({}), has detached.", channel.id() ); UlfClient wrappedClient = WolfAppointClient.this.getMessageNode(); if ( wrappedClient.getConnectionArguments().isAutoReconnect() ) { try { ArchAsyncMessenger.reconnect( cb, (Messenger) wrappedClient, context ); WolfAppointClient.this.getLogger().info( "Proactive Channel ({}, `{}`), reconnect successfully.", channel.id(), cb.getChannel().getAddress() ); } catch ( MediumTerminationException e ) { WolfAppointClient.this.getLogger().info( "Service already terminated with inactive event. " ); } catch ( IOException e ) { WolfAppointClient.this.getLogger().error( "Proactive channel ({}), attempted to reconnect but failed.", channel.id(), e ); throw new ChannelHandleException( e.getCause() ); } } return true; // Blocking next inactive sequence. } protected void registerChannelInactiveHandler () { this.mMessenger.registerChannelInactiveHandler(new ChannelInactiveHandler() { @Override public boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException { this.afterEventTriggered( ccb, context ); return WolfAppointClient.this.afterChannelInactive( ccb, context ); } }); } protected void registerChannelConnectedHandler () { ClientConnectArguments arguments = WolfAppointClient.this.getMessageNode().getConnectionArguments(); this.mMessenger.registerChannelConnectedHandler(new ChannelEventHandler() { @Override public void afterEventTriggered( ChannelControlBlock block, Object context ) { if ( arguments.isEnableHeartbeat() ) { WolfAppointClient.this.mHeartbeatControl.registerChannel( block, arguments.getHeartbeatInterval() ); } } }); } protected void initUlfClientHeartbeatInterceptors( UlfClient client ) { client.registerArrivedDataInterceptor(new ChannelDataInterceptor() { @Override public boolean interceptAfterDataArrived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) { try { return WolfAppointClient.this.mHeartbeatControl.interceptFeedback( block, msg ); } catch ( IOException e ) { throw new ProvokeHandleException( e ); } } }); } private void initSelf( UlfClient messenger ) { this.mMessenger = messenger; this.mIfaceProxyFactory = new GenericIfaceProxyFactory( this ); ClientConnectArguments arguments = WolfAppointClient.this.getMessageNode().getConnectionArguments(); if ( arguments.isEnableHeartbeat() ) { this.mHeartbeatControl = new HuskyHeartbeatControl( arguments.getHeartbeatInterval() ); this.registerChannelConnectedHandler(); this.initUlfClientHeartbeatInterceptors( messenger ); } this.registerChannelInactiveHandler(); } protected WolfAppointClient( UlfClient messenger, boolean delay ){ super( (Servgramium) messenger ); this.initSelf( messenger ); } public WolfAppointClient( UlfClient messenger, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector ){ this( messenger, true ); this.mMCTContextMachinery = new HuskyContextMachinery( 
compiler, controllerInspector, new GenericFieldProtobufDecoder() ); this.initSelf( messenger ); } public WolfAppointClient( UlfClient messenger, CompilerEncoder encoder ){ this( messenger, new BytecodeIfaceCompiler( ClassPool.getDefault(), messenger.getTaskManager().getClassLoader(), encoder ), new BytecodeControllerInspector( ClassPool.getDefault(), messenger.getTaskManager().getClassLoader() ) ); } public WolfAppointClient( UlfClient messenger ){ this( messenger, new BytecodeIfaceCompiler( ClassPool.getDefault(), messenger.getTaskManager().getClassLoader() ), new BytecodeControllerInspector( ClassPool.getDefault(), messenger.getTaskManager().getClassLoader() ) ); } @Override public void close() { this.mMessenger.close(); } @Override public UlfClient getMessageNode() { return this.mMessenger; } @Override public UMCMessage sendSyncMsg( UMCMessage request ) throws IOException { return this.sendSyncMsg( request, false ); } @Override public UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException { return this.mMessenger.sendSyncMsg( request, bNoneBuffered ); } @Override public void sendAsynMsg( UMCMessage request ) throws IOException { this.mMessenger.sendAsynMsg( request ); } @Override public void sendAsynMsg( UMCMessage request, AsynMsgHandler handler ) throws IOException { this.mMessenger.sendAsynMsg( request, AsynMsgHandler.wrap( handler ) ); } @Override public void invokeInformAsyn( MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException { DynamicMessage message = this.reinterpretMsg( method, args ); this.sendAsynMsg( new UlfInformMessage(message.toByteArray()), handler ); } @Override public void invokeInformAsyn( MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException { DynamicMessage message = this.reinterpretMsg( method, args ); this.sendAsynMsg(new UlfInformMessage(message.toByteArray()), new AsynMsgHandler() { @Override public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception { handler.onSuccessfulReturn( WolfAppointClient.this.unmarshalResponse( method, msg ) ); } @Override public void onErrorMsgReceived( UMCMessage msg ) throws Exception { handler.onErrorMsgReceived( msg ); } }); } @Override public Object invokeInform( MethodPrototype method, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException { CompletableFuture future = new CompletableFuture<>(); DynamicMessage message = this.reinterpretMsg(method, args); this.sendAsynMsg(new UlfInformMessage(message.toByteArray()), new AsynMsgHandler() { @Override public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception { try { Object result = WolfAppointClient.this.unmarshalResponse( method, msg ); future.complete(result); } catch ( IlleagalResponseException e ) { future.completeExceptionally( e ); } } @Override public void onErrorMsgReceived( UMCMessage msg ) throws Exception { future.completeExceptionally( new IlleagalResponseException( "Error message received: " + msg ) ); } @Override public void onError( Object data, Throwable cause ) { future.completeExceptionally( cause ); } }); try { if ( nWaitTimeMil == -1 ) { if ( this.mMessenger instanceof WolfMCClient ) { nWaitTimeMil = ((WolfMCClient) this.mMessenger).getConnectionArguments().getSyncWaitingMillis(); } } return WolfAppointHelper.evalCompletableFuture( future, nWaitTimeMil ); } catch ( TimeoutException | ExecutionException e ) { throw new IlleagalResponseException( e ); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); 
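// Re-assert the interrupt flag before translating to IlleagalResponseException, so callers
// of this synchronous facade can still observe that the thread was interrupted.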
throw new IlleagalResponseException( e );
        }
    }

    @Override
    public Object invokeInform( MethodPrototype method, Object... args ) throws IlleagalResponseException, IOException {
        return this.invokeInform( method, args, -1 );
    }

    @Override
    public void invokeInformAsyn( String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException {
        this.invokeInformAsyn( this.queryMethodPrototype( szMethodAddress ), args, handler );
    }

    @Override
    public void invokeInformAsyn( String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException {
        this.invokeInformAsyn( this.queryMethodPrototype( szMethodAddress ), args, handler );
    }

    @Override
    public Object invokeInform( String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException {
        return this.invokeInform( this.queryMethodPrototype( szMethodAddress ), args, nWaitTimeMil );
    }

    @Override
    public Object invokeInform( String szMethodAddress, Object... args ) throws IlleagalResponseException, IOException {
        return this.invokeInform( this.queryMethodPrototype( szMethodAddress ), args );
    }

    @Override
    public <T> T getIface( Class<T> iface ) {
        return this.mIfaceProxyFactory.createProxy( iface );
    }
}

================================================
FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolfAppointHelper.java
================================================
package com.pinecone.hydra.uma.wolf;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import com.pinecone.hydra.umct.IlleagalResponseException;

public final class WolfAppointHelper {
    public static Object evalCompletableFuture( CompletableFuture<?> future, long nWaitTimeMil ) throws IlleagalResponseException, TimeoutException, ExecutionException, InterruptedException {
        Object ret;
        if ( nWaitTimeMil != -1 ) {
            ret = future.get( nWaitTimeMil, TimeUnit.MILLISECONDS );
        }
        else {
            ret = future.get();
        }
        if ( ret instanceof Exception ) {
            // A remote fault marshaled back as a value is surfaced as an exception here.
            throw new IlleagalResponseException( (Exception) ret );
        }
        return ret;
    }
}

================================================
FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolfAppointServer.java
================================================
package com.pinecone.hydra.uma.wolf;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;

import org.slf4j.Logger;

import com.pinecone.framework.system.ProvokeHandleException;
import com.pinecone.hydra.express.Deliver;
import com.pinecone.hydra.servgram.Servgramium;
import com.pinecone.hydra.uma.UlfAppointServer;
import com.pinecone.hydra.uma.ArchUlfAppointNode;
import com.pinecone.hydra.umc.msg.ChannelControlBlock;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor;
import com.pinecone.hydra.umc.vita.HeartbeatFeedbackor;
import com.pinecone.hydra.umc.wolf.server.UlfServer;
import com.pinecone.hydra.umct.MessageDeliver;
import com.pinecone.hydra.umct.MessageExpress;
import com.pinecone.hydra.umct.MessageJunction;
import com.pinecone.hydra.umct.UMCTExpress;
import com.pinecone.hydra.umct.WolfMCExpress;
import com.pinecone.hydra.umct.husky.heartbeat.HuskyHeartbeatFeedbackor;
import com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher;
import com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcherFabricator;
import
com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; import com.pinecone.hydra.umct.mapping.ControllerInspector; import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import io.netty.channel.ChannelHandlerContext; /** * Pinecone Ursus For Java WolfAppointServer [ Ulfhedinn Wolf RPC Server ] * Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family. * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** */ public class WolfAppointServer extends ArchUlfAppointNode implements UlfAppointServer { protected UlfServer mRecipient; protected RouteDispatcher mRouteDispatcher; protected HeartbeatFeedbackor mHeartbeatFeedbackor; protected void applyExpress( UMCTExpress express ) { this.mRecipient.apply( express ); } protected void initUlfServerHeartbeatInterceptors( UlfServer server ) { server.registerArrivedDataInterceptor(new ChannelDataInterceptor() { @Override public boolean interceptAfterDataArrived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) { //Debug.trace( msg ); try { return WolfAppointServer.this.mHeartbeatFeedbackor.interceptHeartbeat( block, msg ); } catch ( IOException e ) { throw new ProvokeHandleException( e ); } } }); } private void initSelf( UlfServer server ) { this.initUlfServerHeartbeatInterceptors( server ); } protected WolfAppointServer( UlfServer server, RouteDispatcher dispatcher ){ super( (Servgramium) server, dispatcher.getContextMachinery() ); this.mRecipient = server; this.mRouteDispatcher = dispatcher; this.mHeartbeatFeedbackor = new HuskyHeartbeatFeedbackor(); this.initSelf( server ); } public WolfAppointServer( UlfServer server, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ){ this( server, new HuskyRouteDispatcher( compiler, controllerInspector, express ) ); this.apply( express ); } public WolfAppointServer( UlfServer server, CompilerEncoder encoder, UMCTExpress express ){ this( server, new HuskyRouteDispatcher( encoder, express, server.getTaskManager().getClassLoader() ) ); this.apply( express ); } public WolfAppointServer( UlfServer server, UMCTExpress express ){ this( server, new HuskyRouteDispatcher( express, server.getTaskManager().getClassLoader() ) ); this.apply( express ); } public WolfAppointServer( UlfServer server, Class expressType ){ this( server, new HuskyRouteDispatcher( server.getTaskManager().getClassLoader(), true ) ); try{ Constructor constructor = expressType.getConstructor( String.class, MessageJunction.class, Logger.class ); UMCTExpress express = (UMCTExpress) constructor.newInstance(DefaultEntityName, this, this.getLogger() ); this.applyExpress( express ); HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express ); } catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { throw new IllegalArgumentException( "`" + expressType.getSimpleName() + "` is not UMCTExpress calibre qualified." 
); } } public WolfAppointServer( UlfServer server ){ this( server, WolfMCExpress.class ); } @Override public void close() { this.mRecipient.close(); } @Override public UlfServer getMessageNode() { return this.mRecipient; } @Override public WolfAppointServer apply( UMCTExpress handler ) { this.mRouteDispatcher.setUMCTExpress( handler ); this.mRecipient.apply( handler ); return this; } @Override public UMCTExpress getUMCTExpress() { return this.mRouteDispatcher.getUMCTExpress(); } @Override public MessageExpress register( Deliver deliver ) { return this.mRouteDispatcher.register( deliver ); } @Override public MessageExpress fired ( Deliver deliver ) { return this.mRouteDispatcher.fired( deliver ); } @Override public MessageDeliver getDeliver( String name ) { return this.mRouteDispatcher.getDeliver( name ); } @Override public MessageDeliver getDefaultDeliver() { return this.mRouteDispatcher.getDefaultDeliver(); } @Override public void registerInstance( String deliverName, Object instance, Class iface ) { this.mRouteDispatcher.registerInstance( deliverName, instance, iface ); } @Override public void registerInstance( Object instance, Class iface ) { this.mRouteDispatcher.registerInstance( instance, iface ); } @Override public void registerController( String deliverName, Object instance, Class controllerType ) { this.mRouteDispatcher.registerController( deliverName, instance, controllerType ); } @Override public void registerController( Object instance, Class controllerType ) { this.mRouteDispatcher.registerController( instance, controllerType ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolvesAppointClient.java ================================================ package com.pinecone.hydra.uma.wolf; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.Map; import org.slf4j.Logger; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.hydra.uma.AppointServer; import com.pinecone.hydra.uma.HuskyDuplexExpress; import com.pinecone.hydra.uma.UlfDuplexAppointClient; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelHandleException; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.MediumTerminationException; import com.pinecone.hydra.umc.msg.Messenger; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfChannel; import com.pinecone.hydra.umc.wolf.UlfInstructMessage; import com.pinecone.hydra.umc.wolf.WolfMCStandardConstants; import com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger; import com.pinecone.hydra.umc.wolf.client.UlfAsyncMessengerChannelControlBlock; import com.pinecone.hydra.umc.wolf.client.UlfClient; import com.pinecone.hydra.umct.DuplexExpress; import com.pinecone.hydra.umct.MessageJunction; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.UMCTExpressHandler; import com.pinecone.hydra.umct.husky.HuskyCTPConstants; import com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler; import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.machinery.HuskyContextMachinery; import com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher; import com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcherFabricator; import 
com.pinecone.hydra.umct.husky.machinery.ProtoRouteDispatcher; import com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; import com.pinecone.hydra.umct.mapping.BytecodeControllerInspector; import com.pinecone.hydra.umct.mapping.ControllerInspector; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder; import io.netty.channel.Channel; import io.netty.channel.ChannelId; import io.netty.util.AttributeKey; import javassist.ClassPool; /** * Pinecone Ursus For Java WolvesAppointClient [ Ulfhedinn Wolf Duplex RPC Client ] * Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family. * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** */ public class WolvesAppointClient extends WolfAppointClient implements UlfDuplexAppointClient { protected static Class checkExpressType( Class expressType ) { if ( !DuplexExpress.class.isAssignableFrom( expressType ) ) { throw new IllegalArgumentException( "`" + expressType.getSimpleName() + "` is not DuplexExpress calibre qualified." ); } return expressType; } protected Map mInstructedChannels; // Standby controlled channels, waiting for server to instruct. protected RouteDispatcher mRouteDispatcher; @Override protected boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException { UlfAsyncMessengerChannelControlBlock cb = (UlfAsyncMessengerChannelControlBlock) ccb; Channel channel = cb.getChannel().getNativeHandle(); Object ob = channel.attr( AttributeKey.valueOf( HuskyCTPConstants.HCTP_DUP_PASSIVE_CHANNEL_KEY ) ).get(); if ( ob != null && (Boolean)ob ) { WolvesAppointClient.this.getLogger().info( "Passive-controlled channel ({}), has detached.", channel.id() ); UlfClient wrappedClient = WolvesAppointClient.this.getMessageNode(); if ( wrappedClient.getConnectionArguments().isAutoReconnect() ) { try { ArchAsyncMessenger.reconnect( cb, (Messenger) wrappedClient, context ); Channel newChannel = cb.getChannel().getNativeHandle(); WolvesAppointClient.copyDuplexAttrs( channel, newChannel ); UlfInstructMessage instructMessage = new UlfInstructMessage( HuskyCTPConstants.HCTP_DUP_CONTROL_REGISTER ); instructMessage.getHead().setIdentityId( wrappedClient.getMessageNodeId() ); cb.sendAsynMsg( instructMessage, true ); WolvesAppointClient.this.getLogger().info( "Passive-controlled channel ({}, `{}`), reconnect successfully.", channel.id(), cb.getChannel().getAddress() ); } catch ( MediumTerminationException e ) { WolvesAppointClient.this.getLogger().info( "Service already terminated with inactive event. " ); } catch ( IOException e ) { WolvesAppointClient.this.getLogger().error( "Passive-controlled channel ({}), attempted to reconnect but failed.", channel.id(), e ); throw new ChannelHandleException( e.getCause() ); } } DuplexExpress express = (DuplexExpress)WolvesAppointClient.this.mRouteDispatcher.getUMCTExpress(); express.afterChannelInactive( cb ); return true; // Blocking next inactive sequence. 
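// The branch above handles a passive-controlled duplex channel: re-establish the transport
// ( ArchAsyncMessenger.reconnect ), copy the duplex attributes onto the fresh native channel,
// replay HCTP_DUP_CONTROL_REGISTER with this node's identity id so the server re-binds the
// channel into this client's pool, then notify the DuplexExpress to drop the dead channel.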
} return super.afterChannelInactive( ccb, context ); } private void initSelf() { } protected WolvesAppointClient( UlfClient messenger, ProtoRouteDispatcher dispatcher ) { super( messenger, dispatcher.getInterfacialCompiler(), dispatcher.getContextMachinery().getControllerInspector() ); this.initSelf(); this.mRouteDispatcher = dispatcher; this.mInstructedChannels = new LinkedTreeMap<>(); } public WolvesAppointClient( UlfClient messenger, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ){ this( messenger, new HuskyRouteDispatcher( compiler, controllerInspector, express ) ); this.apply( express ); } public WolvesAppointClient( UlfClient messenger, CompilerEncoder encoder, UMCTExpress express ){ this( messenger, new HuskyRouteDispatcher( encoder, express, messenger.getTaskManager().getClassLoader() ) ); this.apply( express ); } public WolvesAppointClient( UlfClient messenger, UMCTExpress express ){ this( messenger, new HuskyRouteDispatcher( express, messenger.getTaskManager().getClassLoader() ) ); this.apply( express ); } public WolvesAppointClient( UlfClient messenger, Class expressType ){ super( messenger, true ); this.initSelf(); try{ Constructor constructor = WolvesAppointClient.checkExpressType( expressType ).getConstructor( String.class, MessageJunction.class, Logger.class ); UMCTExpress express = (UMCTExpress) constructor.newInstance( AppointServer.DefaultEntityName, this, this.getLogger() ); this.mRouteDispatcher = new HuskyRouteDispatcher( express, messenger.getTaskManager().getClassLoader() ); HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express ); this.mMCTContextMachinery = new HuskyContextMachinery( new BytecodeIfaceCompiler( ClassPool.getDefault(), messenger.getTaskManager().getClassLoader() ), new BytecodeControllerInspector( ClassPool.getDefault(), messenger.getTaskManager().getClassLoader() ), new GenericFieldProtobufDecoder() ); this.apply( express ); this.mInstructedChannels = new LinkedTreeMap<>(); } catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { throw new IllegalArgumentException( "`" + expressType.getSimpleName() + "` is not UMCTExpress calibre qualified." ); } } public WolvesAppointClient( UlfClient messenger ){ this( messenger, HuskyDuplexExpress.class ); } protected static void copyDuplexAttrs( Channel leg, Channel neo ) { UlfChannel.copyChannelAttr( leg, neo, HuskyCTPConstants.HCTP_DUP_PASSIVE_CHANNEL_KEY ); } public void apply( UMCTExpress handler ) { this.mRouteDispatcher.setUMCTExpress( handler ); } @Override public RouteDispatcher getRouteDispatcher() { return this.mRouteDispatcher; } @Override public boolean supportDuplex() { return true; } @Override public void embraces( int nLine, UlfAsyncMsgHandleAdapter handler ) throws IOException { // Join us, embracing uniformity. this.createPassiveChannel( nLine ); for ( Map.Entry kv : this.mInstructedChannels.entrySet() ) { UlfInstructMessage instructMessage = new UlfInstructMessage( HuskyCTPConstants.HCTP_DUP_CONTROL_REGISTER ); instructMessage.getHead().setIdentityId( this.mMessenger.getMessageNodeId() ); ChannelControlBlock ccb = kv.getValue(); UlfAsyncMessengerChannelControlBlock cb = (UlfAsyncMessengerChannelControlBlock) ccb; Channel channel = cb.getChannel().getNativeHandle(); channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( handler ); // Exclusive handler. 
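// The attributes set here and just below mark this channel as exclusively owned by the
// supplied async handler, externally managed ( kept out of ordinary pool rotation ), and
// a duplex passive channel awaiting server-initiated requests.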
channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASY_EXCLUSIVE_HANDLE_KEY ) ).set( true ); channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_EXTERNAL_CHANNEL_KEY ) ).set( true ); channel.attr( AttributeKey.valueOf( HuskyCTPConstants.HCTP_DUP_PASSIVE_CHANNEL_KEY ) ).set( true ); cb.sendAsynMsg( instructMessage, true ); this.getLogger().info( "Embracing and registering passive controlled channel ({}).", cb.getChannel().getNativeHandle().id() ); } } @Override public void embraces( int nLine, UMCTExpressHandler handler ) throws IOException { this.embraces( nLine, UlfAsyncMsgHandleAdapter.wrap( handler ) ); } @Override public void embraces( int nLine ) throws IOException { this.embraces( nLine, this.mRouteDispatcher.getUMCTExpress() ); } @Override public void createPassiveChannel( int nLine ) { ChannelPool pool = this.getMessageNode().getChannelPool(); ChannelControlBlock[] cbs = new ChannelControlBlock[ nLine ]; for ( int i = 0; i < nLine; ++i ) { ChannelControlBlock ccb = pool.depriveIdleChannel(); if ( ccb == null ) { for ( int j = 0; j < nLine; ++j ) { if ( cbs[ j ] == null ) { break; } ChannelId id = (ChannelId)cbs[ j ].getChannel().getChannelID(); this.mInstructedChannels.remove( id ); pool.add( cbs[ j ] ); } throw new IllegalArgumentException( "Creating `PassiveChannel` is compromised due to insufficient free channels. Consider setting up sufficient parallel channels." ); } ChannelId id = (ChannelId)ccb.getChannel().getChannelID(); cbs[ i ] = ccb; this.mInstructedChannels.put( id, ccb ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolvesAppointServer.java ================================================ package com.pinecone.hydra.uma.wolf; import java.io.IOException; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeoutException; import com.google.protobuf.DynamicMessage; import com.pinecone.hydra.uma.AsynMsgHandler; import com.pinecone.hydra.uma.AsynReturnHandler; import com.pinecone.hydra.uma.DuplexAppointServer; import com.pinecone.hydra.uma.HuskyDuplexExpress; import com.pinecone.hydra.uma.UlfDuplexAppointServer; import com.pinecone.hydra.uma.proxy.GenericPassiveClientIfaceProxyFactory; import com.pinecone.hydra.uma.proxy.PassiveClientIfaceProxyFactory; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelHandleException; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.event.ChannelEventHandler; import com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfChannelStatus; import com.pinecone.hydra.umc.wolf.UlfInformMessage; import com.pinecone.hydra.umc.wolf.server.UlfServer; import com.pinecone.hydra.umc.wolf.server.WolfMCServer; import com.pinecone.hydra.umct.DuplexExpress; import com.pinecone.hydra.umct.IlleagalResponseException; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.husky.HuskyCTPConstants; import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; import com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher; import com.pinecone.hydra.umct.husky.machinery.RouteDispatcher; import 
com.pinecone.hydra.umct.mapping.ControllerInspector; /** * Pinecone Ursus For Java WolfAppointServer [ Ulfhedinn Wolf Duplex RPC Server ] * Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family. * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** */ public class WolvesAppointServer extends WolfAppointServer implements UlfDuplexAppointServer { protected static Class checkExpressType( Class expressType ) { if ( !DuplexExpress.class.isAssignableFrom( expressType ) ) { throw new IllegalArgumentException( "`" + expressType.getSimpleName() + "` is not DuplexExpress calibre qualified." ); } return expressType; } protected PassiveClientIfaceProxyFactory mPassiveClientIfaceProxyFactory; protected void initUlfServerEventHandlers( UlfServer server ) { server.registerDataArrivedEventHandlers(new ChannelEventHandler() { @Override public void afterEventTriggered( ChannelControlBlock block, Object context ) { if ( block.getChannel().getChannelStatus() == UlfChannelStatus.WAITING_PASSIVE_RECEIVE ) { ChannelPool pool = WolvesAppointServer.this.getUMCTExpress().getPoolByClientId( block.getChannel().getIdentityID() ); if ( pool != null ) { pool.setIdleChannel( block ); } } } }); } private void initSelf( UlfServer server ) { this.initUlfServerEventHandlers( server ); this.mPassiveClientIfaceProxyFactory = new GenericPassiveClientIfaceProxyFactory( this ); this.mRecipient.registerChannelInactiveHandler(new ChannelInactiveHandler() { @Override public boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException { this.afterEventTriggered( ccb, context ); DuplexExpress express = (DuplexExpress) WolvesAppointServer.this.mRouteDispatcher.getUMCTExpress(); express.afterChannelInactive( ccb ); return false; } }); } protected WolvesAppointServer( UlfServer server, RouteDispatcher dispatcher ){ super( server, dispatcher ); this.initSelf( server ); } public WolvesAppointServer( UlfServer server, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ){ this( server, new HuskyRouteDispatcher( compiler, controllerInspector, express ) ); } public WolvesAppointServer( UlfServer server, CompilerEncoder encoder, UMCTExpress express ){ this( server, new HuskyRouteDispatcher( encoder, express, server.getTaskManager().getClassLoader() ) ); this.apply( express ); } public WolvesAppointServer( UlfServer server, UMCTExpress express ){ this( server, new HuskyRouteDispatcher( express, server.getTaskManager().getClassLoader() ) ); this.apply( express ); } public WolvesAppointServer( UlfServer server, Class expressType ){ super( server, WolvesAppointServer.checkExpressType( expressType ) ); this.initSelf( server ); } public WolvesAppointServer( UlfServer server ){ this( server, HuskyDuplexExpress.class ); } @Override public boolean supportDuplex() { return true; } @Override public DuplexExpress getUMCTExpress() { return (DuplexExpress) super.getUMCTExpress(); } @Override public void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, AsynMsgHandler handler ) throws IOException { this.getUMCTExpress().sendAsynMsg( clientId, request, bNoneBuffered, handler ); } @Override public void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException { this.getUMCTExpress().sendAsynMsg( clientId, request, bNoneBuffered, handler ); } @Override 
public void sendAsynMsg( long clientId, UMCMessage request, AsynMsgHandler handler ) throws IOException { this.getUMCTExpress().sendAsynMsg( clientId, request, true, handler ); } @Override public void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException { DynamicMessage message = this.reinterpretMsg( method, args ); this.sendAsynMsg( clientId, new UlfInformMessage(message.toByteArray()), handler ); } @Override public void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException { DynamicMessage message = this.reinterpretMsg( method, args ); this.sendAsynMsg(clientId, new UlfInformMessage( message.toByteArray(), HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_REQUEST ), new AsynMsgHandler() { @Override public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception { handler.onSuccessfulReturn( WolvesAppointServer.this.unmarshalResponse( method, msg ) ); } @Override public void onErrorMsgReceived( UMCMessage msg ) throws Exception { handler.onErrorMsgReceived( msg ); } }); } @Override public void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException { this.invokeInformAsyn( clientId, this.queryMethodPrototype( szMethodAddress ), args, handler ); } @Override public void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException { this.invokeInformAsyn( clientId, this.queryMethodPrototype( szMethodAddress ), args, handler ); } @Override public Object invokeInform( long clientId, MethodPrototype method, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException { CompletableFuture future = new CompletableFuture<>(); DynamicMessage message = this.reinterpretMsg(method, args); this.sendAsynMsg(clientId, new UlfInformMessage( message.toByteArray(), HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_REQUEST ), new AsynMsgHandler() { @Override public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception { try { Object result = WolvesAppointServer.this.unmarshalResponse( method, msg ); future.complete(result); } catch ( IlleagalResponseException e ) { future.completeExceptionally( e ); } } @Override public void onErrorMsgReceived( UMCMessage msg ) throws Exception { future.completeExceptionally( new IlleagalResponseException( "Error message received: " + msg ) ); } @Override public void onError( Object data, Throwable cause ) { future.completeExceptionally( cause ); } }); try { if ( nWaitTimeMil == -1 ) { if ( this.getMessageNode() instanceof WolfMCServer) { nWaitTimeMil = ((WolfMCServer) this.getMessageNode()).getConnectionArguments().getSyncWaitingMillis(); } } return WolfAppointHelper.evalCompletableFuture( future, nWaitTimeMil ); } catch ( TimeoutException | ExecutionException e ) { throw new IlleagalResponseException( e ); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); throw new IlleagalResponseException( e ); } } @Override public Object invokeInform( long clientId, MethodPrototype method, Object... 
args ) throws IlleagalResponseException, IOException {
        return this.invokeInform( clientId, method, args, -1 );
    }

    @Override
    public Object invokeInform( long clientId, String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException {
        return this.invokeInform( clientId, this.queryMethodPrototype( szMethodAddress ), args, nWaitTimeMil );
    }

    @Override
    public Object invokeInform( long clientId, String szMethodAddress, Object... args ) throws IlleagalResponseException, IOException {
        return this.invokeInform( clientId, this.queryMethodPrototype( szMethodAddress ), args );
    }

    @Override
    public <T> T getIface( long clientId, Class<T> iface ) {
        return this.mPassiveClientIfaceProxyFactory.createProxy( clientId, iface );
    }
}

================================================
FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/ChannelInputStream.java
================================================
package com.pinecone.hydra.umc.io;

import io.netty.buffer.ByteBuf;

import java.io.IOException;
import java.io.InputStream;

public class ChannelInputStream extends InputStream {
    protected ByteBuf mByteBuf;

    public ChannelInputStream( ByteBuf byteBuf ) {
        this.mByteBuf = byteBuf;
    }

    public ByteBuf getByteBuf() {
        return this.mByteBuf;
    }

    @Override
    public int read() throws IOException {
        try {
            // Honor the InputStream contract: -1 on exhaustion, otherwise an unsigned byte.
            if ( !this.mByteBuf.isReadable() ) {
                return -1;
            }
            return this.mByteBuf.readByte() & 0xFF;
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public int read( byte[] b ) throws IOException {
        return this.read( b, 0, b.length );
    }

    @Override
    public int read( byte[] b, int off, int len ) throws IOException {
        try {
            // Read at most the readable remainder; blindly reading `len` bytes would raise
            // an IndexOutOfBoundsException on a partially filled buffer.
            int n = Math.min( len, this.mByteBuf.readableBytes() );
            if ( n <= 0 ) {
                return len == 0 ? 0 : -1;
            }
            this.mByteBuf.readBytes( b, off, n );
            return n;
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public byte[] readAllBytes() throws IOException {
        try {
            int readerIndex = this.mByteBuf.readerIndex();
            int len         = this.mByteBuf.readableBytes();
            byte[] neo      = new byte[ len ];
            if ( this.mByteBuf.hasArray() ) {
                System.arraycopy( this.mByteBuf.array(), this.mByteBuf.arrayOffset() + readerIndex, neo, 0, len );
            }
            else {
                this.mByteBuf.getBytes( readerIndex, neo, 0, len );
            }
            this.mByteBuf.readerIndex( readerIndex + len ); // Consume what was copied out.
            return neo;
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public int available() throws IOException {
        try {
            return this.mByteBuf.readableBytes();
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public void close() throws IOException {
        try {
            this.mByteBuf.clear();
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }
}

================================================
FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/ChannelOutputStream.java
================================================
package com.pinecone.hydra.umc.io;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Objects;

public class ChannelOutputStream extends OutputStream {
    protected ChannelHandlerContext mChannelHandlerContext;
    protected ByteBuf               mByteBuf;

    public ChannelOutputStream( ChannelHandlerContext context ) {
        this.mChannelHandlerContext = context;
    }

    public ChannelHandlerContext getChannelHandlerContext() {
        return this.mChannelHandlerContext;
    }

    @Override
    public void write( int b ) throws IOException {
        try {
            // A fresh one-byte array per call: wrapping a shared scratch buffer would let a
            // later write corrupt data still queued but not yet flushed by the event loop.
            this.mChannelHandlerContext.write( Unpooled.wrappedBuffer( new byte[]{ (byte) b } ) );
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public void write( byte[] b ) throws IOException {
        try {
            // Copy the caller's array: the write is asynchronous, and the OutputStream
            // contract allows the caller to reuse `b` as soon as this method returns.
            this.mChannelHandlerContext.write( Unpooled.copiedBuffer( b ) );
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public void write( byte[] b, int off, int len ) throws IOException {
        Objects.checkFromIndexSize( off, len, b.length );
        try {
            this.mChannelHandlerContext.write( Unpooled.copiedBuffer( b, off, len ) );
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public void flush() throws IOException {
        try {
            this.mChannelHandlerContext.flush();
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }

    @Override
    public void close() throws IOException {
        try {
            this.mChannelHandlerContext.close();
        }
        catch ( Exception e ) {
            throw new IOException( e );
        }
    }
}

================================================
FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/IOCounter.java
================================================
package com.pinecone.hydra.umc.io;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;

public class IOCounter implements Pinenut {
    protected long mnSessionCount    ; // How many times this communication channel/stream has been established. [Transmit and Receive]
    protected long mnByteTransmitted ; // How many bytes have been transferred by transmitting. [ Send / Write ]
    protected long mnByteReceived    ; // How many bytes have been transferred by receiving. [ Receive / Read ]
    protected long mnByteOther       ; // How many bytes have been transferred by other operations.
    protected long mnTransmitCall    ; // How many times the transmit operation has been called.
    protected long mnReceiveCall     ; // How many times the receive operation has been called.
    protected long mnOtherCall       ; // How many times other operations have been called.
    protected long mnLastConTime     ; // The last time this channel/stream was established.
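    // A minimal accounting sketch (assumed usage, not taken from this repository): a
    // transport wrapper would bump these counters around each I/O call and publish the
    // snapshot via toJSONString().
    //
    //     IOCounter counter = new IOCounter();
    //     counter.setSessionCount( counter.getSessionCount() + 1 );
    //     counter.setLastConTime( System.currentTimeMillis() );
    //     counter.setByteTransmitted( counter.getByteTransmitted() + frame.length ); // `frame` is hypothetical
    //     counter.setTransmitCall( counter.getTransmitCall() + 1 );
    //     String report = counter.toJSONString();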
public long getSessionCount() { return this.mnSessionCount; } public void setSessionCount( long sessionCount ) { this.mnSessionCount = sessionCount; } public long getByteTransmitted() { return this.mnByteTransmitted; } public void setByteTransmitted( long byteTransmitted ) { this.mnByteTransmitted = byteTransmitted; } public long getByteReceived() { return this.mnByteReceived; } public void setByteReceived( long byteReceived ) { this.mnByteReceived = byteReceived; } public long getByteOther() { return this.mnByteOther; } public void setByteOther( long byteOther ) { this.mnByteOther = byteOther; } public long getTransmitCall() { return this.mnTransmitCall; } public void setTransmitCall( long transmitCall ) { this.mnTransmitCall = transmitCall; } public long getReceiveCall() { return this.mnReceiveCall; } public void setReceiveCall( long receiveCall ) { this.mnReceiveCall = receiveCall; } public long getOtherCall() { return this.mnOtherCall; } public void setOtherCall( long otherCall ) { this.mnOtherCall = otherCall; } public long getLastConTime() { return this.mnLastConTime; } public void setLastConTime( long lastConTime ) { this.mnLastConTime = lastConTime; } public JSONObject toJSONObject() { JSONObject json = new JSONMaptron(); json.put( "sessionCount" , this.mnSessionCount ); json.put( "byteTransmitted" , this.mnByteTransmitted ); json.put( "byteReceived" , this.mnByteReceived ); json.put( "byteOther" , this.mnByteOther ); json.put( "transmitCall" , this.mnTransmitCall ); json.put( "receiveCall" , this.mnReceiveCall ); json.put( "otherCall" , this.mnOtherCall ); json.put( "lastConTime" , this.mnLastConTime ); return json; } @Override public String toJSONString() { return this.toJSONObject().toJSONString(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/IOLoadBalanceStrategy.java ================================================ package com.pinecone.hydra.umc.io; import com.pinecone.framework.system.prototype.Strategy; public interface IOLoadBalanceStrategy extends Strategy { boolean readPriorityMatched( Object condition ); boolean writePriorityMatched( Object condition ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AbstractUMCHead.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Map; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public abstract class AbstractUMCHead implements UMCHead { protected abstract void setSignature ( String signature ); protected abstract void setBodyLength ( long length ); protected abstract void setMethod ( UMCMethod umcMethod ); protected abstract void setExtraEncode ( ExtraEncode encode ); protected abstract void setExtraHead ( JSONObject jo ); protected abstract void setExtraHead ( Map jo ); protected abstract void setExtraHead ( Object o ); protected abstract void transApplyExHead ( ); protected abstract void applyExtraHeadCoder ( ExtraHeadCoder coder ); protected abstract UMCHead applyExHead( Map jo ); public static void transApplyExHeadExplicitly ( AbstractUMCHead that ) { that.transApplyExHead(); } public static void transApplyExHeadExplicitly ( UMCHead that ) { AbstractUMCHead.transApplyExHeadExplicitly( (AbstractUMCHead) that ); } 
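// These public static "...Explicitly" bridges let protocol-layer code in other packages
// drive the protected head-mutation hooks ( e.g. re-applying the extra head after an
// ExtraHeadCoder swap ) without widening the visibility of the hooks themselves.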
public static void transApplyExHeadExplicitly ( UMCHead that, ExtraHeadCoder coder ) { ( (AbstractUMCHead) that ).applyExtraHeadCoder( coder ); AbstractUMCHead.transApplyExHeadExplicitly( (AbstractUMCHead) that ); } public static void setExtraHeadExplicitly ( UMCHead that, Object o ) { ( (AbstractUMCHead) that ).setExtraHead( o ); } protected String jsonifyExtraHead() { Map joExtraHead = this.getMapExtraHead(); String szExtraHead; if( joExtraHead == null ) { szExtraHead = "[object Object]"; } else { szExtraHead = JSON.stringify( this.getMapExtraHead() ); } return szExtraHead; } @Override public String toJSONString() { String szExtraHead = this.jsonifyExtraHead(); return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "Signature" , this.getSignature() ), new KeyValue<>( "ExtraHeadLength", this.getExtraHeadLength() ), new KeyValue<>( "ExtraEncode" , this.getExtraEncode().getName() ), new KeyValue<>( "BodyLength" , this.getBodyLength() ), new KeyValue<>( "KeepAlive" , this.getKeepAlive() ), new KeyValue<>( "Method" , this.getMethod() ), new KeyValue<>( "Status" , this.getStatus().getName() ), new KeyValue<>( "ControlBits" , "0x" + Integer.toUnsignedString( this.getControlBits(),16 ) ), new KeyValue<>( "IdentityId" , this.getIdentityId() ), new KeyValue<>( "SessionId" , this.getSessionId() ), new KeyValue<>( "ExtraHead" , szExtraHead ), } ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchBytesTransferMessage.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Map; public abstract class ArchBytesTransferMessage extends ArchUMCMessage implements TransferMessage { protected byte[] msBytesBody = null ; public ArchBytesTransferMessage( UMCHead head ) { super( head ); } public ArchBytesTransferMessage( UMCHead head, byte[] sBytesBody ) { this( head ); this.setBody( sBytesBody ); } public ArchBytesTransferMessage( UMCHead head, String szStringBody ) { this( head, szStringBody.getBytes() ); } public ArchBytesTransferMessage( Map joExHead, byte[] sBytesBody, int controlBits ) { super( joExHead, UMCMethod.TRANSFER, controlBits ); this.setBody( sBytesBody ); } public ArchBytesTransferMessage( Map joExHead, String szStringBody, int controlBits ) { this( joExHead, szStringBody.getBytes(), controlBits ); } public ArchBytesTransferMessage( Map joExHead, byte[] sBytesBody ) { this( joExHead, sBytesBody, 0 ); } public ArchBytesTransferMessage( Map joExHead, String szStringBody ) { this( joExHead, szStringBody, 0 ); } public ArchBytesTransferMessage( Object exHead, ExtraEncode encode, byte[] sBytesBody, int controlBits ) { super( exHead, encode, UMCMethod.TRANSFER, controlBits ); this.setBody( sBytesBody ); } public ArchBytesTransferMessage( Object exHead, ExtraEncode encode, String szStringBody, int controlBits ) { this( exHead, encode, szStringBody.getBytes(), controlBits ); } public ArchBytesTransferMessage( Object exHead, byte[] sBytesBody ) { this( exHead, ExtraEncode.Prototype, sBytesBody, 0 ); } public ArchBytesTransferMessage( Object exHead, String szStringBody ) { this( exHead, ExtraEncode.Prototype, szStringBody, 0 ); } void setBody( byte[] sBytesBody ) { this.msBytesBody = sBytesBody; this.mHead.inface().setBodyLength( this.msBytesBody.length ); } public byte[] getBody() { return this.msBytesBody; } @Override public void release() { super.release(); this.msBytesBody = null; } } ================================================ FILE: 
Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchInformCMessage.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Map; public abstract class ArchInformCMessage extends ArchUMCMessage implements InformMessage { public static UMCCHead newUMCCHead( Object exHead ) { UMCCHeadV1 head = new UMCCHeadV1(); head.setExtraHead( exHead ); return head; } public static UMCCHead newUMCCHead( Map joExHead ) { UMCCHeadV1 head = new UMCCHeadV1(); head.applyExHead( joExHead ); return head; } public ArchInformCMessage( UMCCHead head ) { super( head ); } public ArchInformCMessage( Map joExHead ) { this( ArchInformCMessage.newUMCCHead( joExHead ) ); } public ArchInformCMessage( Object protoExHead ) { this( ArchInformCMessage.newUMCCHead( protoExHead ) ); } @Override public long getMessageLength(){ return UMCCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength(); } @Override public UMCCHead getHead() { return (UMCCHead) super.getHead(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchInformMessage.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Map; public abstract class ArchInformMessage extends ArchUMCMessage implements InformMessage { public ArchInformMessage( UMCHead head ) { super( head ); } public ArchInformMessage( Map joExHead , int controlBits ) { super( joExHead, UMCMethod.INFORM, controlBits ); } public ArchInformMessage( Object protoExHead, int controlBits ) { super( protoExHead, UMCMethod.INFORM, controlBits ); } public ArchInformMessage( Map joExHead ) { super( joExHead, UMCMethod.INFORM ); } public ArchInformMessage( Object protoExHead, ExtraEncode encode ) { super( protoExHead, encode ); } public ArchInformMessage( Object protoExHead ) { super( protoExHead, UMCMethod.INFORM ); } @Override public long getMessageLength(){ return UMCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchStreamTransferMessage.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import java.io.InputStream; import java.util.Map; public class ArchStreamTransferMessage extends ArchUMCMessage implements TransferMessage { protected InputStream mIStreamBody = null ; public ArchStreamTransferMessage( UMCHead head ) { super( head ); head.inface().setMethod( UMCMethod.TRANSFER ); } public ArchStreamTransferMessage( UMCHead head, InputStream inStream ) { this( head ); this.setBody( inStream ); } public ArchStreamTransferMessage( Map joExHead, InputStream inStream, int controlBits ) { super( joExHead, UMCMethod.TRANSFER, controlBits ); this.setBody( inStream ); } public ArchStreamTransferMessage( Map joExHead, InputStream inStream ) { this( joExHead, inStream, 0 ); } public ArchStreamTransferMessage( Object exHead, ExtraEncode encode, InputStream inStream, int controlBits ) { super( exHead, encode, UMCMethod.TRANSFER, controlBits ); this.setBody( inStream ); } public ArchStreamTransferMessage( Object exHead, InputStream inStream ) { this( exHead, ExtraEncode.Prototype, inStream, 0 ); } void setBody( InputStream inStream ) { this.mIStreamBody = inStream; try{ this.mHead.inface().setBodyLength( this.mIStreamBody.available() ); } catch ( IOException e ) { this.mHead.inface().setBodyLength( 0 ); } } @Override public 
InputStream getBody() { return this.mIStreamBody; } @Override public void release() { super.release(); this.mIStreamBody = null; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCMessage.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import java.util.Map; public abstract class ArchUMCMessage implements UMCMessage { protected UMCHead mHead ; public ArchUMCMessage( UMCHead head ) { this.mHead = head; } ArchUMCMessage( Map joExHead, UMCMethod method, int controlBits ) { UMCHeadV1 head = new UMCHeadV1(); head.setControlBits( controlBits ); head.setMethod( method ); head.applyExHead( joExHead ); this.mHead = head; } ArchUMCMessage( Map joExHead, UMCMethod method ) { this( joExHead, method, 0 ); } public ArchUMCMessage( Map joExHead, int controlBits ) { this( joExHead, UMCMethod.INFORM, controlBits ); } public ArchUMCMessage( Map joExHead ) { this( joExHead, UMCMethod.INFORM ); } protected ArchUMCMessage( Object protoExHead, ExtraEncode encode, UMCMethod method, int controlBits ) { UMCHeadV1 head = new UMCHeadV1(); head.setControlBits( controlBits ); head.setMethod( method ); head.setExtraHead( protoExHead ); head.setExtraEncode( encode ); this.mHead = head; } protected ArchUMCMessage( Object protoExHead, UMCMethod method, int controlBits ) { this( protoExHead, ExtraEncode.Prototype, method, controlBits ); } protected ArchUMCMessage( Object protoExHead, UMCMethod method ) { this( protoExHead, method, 0 ); } protected ArchUMCMessage( Object protoExHead, ExtraEncode encode, UMCMethod method ) { this( protoExHead, encode, method, 0 ); } public ArchUMCMessage( Object protoExHead, int controlBits ) { this( protoExHead, UMCMethod.INFORM, controlBits ); } public ArchUMCMessage( Object protoExHead ) { this( protoExHead, UMCMethod.INFORM ); } public ArchUMCMessage( Object protoExHead, ExtraEncode encode ) { this( protoExHead, encode, UMCMethod.INFORM ); } @Override public UMCHead getHead() { return this.mHead; } @Override public Object getExHead() { return this.mHead.getExtraHead(); } @Override public long getMessageLength(){ return UMCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength() + this.mHead.getBodyLength(); } @Override public long queryMessageLength(){ this.mHead.inface().transApplyExHead(); return this.getMessageLength(); } @Override public void release() { this.mHead = null; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { String szControlBits = "0x" + Integer.toUnsignedString( this.getHead().getControlBits(),16 ); return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "ExtraHeadLength", this.getHead().getExtraHeadLength() ), new KeyValue<>( "Head" , this.getHead().getExtraHead() ), new KeyValue<>( "Method" , this.getHead().getMethod() ), new KeyValue<>( "BodyLength" , this.getHead().getBodyLength() ), new KeyValue<>( "ControlBits" , szControlBits ), new KeyValue<>( "IdentityId" , this.getHead().getIdentityId() ), new KeyValue<>( "SessionId" , this.getHead().getSessionId() ), new KeyValue<>( "Status" , this.getHead().getStatus() ) } ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCProtocol.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.OutputStream; 
import java.io.InputStream; import java.io.IOException; import com.pinecone.framework.util.json.JSONException; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; /** * Pinecone Ursus For Java UlfMCProtocol [ Wolf Uniform Message Control Protocol ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ********************************************************** * UlfUMC Message Struct: * const char* lpszSignature * byteEnum method * uint32 nExtraHeadLength * uint64 nBodyLength * Atom* lpjoExtraHead // JSON5 String / JSONObject * Stream bodyStream * ********************************************************** * UlfUMC/1.1 0xFF0xFFFFFFFF0xFFFFFFFFFFFFFFFF{Key:"Val"...} * ********************************************************** * UlfUMC/1.1 0xFF0xFFFFFFFF0xFFFFFFFFFFFFFFFF{Key:"Val"...} * MsgBody * ********************************************************** */ public abstract class ArchUMCProtocol implements UMCProtocol { protected int mnFrameSize = 4096; protected String mszVersion = UMCHeadV1.ProtocolVersion; protected String mszSignature = UMCHeadV1.ProtocolSignature; protected OutputStream mOutputStream ; protected InputStream mInputStream ; protected Medium mMessageSource ; protected ExtraHeadCoder mExtraHeadCoder ; public ArchUMCProtocol( Medium messageSource ) { this.mMessageSource = messageSource; this.mOutputStream = this.mMessageSource.getOutputStream(); this.mInputStream = this.mMessageSource.getInputStream(); this.applyMessageSource( messageSource ); } @Override public UMCProtocol applyMessageSource( Medium medium ) { this.mMessageSource = medium; this.mExtraHeadCoder = this.getExtraHeadCoder(); return this; } @Override public Medium getMessageSource() { return this.mMessageSource; } @Override public String getVersion(){ return this.mszVersion; } @Override public String getSignature() { return this.mszSignature; } protected UMCHeadV1 newHead() { UMCHeadV1 head = new UMCHeadV1(); head.applyExtraHeadCoder( this.getExtraHeadCoder() ); return head; } @Override public void release() { this.mMessageSource.release(); this.mMessageSource = null; this.mszVersion = null; this.mszSignature = null; this.mOutputStream = null; this.mInputStream = null; } public ExtraHeadCoder getExtraHeadCoder() { return this.mMessageSource.getMessageNode().getExtraHeadCoder(); } protected void flush() throws IOException { this.mOutputStream.flush(); } protected void sendMsgHead( UMCHead head ) throws IOException { this.sendMsgHead( head, true ); } protected void sendMsgHead( UMCHead umcHead, boolean bFlush ) throws IOException { UMCHeadV1.EncodePair encodePair = UMCHeadV1.encode( umcHead, this.getExtraHeadCoder() ); this.mOutputStream.write( encodePair.byteBuffer.array(), 0, encodePair.bufLength ); if( bFlush ) { this.mOutputStream.flush(); } } protected UMCHead readMsgHead() throws IOException { int nBufSize = ArchUMCProtocol.basicHeadLength( this.mszSignature ); byte[] buf = new byte[ nBufSize ]; if ( this.mInputStream.read( buf ) < nBufSize ) { throw new StreamTerminateException("StreamEndException:[UMCProtocol] Stream is ended."); } UMCHeadV1 head = (UMCHeadV1)ArchUMCProtocol.onlyReadMsgBasicHead( buf, this.mszSignature, this.getExtraHeadCoder() ); byte[] headBuf = new byte[ head.nExtraHeadLength ]; if ( this.mInputStream.read( headBuf ) < head.nExtraHeadLength ) { throw new StreamTerminateException("[UMCProtocol] Stream is ended."); } try { Object jo = this.getExtraHeadCoder().getDecoder().decode( head, headBuf ); head.setExtraHead( jo ); } catch ( 
JSONException e ) { throw new IOException(" [UMCProtocol] Illegal protocol head."); } return head; } public static int basicHeadLength( String szSignature ) { return szSignature.length() + UMCHeadV1.StructBlockSize; } public static UMCHead onlyReadMsgBasicHead( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException { return UMCHeadV1.decode( buf, szSignature, extraHeadCoder ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCReceiver.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.ProvokeHandleException; import java.io.IOException; import java.lang.reflect.InvocationTargetException; public abstract class ArchUMCReceiver extends ArchUMCProtocol implements UMCReceiver { public ArchUMCReceiver( Medium messageSource ) { super( messageSource ); } @Override public Object readInformMsg() throws IOException { UMCHead head = this.readMsgHead(); if( head.getMethod() != UMCMethod.INFORM ) { throw new IOException( "[UMCProtocol] Illegal protocol method." ); } return head.getExtraHead(); } protected UMCHead readTransferHead() throws IOException { UMCHead head = this.readMsgHead(); if( head.getMethod() != UMCMethod.TRANSFER ) { throw new IOException( "[UMCProtocol] Illegal protocol method." ); } return head; } protected void onlyReadTransferBody( TransferMessage message, boolean bAllBytes ) throws IOException { if( bAllBytes ) { ( (ArchBytesTransferMessage)message ).setBody( this.mInputStream.readAllBytes() ); } else { ( (ArchStreamTransferMessage)message ).setBody( this.mInputStream ); } } public UMCMessage readMsg( boolean bAllBytes, MessageStereotypes stereotypes ) throws IOException { try{ UMCHead head = this.readMsgHead(); UMCMessage message; if( head.getMethod() == UMCMethod.TRANSFER ){ if( bAllBytes ) { message = (UMCMessage) stereotypes.postBytesType().getConstructor( UMCHead.class ).newInstance( head ); } else { message = (UMCMessage) stereotypes.postStreamType().getConstructor( UMCHead.class ).newInstance( head ); } this.onlyReadTransferBody( (TransferMessage)message, bAllBytes ); } else { if( head.getMethod() != UMCMethod.INFORM ){ if ( !( head.getMethod() == UMCMethod.UNDEFINED && head.getExtraEncode() == ExtraEncode.Iussum ) ) { throw new IOException( " [UMCProtocol] Unrecognized protocol method." 
); } } message = (UMCMessage) stereotypes.putType().getConstructor( UMCHead.class ).newInstance( head ); } return message; } catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e ) { throw new ProvokeHandleException( e ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCTransmit.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import java.io.InputStream; import java.util.Map; public abstract class ArchUMCTransmit extends ArchUMCProtocol implements UMCTransmit { public ArchUMCTransmit( Medium messageSource ) { super( messageSource ); } @SuppressWarnings( "unchecked" ) protected void applyExHead( UMCHeadV1 head, Object msg ) { if( msg instanceof Map ) { head.inface().applyExHead( (Map) msg ); } else { head.inface().setExtraHead( msg ); } } @Override public void sendInformMsg( Object msg, Status status ) throws IOException { UMCHeadV1 head = this.newHead(); this.applyExHead( head, msg ); head.setStatus( status ); head.inface().setMethod( UMCMethod.INFORM ); this.sendMsgHead( head ); } @Override public void sendInformMsg( Object msg ) throws IOException { this.sendInformMsg( msg, Status.OK ); } public void sendTransferMsgHead( Object msg ) throws IOException { this.sendTransferMsgHead( msg, false ); } public void sendTransferMsgHead( Object msg, boolean bFlush ) throws IOException { UMCHeadV1 head = this.newHead(); this.applyExHead( head, msg ); head.inface().setMethod( UMCMethod.TRANSFER ); this.sendMsgHead( head, bFlush ); } public void sendTransferMsgContent( byte[] frame, int len ) throws IOException { this.mOutputStream.write( frame, 0, len ); } protected void onlySendPostBody( byte[] bytes ) throws IOException { this.sendTransferMsgContent( bytes, bytes.length ); this.mOutputStream.flush(); } @Override public void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException { UMCHeadV1 head = this.newHead(); head.setBodyLength( bytes.length ); head.setStatus( status ); this.sendTransferMsgHead( msg, false ); this.onlySendPostBody( bytes ); } @Override public void sendTransferMsg( Object msg, byte[] bytes ) throws IOException { this.sendTransferMsg( msg, bytes, Status.OK ); } protected void onlySendPostBody( InputStream is, boolean bNoneBuffered ) throws IOException { //this.mnFrameSize = 2; byte[] buf; if( bNoneBuffered ) { buf = is.readAllBytes(); this.sendTransferMsgContent( buf, buf.length ); } else { buf = new byte[ this.mnFrameSize ]; while ( true ) { int n = is.available(); if( n > this.mnFrameSize && is.read( buf ) > 0 ) { this.sendTransferMsgContent( buf, this.mnFrameSize ); } else { if( is.read( buf, 0, n ) > 0 ) { this.sendTransferMsgContent( buf, n ); } break; } } } this.getMessageSource().getOutputStream().flush(); } @Override public void sendTransferMsg( Object msg, InputStream is ) throws IOException { UMCHeadV1 head = this.newHead(); head.setBodyLength( is.available() ); this.sendTransferMsgHead( msg, false ); this.onlySendPostBody( is, false ); } @Override public void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException { msg.getHead().setIdentityId( this.getMessageSource().getMessageNode().getMessageNodeId() ); UMCHead head = msg.getHead(); head.inface().setSignature( this.mszSignature ); if( msg.getMethod() == UMCMethod.INFORM || msg.getMethod() == UMCMethod.UNDEFINED ) { this.sendMsgHead( head ); } else if( msg.getMethod() == 
UMCMethod.TRANSFER ) { this.sendMsgHead( head, false ); Object body = msg.evinceTransferMessage().getBody(); if( body instanceof byte[] ) { byte[] bytes = (byte[])body; this.onlySendPostBody( bytes ); } else if( body instanceof InputStream ) { InputStream is = (InputStream)body; this.onlySendPostBody( is, bNoneBuffered ); } } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsynChannelAllocator.java ================================================ package com.pinecone.hydra.umc.msg; public interface AsynChannelAllocator extends ChannelPool { ChannelControlBlock nextAsynChannel( long nMillisTimeout, boolean bEager ) ; default ChannelControlBlock nextAsynChannel( long nMillisTimeout ) { return this.nextAsynChannel( nMillisTimeout, true ); } default ChannelControlBlock nextAsynChannel() { return this.nextAsynChannel( 5000 ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsyncMessenger.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import java.io.IOException; public interface AsyncMessenger extends Messenger { void sendAsynMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException; void sendAsynMsg( UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException; // Javascript/Ajax style. default void sendAsynMsg( UMCMessage request, UlfAsyncMsgHandleAdapter handler ) throws IOException { this.sendAsynMsg( request, false, handler ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsyncMessengerChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; public interface AsyncMessengerChannelControlBlock extends MessengerChannelControlBlock { @Override default AsyncMessenger getParentMessageNode(){ return (AsyncMessenger) this.getChannel().getParentMessageNode(); } void sendAsynMsg( UMCMessage message, boolean bNoneBuffered ) throws IOException; @Override default void sendMsg( UMCMessage message, boolean bNoneBuffered ) throws IOException { this.sendAsynMsg( message, bNoneBuffered ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsyncMsgHandleAdapter.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.hydra.umct.UMCTExpressHandler; public interface AsyncMsgHandleAdapter extends UMCTExpressHandler { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/CascadeMessageNode.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.regimentation.UniformCascadeNodus; public interface CascadeMessageNode extends MessageNode, UniformCascadeNodus { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelAllocateException.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public class ChannelAllocateException extends RuntimeException implements Pinenut { public ChannelAllocateException() { super(); } 
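A minimal sketch of the allocation contract declared by AsynChannelAllocator above, i.e. "hand out an idle channel or give up after a timeout", with 5000 ms as the default (ToyChannelAllocator is illustrative; the repository's real pools additionally track channel status and IO load balancing):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

final class ToyChannelAllocator<C> {
    private final BlockingQueue<C> idle = new LinkedBlockingQueue<>();

    void release( C channel ) { this.idle.offer( channel ); } // return a channel to the pool

    // Analogue of nextAsynChannel( nMillisTimeout ): block up to the timeout, then give up.
    C next( long nMillisTimeout ) {
        try {
            C c = this.idle.poll( nMillisTimeout, TimeUnit.MILLISECONDS );
            if ( c == null ) {
                throw new ChannelAllocateException( "No idle channel within " + nMillisTimeout + " ms." );
            }
            return c;
        }
        catch ( InterruptedException e ) {
            Thread.currentThread().interrupt();
            throw new ChannelAllocateException( e );
        }
    }
}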
public ChannelAllocateException( String message ) { super(message); } public ChannelAllocateException( String message, Throwable cause ) { super(message, cause); } public ChannelAllocateException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.hydra.umc.io.IOCounter; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import java.io.IOException; public interface ChannelControlBlock extends Pinenut { UMCChannel getChannel(); IOCounter getIOCounter(); boolean getInSyncMode(); UMCTransmit getTransmit(); UMCReceiver getReceiver(); default MessageNode getParentMessageNode(){ return this.getChannel().getParentMessageNode(); } void sendMsg( UMCMessage message, boolean bNoneBuffered ) throws IOException; void release(); void close(); default boolean isShutdown(){ return this.getChannel().isShutdown(); } ChannelStatus getChannelStatus(); void pushMsgHandle ( UlfAsyncMsgHandleAdapter msgHandle ); UlfAsyncMsgHandleAdapter pollMsgHandle ( long nWaitMillis ) throws InterruptedException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelHandleException.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import com.pinecone.framework.system.prototype.Pinenut; public class ChannelHandleException extends IOException implements Pinenut { public ChannelHandleException() { super(); } public ChannelHandleException( String message ) { super(message); } public ChannelHandleException( String message, Throwable cause ) { super(message, cause); } public ChannelHandleException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelPool.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; public interface ChannelPool extends Pinenut { ChannelControlBlock queryChannelById( Object id ) ; void onlyRemove( Object id ); int size(); void clear(); boolean isEmpty(); Collection getPooledChannels(); ChannelControlBlock terminate( Object id ) throws InterruptedException; boolean isAllChannelsTerminated(); void remove ( ChannelControlBlock ccb ); void deactivate ( ChannelControlBlock ccb ); ChannelPool setIdleChannel( ChannelControlBlock block ); ChannelPool add( ChannelControlBlock block ); ChannelControlBlock depriveIdleChannel(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelStatus.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public interface ChannelStatus extends Pinenut { String getName(); int getValue(); byte getByteValue(); boolean isIdle(); boolean isTerminated(); boolean isWaitingForIOCompleted(); boolean isWaitingForLocalCompleted(); default boolean isWaitingForOperationCompleted() { return this.isWaitingForIOCompleted() || this.isWaitingForLocalCompleted(); } boolean isAsynAvailable(); boolean isSyncAvailable(); default String toJSONString() { return 
"\"" + this.toString() + "\""; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/EMCBytesDecoder.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public interface EMCBytesDecoder extends Pinenut { UMCHead decode( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException; UMCHead decodeIntegrated( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/EMCHead.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; /** * Pinecone Ursus For Java EMC [ Elastic Uniform Message Control ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ******************************************************************************************************************** * Variable-length message protocol header * 可变长弹性协议头 * ******************************************************************************************************************** * A typical top-level UMC header only contains the signature and the ExtraHeadLength. * In practice, the ExtraHeadLength is usually left empty, so the minimum sizeof = 8. * This allows for elastic and excellent performance while ensuring the supreme uniformity based on the specific message type. * Unlike C/C++, other languages cannot use unions or struct segments for memory manipulation. * To ensure maximum compatibility, the UMC uses the highest bit length (dword/qword). * For uniformity, it will inevitably lead to memory overhead, but the sacrifice is worth it. * ******************************************************************************************************************** * 一个典型顶级的UMC头仅包含协议签名、扩展头长度,实践中扩展头长度字段(ExtraHeadLength) * 默认是置空的,因此最小 sizeof = 8。这样可根据具体的消息类型,灵活确保最高统一抽象和极致性能。 * 由于其他语言不像C/C++,无法使用union、结构体段等内存手段,UMC协议为确保最高兼容,因此使用了最高位长(dword/qword) * 这会不可避免地带来内存损益,为了统一牺牲是值得的。 * ******************************************************************************************************************** */ public interface EMCHead extends Pinenut { String getSignature(); int getSignatureLength(); int sizeof(); int fieldsSize(); // UMCHead (Non-Dynamic) Fields size. 
int getExtraHeadLength(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ExtraEncode.java ================================================ package com.pinecone.hydra.umc.msg; public enum ExtraEncode { Undefined ( 0x01, "Undefined" ), JSONString ( 0x02, "JSONString" ), Binary ( 0x03, "Binary" ), // Bson Prototype ( 0x04, "Prototype" ), // Prototype Raw Binary Iussum ( 0x05, "Iussum" ), // Reduced instruction Custom ( 0xFF, "Custom" ); private final int value; private final String name; ExtraEncode( int value, String name ){ this.value = value; this.name = name; } public String getName(){ return this.name; } public int getValue() { return this.value; } public byte getByteValue() { return (byte) this.value; } public static ExtraEncode asValue( int val ) { for ( ExtraEncode type : ExtraEncode.values() ) { if ( type.getValue() == val ) { return type; } } return ExtraEncode.Custom; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/FairChannelPool.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Queue; public interface FairChannelPool extends AsynChannelAllocator { long getMajorWaitTimeout(); FairChannelPool setMajorWaitTimeout( long nMillisTimeout ); FairChannelPool pushBack( ChannelControlBlock channel ); ChannelControlBlock pop(); @Override FairChannelPool setIdleChannel( ChannelControlBlock block ); @Override ChannelControlBlock nextAsynChannel( long nMillisTimeout, boolean bEager ) ; @Override default ChannelControlBlock nextAsynChannel( long nMillisTimeout ) { return this.nextAsynChannel( nMillisTimeout, true ); } @Override default ChannelControlBlock nextAsynChannel() { return this.nextAsynChannel( 5000 ); } Queue getMajorQueue(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/GenericEMCBytesDecoder.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import com.pinecone.framework.util.json.JSONException; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public class GenericEMCBytesDecoder implements EMCBytesDecoder { protected boolean isQualified ( byte[] buf, String szSignature ) throws IOException { if ( buf.length < szSignature.length() ) { // Signature size is minimum. throw new StreamTerminateException( "StreamEndException:[EMCBytesDecoder] Stream is ended." 
); } byte[] des = szSignature.getBytes(); // UMC | UMC-C | UMC-BP return buf[ 4 ] == des[ 4 ] && buf[ 5 ] == des[ 5 ] && buf[ 6 ] == des[ 6 ]; } @Override public UMCHead decode( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException { if ( this.isQualified( buf, UMCHeadV1.ProtocolSignature ) ) { return UMCHeadV1.decode( buf, UMCHeadV1.ProtocolSignature, extraHeadCoder ); } else if ( this.isQualified( buf, UMCCHeadV1.ProtocolSignature ) ) { return UMCCHeadV1.decode( buf, UMCCHeadV1.ProtocolSignature, extraHeadCoder ); } return null; } @Override public UMCHead decodeIntegrated( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException { UMCHead head = this.decode( buf, extraHeadCoder ); int headSize = head.sizeof(); if ( buf.length < headSize + head.getExtraHeadLength() ) { throw new StreamTerminateException("[UMCProtocol] Buffer is not long enough."); } byte[] headBuf = new byte[ head.getExtraHeadLength() ]; System.arraycopy( buf, headSize, headBuf, 0, head.getExtraHeadLength() ); try { Object jo = extraHeadCoder.getDecoder().decode( head, headBuf ); AbstractUMCHead.setExtraHeadExplicitly( head, jo ); } catch ( JSONException e ) { throw new IOException(" [UMCProtocol] Illegal protocol head."); } return head; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/IdleFirstBalanceStrategy.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.hydra.umc.io.IOLoadBalanceStrategy; public class IdleFirstBalanceStrategy implements IOLoadBalanceStrategy { public IdleFirstBalanceStrategy() { } @Override public boolean matched( Object condition ) { ChannelControlBlock ccb = (ChannelControlBlock) condition; return ccb.getChannelStatus().isIdle(); } @Override public boolean readPriorityMatched( Object condition ) { return this.matched( condition ); } @Override public boolean writePriorityMatched( Object condition ) { return this.matched( condition ); } public IdleFirstBalanceStrategy clone() { IdleFirstBalanceStrategy clone; try { clone = (IdleFirstBalanceStrategy) super.clone(); } catch ( CloneNotSupportedException e ) { throw new InternalError(e); } return clone; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/InformMessage.java ================================================ package com.pinecone.hydra.umc.msg; public interface InformMessage extends UMCMessage { @Override default InformMessage evinceInformMessage() { return this; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MappedChannelPool.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Collection; import java.util.Map; public interface MappedChannelPool extends ChannelPool { Map getPooledMap(); @Override default Collection getPooledChannels() { return this.getPooledMap().values(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Medium.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; public interface Medium extends Pinenut { Object getNativeMessageSource(); OutputStream getOutputStream(); InputStream getInputStream(); default byte[] receive( int
nLength ) throws IOException { return this.getInputStream().readNBytes( nLength ); } default void send ( byte[] bytes, int off, int n ) throws IOException { this.getOutputStream().write( bytes, off, n ); } default void send ( byte[] bytes ) throws IOException { this.getOutputStream().write( bytes, 0, bytes.length ); } String sourceName(); void release(); MessageNodus getMessageNode(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MediumTerminationException.java ================================================ package com.pinecone.hydra.umc.msg; public class MediumTerminationException extends UMCServiceException { public MediumTerminationException() { super(); } public MediumTerminationException( String message ) { super(message); } public MediumTerminationException( String message, Throwable cause ) { super(message, cause); } public MediumTerminationException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Message.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public interface Message extends Pinenut { long getMessageLength(); default long queryMessageLength(){ return this.getMessageLength(); } void release(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessageNode.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.system.Hydrogen; public interface MessageNode extends Processum, MessageNodus { @Override Hydrogen parentSystem(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessageNodus.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit; public interface MessageNodus extends Messagus { ExtraHeadCoder getExtraHeadCoder(); ErrorMessageAudit getErrorMessageAudit(); void setErrorMessageAudit( ErrorMessageAudit audit ); MsgNodeConfig getMessageNodeConfig(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessageStereotypes.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public interface MessageStereotypes extends Pinenut { Class putType(); Class postBytesType(); Class postStreamType(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Messagus.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.concurrent.atomic.AtomicInteger; import com.pinecone.framework.system.regimentation.Nodus; public interface Messagus extends Nodus { AtomicInteger LocalNodeIdAllocator = new AtomicInteger( 0 ); static int nextLocalId() { return MessageNodus.LocalNodeIdAllocator.getAndIncrement(); } long getMessageNodeId(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Messenger.java 
================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import com.pinecone.hydra.umc.wolf.client.ClientConnectArguments; import com.pinecone.hydra.umct.UMCTExpressHandler; public interface Messenger extends MessageNode { UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered, long nWaitTime ) throws IOException; ClientConnectArguments getConnectionArguments(); UMCTExpressHandler getAsyncMsgHandler(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessengerChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import java.util.concurrent.locks.Lock; public interface MessengerChannelControlBlock extends ChannelControlBlock { @Override default Messenger getParentMessageNode(){ return (Messenger) this.getChannel().getParentMessageNode(); } Lock getSynRequestLock(); UMCMessage sendSyncMsg( UMCMessage message, boolean bNoneBuffered, long nWaitTime ) throws IOException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MsgNodeConfig.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public interface MsgNodeConfig extends Pinenut { long getSyncWaitingMillis(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MsgProtocol.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public interface MsgProtocol extends Pinenut { Medium getMessageSource(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MultiClientChannelRegistry.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public interface MultiClientChannelRegistry extends Pinenut { int size(); void clear(); boolean isEmpty(); void register( CID id, ChannelControlBlock controlBlock ); void deregister( CID id, ChannelControlBlock controlBlock ); void deregister( CID id ); ChannelPool getPool( CID id ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Recipient.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.hydra.umc.wolf.server.ServerConnectArguments; public interface Recipient extends MessageNode { int getMaximumConnections(); ServerConnectArguments getConnectionArguments(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/RecipientChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.msg; public interface RecipientChannelControlBlock extends ChannelControlBlock { @Override default Recipient getParentMessageNode(){ return (Recipient) this.getChannel().getParentMessageNode(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/RegisterChannelPool.java ================================================ package com.pinecone.hydra.umc.msg; public interface RegisterChannelPool extends 
MappedChannelPool { long getMajorWaitTimeout(); RegisterChannelPool setMajorWaitTimeout( long nMillisTimeout ); int getMaximumPoolSize(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Status.java ================================================ package com.pinecone.hydra.umc.msg; public enum Status { SwitchingProtocols ( 101, "SwitchingProtocols" ), // For messenger(a.k.a, `Client`) or recipient (a.k.a, `Server`) // The confirmed and successful session OK ( 200, "OK" ), // For messenger(a.k.a, `Client`) or recipient (a.k.a, `Server`) // BadRequest or BadResponse BadSession ( 400, "BadSession" ), Unauthorized ( 401, "Unauthorized" ), IllegalMessage ( 402, "IllegalMessage" ), Forbidden ( 403, "Forbidden" ), MappingNotFound ( 404, "MappingNotFound" ), InternalError ( 500, "InternalError" ), NotImplemented ( 501, "NotImplemented" ), BadGateway ( 502, "BadGateway" ), Unavailable ( 503, "Unavailable" ), GatewayTimeout ( 504, "GatewayTimeout" ), VersionNotSupported ( 505, "VersionNotSupported" ), TooManyConnections ( 506, "TooManyConnections" ); private final int value; private final String name; Status( int value, String name ){ this.value = value; this.name = name; } public String getName(){ return this.name; } public int getValue() { return this.value; } public short getShortValue() { return (short) this.value; } public static Status asValue( int val ) { for ( Status type : Status.values() ) { if ( type.getValue() == val ) { return type; } } throw new IllegalArgumentException( "Invalid status value: " + val ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/StreamTerminateException.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; import java.io.IOException; public class StreamTerminateException extends IOException implements Pinenut { public StreamTerminateException() { super(); } public StreamTerminateException( String message ) { super(message); } public StreamTerminateException( String message, Throwable cause ) { super(message, cause); } public StreamTerminateException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/SyncFairChannelPool.java ================================================ package com.pinecone.hydra.umc.msg; public interface SyncFairChannelPool extends FairChannelPool { ChannelControlBlock nextSyncChannel( long nMillisTimeout, boolean bEager ) ; default ChannelControlBlock nextSyncChannel( long nMillisTimeout ) { return this.nextSyncChannel( nMillisTimeout, false ); } default ChannelControlBlock nextSyncChannel() { return this.nextSyncChannel( 5000 ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/TransferMessage.java ================================================ package com.pinecone.hydra.umc.msg; public interface TransferMessage extends UMCMessage { @Override default TransferMessage evinceTransferMessage() { return this; } Object getBody() ; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCCHead.java ================================================ package com.pinecone.hydra.umc.msg; import java.util.Map; import 
com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public interface UMCCHead extends UMCHead { void enableField( int at ); void disableField( int at ); void enableField( String fieldName ); void disableField( String fieldName ); long getFieldIndexBitmap(); long evalIndexBitmap(); void setBodyLength ( long length ); void setExtraHead ( JSONObject jo ) ; void setExtraHead ( Map jo ) ; void setExtraHead ( Object o ) ; void setExtraEncode ( ExtraEncode encode ) ; UMCCHead applyExHead ( Map jo ) ; void applyExtraHeadCoder ( ExtraHeadCoder coder ) ; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCCHeadV1.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Comparator; import java.util.Map; import com.pinecone.framework.unit.BitSet64; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.datetime.compact.CompactTimeUnit; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.JSONString; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; /** * Pinecone Ursus For Java UMCC[ Uniform Message Control - Compacted ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ********************************************************** * Uniform Message Control Protocol - Base-Mutable - Compacted [UMC-C] * ********************************************************** */ public class UMCCHeadV1 extends UMCHeadV1 implements UMCCHead { public static final String ProtocolSignature = "UMC-C/" + UMCHeadV1.ProtocolVersion; public static final int BitmapAt = 1; public static final int BitmapBytes = Long.BYTES; protected long fieldIndexBitmap ; // :1 sizeof( int64 ) = 8, Field index-control bitmap.
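The fieldIndexBitmap above is a plain 64-bit mask, and the BitSet64 helpers used throughout this class correspond to ordinary JDK bit operations; roughly (illustrative equivalents, not the BitSet64 source):

final class BitmapOpsSketch {
    public static void main( String[] args ) {
        long bitmap = 0L;
        bitmap |= 1L << 5;                             // BitSet64.setBit   ~ enableField( FieldMethod.index )
        bitmap |= 1L << 6;                             // BitSet64.setBit   ~ enableField( FieldStatus.index )
        bitmap &= ~( 1L << 6 );                        // BitSet64.clearBit ~ disableField( FieldStatus.index )
        System.out.println( Long.bitCount( bitmap ) ); // ~ BitSet64.existence / fieldsSize(), prints 1
    }
}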
public static final HeadField FieldSignature = new HeadField( "signature" , 0, ProtocolSignature.length() ); public static final HeadField FieldExtraHeadLength = new HeadField( "extraHeadLength" , 1, Integer.BYTES ); public static final HeadField FieldExtraEncode = new HeadField( "extraEncode" , 2, Byte.BYTES ); public static final HeadField FieldBodyLength = new HeadField( "bodyLength" , 3, Long.BYTES ); public static final HeadField FieldKeepAlive = new HeadField( "keepAlive" , 4, Integer.BYTES ); public static final HeadField FieldMethod = new HeadField( "method" , 5, Byte.BYTES ); public static final HeadField FieldStatus = new HeadField( "status" , 6, Short.BYTES ); public static final HeadField FieldControlBits = new HeadField( "controlBits" , 7, Integer.BYTES ); public static final HeadField FieldIdentityId = new HeadField( "identityId" , 8, Long.BYTES ); public static final HeadField FieldSessionId = new HeadField( "sessionId" , 9, Long.BYTES ); static final HeadField[] HeadFieldsMap = new HeadField[ UMCHeadV1.HeadFieldsSize ]; static final HeadField[] HeadFieldsIndex = { FieldSignature, FieldExtraHeadLength, FieldExtraEncode, FieldBodyLength, FieldKeepAlive, FieldMethod, FieldStatus, FieldControlBits, FieldIdentityId, FieldSessionId }; static { System.arraycopy( HeadFieldsIndex, 0, HeadFieldsMap, 0, HeadFieldsSize ); Arrays.sort( HeadFieldsMap, Comparator.comparing(a -> a.name) ); } public static HeadField searchField( String fieldName ) { int low = 0; int high = HeadFieldsMap.length - 1; while ( low <= high ) { int mid = (low + high) >>> 1; int cmp = HeadFieldsMap[ mid ].name.compareTo(fieldName); if ( cmp == 0 ) { return HeadFieldsMap[ mid ]; } else if ( cmp < 0 ) { low = mid + 1; } else { high = mid - 1; } } return null; } protected void enableDefaultFields() { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldSignature.index ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraHeadLength.index ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraEncode.index ); } public UMCCHeadV1( ) { this( UMCCHeadV1.ProtocolSignature ); } public UMCCHeadV1( String szSignature ) { super( szSignature, UMCMethod.INFORM ); this.enableDefaultFields(); } public UMCCHeadV1( String szSignature, UMCMethod umcMethod ) { super( szSignature, umcMethod, 0 ); this.enableDefaultFields(); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldMethod.index ); } public UMCCHeadV1( String szSignature, UMCMethod umcMethod, long fieldIndexBitmap ) { super( szSignature, umcMethod, 0 ); this.fieldIndexBitmap = fieldIndexBitmap; } @Override public int sizeof() { int totalSize = BitmapBytes; for ( int i = 0; i < UMCHeadV1.HeadFieldsSize; ++i ) { if ( ( this.fieldIndexBitmap & (1L << i) ) != 0 ) { totalSize += HeadFieldsIndex[ i ].sizeof; } } return totalSize; } @Override public int fieldsSize() { return BitSet64.existence( this.fieldIndexBitmap ); } @Override public long getFieldIndexBitmap() { return this.fieldIndexBitmap; } @Override public long evalIndexBitmap() { if ( this.nExtraHeadLength > 0 ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraHeadLength.index ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraEncode.index ); } if ( this.nBodyLength > 0 ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldBodyLength.index ); } if ( this.nKeepAlive != -1 ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldKeepAlive.index ); } if ( 
this.method != null && this.method != UMCMethod.UNDEFINED && this.method != UMCMethod.INFORM ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldMethod.index ); } if ( this.status != Status.OK ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldStatus.index ); } if ( this.controlBits != 0 ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldControlBits.index ); } if ( this.identityId != 0 ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldIdentityId.index ); } if ( this.sessionId != 0 ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldSessionId.index ); } return this.fieldIndexBitmap; } @Override public void enableField( int at ) { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, at ); } @Override public void disableField( int at ) { this.fieldIndexBitmap = BitSet64.clearBit( this.fieldIndexBitmap, at ); } @Override public void enableField( String fieldName ) { HeadField field = UMCCHeadV1.searchField( fieldName ); if ( field == null ) { throw new IllegalArgumentException( fieldName + " does not exist." ); } this.enableField( field.index ); } @Override public void disableField( String fieldName ) { HeadField field = UMCCHeadV1.searchField( fieldName ); if ( field == null ) { throw new IllegalArgumentException( fieldName + " does not exist." ); } this.disableField( field.index ); } protected void enableExtraHead() { this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraHeadLength.index ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraEncode.index ); } @Override public void setExtraHead ( JSONObject jo ) { super.setExtraHead( jo ); this.enableExtraHead(); } @Override public void setExtraHead ( Map jo ) { super.setExtraHead( jo ); this.enableExtraHead(); } @Override public void setExtraHead ( Object o ) { super.setExtraHead( o ); this.enableExtraHead(); } @Override public void setExtraEncode ( ExtraEncode encode ) { super.setExtraEncode( encode ); this.transApplyExHead(); } public UMCCHead applyExHead ( Map jo ) { super.applyExHead( jo ); this.enableExtraHead(); return this; } @Override protected void transApplyExHead ( ) { if ( this.dyExtraHead != null && this.extraHeadCoder == null ) { throw new IllegalStateException( "ExtraHeadCoder is null."
); } super.transApplyExHead(); this.enableExtraHead(); } @Override public void applyExtraHeadCoder ( ExtraHeadCoder coder ) { super.applyExtraHeadCoder( coder ); this.enableExtraHead(); } @Override public void setBodyLength ( long length ) { super.setBodyLength( length ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldBodyLength.index ); } @Override public void setKeepAlive ( int nKeepAliveMills ) { super.setKeepAlive( nKeepAliveMills ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldKeepAlive.index ); } @Override public void setKeepAlive ( int nKeepAlive, CompactTimeUnit timeUnit ) { super.setKeepAlive( nKeepAlive, timeUnit ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldKeepAlive.index ); } @Override protected void setMethod ( UMCMethod umcMethod ) { super.setMethod( umcMethod ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldMethod.index ); } @Override public void setStatus ( Status status ) { super.setStatus( status ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldStatus.index ); } @Override public void setControlBits ( int controlBits ) { super.setControlBits( controlBits ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldControlBits.index ); } @Override public void setIdentityId ( long identityId ) { super.setIdentityId( identityId ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldIdentityId.index ); } @Override public void setSessionId ( long sessionId ) { super.setSessionId( sessionId ); this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldSessionId.index ); } @Override public String toJSONString() { String szExtraHead = this.jsonifyExtraHead(); return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "Signature" , this.getSignature() ), new KeyValue<>( "FieldIndexBitmap" , JSONString.wrapRaw( BitSet64.toIndexJSONString( this.fieldIndexBitmap ) ) ), new KeyValue<>( "ExtraHeadLength" , this.getExtraHeadLength() ), new KeyValue<>( "ExtraEncode" , this.getExtraEncode().getName() ), new KeyValue<>( "BodyLength" , this.getBodyLength() ), new KeyValue<>( "KeepAlive" , this.getKeepAlive() ), new KeyValue<>( "Method" , this.getMethod() ), new KeyValue<>( "Status" , this.getStatus().getName() ), new KeyValue<>( "ControlBits" , "0x" + Integer.toUnsignedString( this.getControlBits(),16 ) ), new KeyValue<>( "IdentityId" , this.getIdentityId() ), new KeyValue<>( "SessionId" , this.getSessionId() ), new KeyValue<>( "ExtraHead" , szExtraHead ), } ); } @Override public EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) { return UMCCHeadV1.encode( this, extraHeadCoder ); } public static class HeadField { public final String name; public final int index; public final int sizeof; HeadField( String name, int index, int sizeof ) { this.name = name; this.index = index; this.sizeof = sizeof; } } public static EncodePair encode( UMCCHead umcHead, ExtraHeadCoder extraHeadCoder ) { UMCCHeadV1 head = (UMCCHeadV1) umcHead; head.applyExtraHeadCoder( extraHeadCoder ); head.transApplyExHead(); int extraHeadLength = head.getExtraHeadLength(); ByteBuffer byteBuffer = ByteBuffer.allocate( UMCHeadV1.ReadBufferSize + extraHeadLength ); byteBuffer.order( UMCHeadV1.BinByteOrder ); int nBufLength = head.getSignatureLength(); byteBuffer.put( head.getSignature().getBytes() ); byteBuffer.putLong( head.fieldIndexBitmap ); nBufLength += Long.BYTES; for ( int i = BitmapAt; i < HeadFieldsIndex.length; ++i ) { if ( ( head.fieldIndexBitmap & 
(1L << i) ) != 0 ) { HeadField field = HeadFieldsIndex[i]; switch ( field.index ) { case 1: { // nExtraHeadLength byteBuffer.putInt( head.nExtraHeadLength ); nBufLength += Integer.BYTES; break; } case 2: { // extraEncode byteBuffer.put( head.extraEncode.getByteValue() ); nBufLength += Byte.BYTES; break; } case 3: { // nBodyLength byteBuffer.putLong( head.nBodyLength ); nBufLength += Long.BYTES; break; } case 4: { // nKeepAlive byteBuffer.putInt( head.nKeepAlive ); nBufLength += Integer.BYTES; break; } case 5: { // method byteBuffer.put( head.method.getByteValue() ); nBufLength += Byte.BYTES; break; } case 6: { // status byteBuffer.putShort( head.status.getShortValue() ); nBufLength += Short.BYTES; break; } case 7: { // controlBits byteBuffer.putInt( head.controlBits ); nBufLength += Integer.BYTES; break; } case 8: { // identityId byteBuffer.putLong( head.identityId ); nBufLength += Long.BYTES; break; } case 9: { // sessionId byteBuffer.putLong( head.sessionId ); nBufLength += Long.BYTES; break; } default: { break; } } } } if( head.extraHead == null ) { byteBuffer.put( Bytes.Empty ); } else { byteBuffer.put( head.extraHead ); } nBufLength += head.getExtraHeadLength(); return new EncodePair( byteBuffer, nBufLength ); } public static UMCCHead decode( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException { if ( buf.length < szSignature.length() ) { // Signature size is minimum. throw new StreamTerminateException( "StreamEndException:[UMC-CProtocol] Stream is ended." ); } int nReadAt = szSignature.length(); if ( !Arrays.equals( buf, 0, szSignature.length(), szSignature.getBytes(), 0, szSignature.length() ) ) { throw new IOException( "[UMC-CProtocol] Illegal protocol signature." ); } UMCCHeadV1 head = new UMCCHeadV1(); head.applyExtraHeadCoder( extraHeadCoder ); head.fieldIndexBitmap = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( BinByteOrder ).getLong(); nReadAt += Long.BYTES; for ( int i = BitmapAt; i < HeadFieldsIndex.length; ++i ) { if ( ( head.fieldIndexBitmap & (1L << i) ) != 0 ) { HeadField field = HeadFieldsIndex[ i ]; switch ( field.index ) { case 1: { // nExtraHeadLength head.nExtraHeadLength = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( BinByteOrder ).getInt(); nReadAt += Integer.BYTES; break; } case 2: { // extraEncode head.extraEncode = ExtraEncode.asValue( ByteBuffer.wrap( buf, nReadAt, Byte.BYTES ).order( BinByteOrder ).get() ); nReadAt += Byte.BYTES; break; } case 3: { // nBodyLength head.nBodyLength = ByteBuffer.wrap(buf, nReadAt, Long.BYTES).order( BinByteOrder ).getLong(); nReadAt += Long.BYTES; break; } case 4: { // nKeepAlive head.nKeepAlive = ByteBuffer.wrap(buf, nReadAt, Integer.BYTES).order( BinByteOrder ).getInt(); nReadAt += Integer.BYTES; break; } case 5: { // method head.method = UMCMethod.values()[buf[nReadAt]]; nReadAt += Byte.BYTES; break; } case 6: { // status head.status = Status.asValue(ByteBuffer.wrap(buf, nReadAt, Short.BYTES).order( BinByteOrder ).getShort()); nReadAt += Short.BYTES; break; } case 7: { // controlBits head.controlBits = ByteBuffer.wrap(buf, nReadAt, Integer.BYTES).order( BinByteOrder ).getInt(); nReadAt += Integer.BYTES; break; } case 8: { // identityId head.identityId = ByteBuffer.wrap(buf, nReadAt, Long.BYTES).order( BinByteOrder ).getLong(); nReadAt += Long.BYTES; break; } case 9: { // sessionId head.sessionId = ByteBuffer.wrap(buf, nReadAt, Long.BYTES).order( BinByteOrder ).getLong(); nReadAt += Long.BYTES; break; } default: { break; } } } } return head; } } 
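In use, the compact head behaves like any other UMCHead; the difference is that each setter flips its field's bit, so encode() above writes only the fields that were actually touched. A short usage sketch (encodeCompact is illustrative; the ExtraHeadCoder is assumed to come from the surrounding message node, as in ArchUMCProtocol.getExtraHeadCoder()):

import com.pinecone.hydra.umc.msg.Status;
import com.pinecone.hydra.umc.msg.UMCCHeadV1;
import com.pinecone.hydra.umc.msg.UMCHead;
import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;

final class CompactHeadSketch {
    static byte[] encodeCompact( ExtraHeadCoder coder ) {
        UMCCHeadV1 head = new UMCCHeadV1();                   // defaults: signature + extra-head fields
        head.setStatus( Status.OK );                          // flips FieldStatus in the bitmap
        head.setSessionId( 42L );                             // flips FieldSessionId
        UMCHead.EncodePair pair = head.bytesEncode( coder );  // signature + bitmap + enabled fields only
        return pair.getBytes();
    }
}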
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCChannel.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.wolf.UlfChannelStatus; import java.io.IOException; import java.net.SocketAddress; public interface UMCChannel extends Pinenut { Thread getAffiliateThread(); // Target address. SocketAddress getAddress(); SocketAddress remoteAddress(); SocketAddress localAddress(); void reconnect() throws IOException; void reconnect( long millis ) throws IOException; Object getNativeHandle(); ChannelStatus getChannelStatus(); void setChannelStatus( UlfChannelStatus status ); MessageNode getParentMessageNode(); Object getChannelID() ; long getIdentityID(); void release(); void close(); boolean isShutdown(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCConstants.java ================================================ package com.pinecone.hydra.umc.msg; public final class UMCConstants { public static final long DefaultSyncWaitingMillis = 600000; // 10 * 60 * 1000 [ms] } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCException.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.Pinenut; public class UMCException extends Exception implements Pinenut { public UMCException() { super(); } public UMCException( String message ) { super(message); } public UMCException( String message, Throwable cause ) { super(message, cause); } public UMCException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCHead.java ================================================ package com.pinecone.hydra.umc.msg; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Map; import com.pinecone.framework.util.datetime.compact.CompactTimeUnit; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; public interface UMCHead extends EMCHead { ExtraHeadCoder getExtraHeadCoder(); UMCMethod getMethod(); long getBodyLength(); long getKeepAlive(); int getCompactKeepAlive(); long getSessionId(); Status getStatus(); ExtraEncode getExtraEncode(); int getControlBits(); long getIdentityId(); byte[] getExtraHeadBytes(); Map evalMapExtraHead() ; Map getMapExtraHead() ; Object getExtraHead(); Object getExHeaderVal( String key ); void putExHeaderVal( String key, Object val ) throws IllegalArgumentException; void setStatus ( Status status ); void setKeepAlive ( int nKeepAliveMills ); void setKeepAlive ( int nKeepAlive, CompactTimeUnit timeUnit ); void setControlBits ( int controlBits ); void setIdentityId ( long identityId ); void setSessionId ( long sessionId ); void release(); default AbstractUMCHead inface() { return (AbstractUMCHead) this; } class EncodePair { public final ByteBuffer byteBuffer; public final int bufLength; public EncodePair( ByteBuffer byteBuffer, int bufLength ) { this.byteBuffer = byteBuffer; this.bufLength = bufLength; } public byte[] getBytes() { return Arrays.copyOfRange( this.byteBuffer.array(), 0, this.bufLength ); } } EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) ; default EncodePair bytesEncode() { return this.bytesEncode( this.getExtraHeadCoder()
); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCHeadV1.java ================================================ package com.pinecone.hydra.umc.msg; import com.pinecone.framework.system.prototype.ObjectiveBean; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.datetime.compact.CompactTimeUnit; import com.pinecone.framework.util.datetime.compact.CompactTimeUnit32; import com.pinecone.framework.util.datetime.compact.CompactTimestamp32; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Arrays; import java.util.Map; public class UMCHeadV1 extends AbstractUMCHead implements UMCHead { public static final String ProtocolVersion = "1.1"; public static final String ProtocolSignature = "UMC/" + UMCHeadV1.ProtocolVersion; public static final int StructBlockSize = Integer.BYTES + Byte.BYTES + Long.BYTES + Integer.BYTES + Byte.BYTES + Short.BYTES + Integer.BYTES + Long.BYTES + Long.BYTES; public static final int HeadBlockSize = UMCHeadV1.ProtocolSignature.length() + UMCHeadV1.StructBlockSize; public static final ByteOrder BinByteOrder = ByteOrder.LITTLE_ENDIAN ;// Using x86, C/C++ public static final int HeadFieldsSize = 10; public static final int ReadBufferSize = 64; protected String szSignature ; // :0 protected int nExtraHeadLength = 2 ; // :1 sizeof( int32 ) = 4 protected ExtraEncode extraEncode = ExtraEncode.Undefined ; // :2 sizeof( ExtraEncode/byte ) = 1 protected long nBodyLength = 0 ; // :3 sizeof( int64 ) = 8 protected int nKeepAlive = -1 ; // :4 sizeof( int32 ) = 4, CompactTimestamp32, [-1 for forever, 0 for off, others for millis(default, or seconds/hours/etc).] protected UMCMethod method ; // :5 sizeof( UMCMethod/byte ) = 1 protected Status status = Status.OK ; // :6 sizeof( Status/Short ) = 2 protected int controlBits = 0 ; // :7 sizeof( int32 ) = 4, Custom control bytes. 
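The ":0" through ":9" comments on this class's fields give the exact wire order, and encode() further below writes them little-endian through a ByteBuffer. A standalone sketch of that fixed layout (values are placeholders; the method byte in particular is hypothetical, since UMCMethod's byte values are defined elsewhere):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class FixedHeadLayoutSketch {
    public static void main( String[] args ) {
        byte[] sig = "UMC/1.1".getBytes();
        ByteBuffer buf = ByteBuffer.allocate( sig.length + 40 ).order( ByteOrder.LITTLE_ENDIAN );
        buf.put( sig );                       // :0 signature
        buf.putInt( 0 );                      // :1 nExtraHeadLength
        buf.put( (byte) 0x04 );               // :2 extraEncode (ExtraEncode.Prototype)
        buf.putLong( 0L );                    // :3 nBodyLength
        buf.putInt( -1 );                     // :4 nKeepAlive (-1 = forever)
        buf.put( (byte) 1 );                  // :5 method (hypothetical byte value)
        buf.putShort( (short) 200 );          // :6 status (Status.OK)
        buf.putInt( 0 );                      // :7 controlBits
        buf.putLong( 0L );                    // :8 identityId
        buf.putLong( 0L );                    // :9 sessionId
        System.out.println( buf.position() ); // 47 == UMCHeadV1.HeadBlockSize
    }
}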
protected long identityId = 0 ; // :8 sizeof( int64 ) = 8, Client / Node ID protected long sessionId = 0 ; // :9 sizeof( int64 ) = 8 protected byte[] extraHead = {} ; protected Object dyExtraHead ; protected ExtraHeadCoder extraHeadCoder ; public UMCHeadV1( ) { this( UMCHeadV1.ProtocolSignature, UMCMethod.INFORM ); } public UMCHeadV1( String szSignature ) { this( szSignature, UMCMethod.INFORM ); } public UMCHeadV1( String szSignature, int controlBits ) { this( szSignature, UMCMethod.INFORM, controlBits ); } public UMCHeadV1( String szSignature, UMCMethod umcMethod ) { this( szSignature, umcMethod, 0 ); } public UMCHeadV1( String szSignature, UMCMethod umcMethod, int controlBits ) { this( szSignature, umcMethod, new LinkedTreeMap<>(), controlBits ); } public UMCHeadV1( String szSignature, UMCMethod umcMethod, Object ex, int controlBits ) { this.szSignature = szSignature; this.method = umcMethod; this.dyExtraHead = ex; this.controlBits = controlBits; } UMCHeadV1( String szSignature, UMCMethod umcMethod, Map joEx, int controlBits ) { this( szSignature, umcMethod, (Object) joEx, controlBits ); } UMCHeadV1( String szSignature, UMCMethod umcMethod, Map joEx ) { this( szSignature, umcMethod, (Object) joEx, 0 ); } @Override public int sizeof() { return UMCHeadV1.HeadBlockSize; } @Override public int fieldsSize() { return UMCHeadV1.HeadFieldsSize; } @Override protected void setSignature ( String signature ) { this.szSignature = signature; } @Override protected void setBodyLength ( long length ) { this.nBodyLength = length; } @Override public void setKeepAlive ( int nKeepAliveMills ) { this.nKeepAlive = nKeepAliveMills; } @Override public void setKeepAlive ( int nKeepAlive, CompactTimeUnit timeUnit ) { this.nKeepAlive = CompactTimestamp32.encode( nKeepAlive, (CompactTimeUnit32) timeUnit ); } @Override protected void setMethod ( UMCMethod umcMethod ) { this.method = umcMethod; if ( this.method == UMCMethod.INFORM ) { this.nBodyLength = 0; } } @Override protected void setExtraEncode ( ExtraEncode encode ) { this.extraEncode = encode; } @Override public void setControlBits ( int controlBits ) { this.controlBits = controlBits; } @Override public void setIdentityId ( long identityId ) { this.identityId = identityId; } @Override public void setSessionId ( long sessionId ) { this.sessionId = sessionId; } @Override protected void setExtraHead ( JSONObject jo ) { this.dyExtraHead = jo.getMap(); } @Override protected void setExtraHead ( Map jo ) { this.dyExtraHead = jo; } @Override protected void setExtraHead ( Object o ) { this.dyExtraHead = o; if( o == null ) { this.nExtraHeadLength = 0; } } @Override protected void transApplyExHead ( ) { if ( this.dyExtraHead != null ) { this.extraHead = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead ); this.nExtraHeadLength = this.extraHead.length; } else { if( this.extraEncode == ExtraEncode.JSONString ) { this.extraHead = "{}".getBytes(); } else if( this.extraEncode == ExtraEncode.Prototype ) { this.extraHead = null; this.nExtraHeadLength = 0; return; } else if( this.extraEncode == ExtraEncode.Iussum ) { this.extraHead = new byte[ 0 ]; this.nExtraHeadLength = 0; return; } else { this.dyExtraHead = this.extraHeadCoder.newExtraHead(); this.extraHead = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead ); } } this.nExtraHeadLength = this.extraHead.length; } @Override protected void applyExtraHeadCoder ( ExtraHeadCoder coder ) { this.extraHeadCoder = coder; if( this.extraEncode == ExtraEncode.Undefined ) { this.extraEncode = 
coder.getDefaultEncode(); } } @Override public void setStatus ( Status status ) { this.status = status; } @Override public ExtraHeadCoder getExtraHeadCoder() { return this.extraHeadCoder; } @Override public String getSignature() { return this.szSignature; } @Override public int getSignatureLength() { return this.getSignature().length(); } @Override public UMCMethod getMethod() { return this.method; } @Override public int getExtraHeadLength() { return this.nExtraHeadLength; } @Override public long getBodyLength() { return this.nBodyLength; } @Override public long getKeepAlive() { return CompactTimestamp32.toMilliseconds( this.nKeepAlive ); } @Override public int getCompactKeepAlive() { return this.nKeepAlive; } @Override public long getSessionId() { return this.sessionId; } @Override public Status getStatus() { return this.status; } @Override public ExtraEncode getExtraEncode() { return this.extraEncode; } @Override public int getControlBits() { return this.controlBits; } @Override public long getIdentityId() { return this.identityId; } @Override public byte[] getExtraHeadBytes() { return this.extraHead ; } @Override @SuppressWarnings( "unchecked" ) public Map evalMapExtraHead() { if( this.dyExtraHead instanceof Map ) { return (Map) this.dyExtraHead; } return ( new ObjectiveBean( this.dyExtraHead ) ).toMap(); } @Override @SuppressWarnings( "unchecked" ) public Map getMapExtraHead() { if( this.dyExtraHead instanceof Map ) { return (Map) this.dyExtraHead; } return null; } @Override public Object getExtraHead() { return this.dyExtraHead; } @Override public void putExHeaderVal( String key, Object val ) throws IllegalArgumentException { if( this.dyExtraHead instanceof Map ) { this.getMapExtraHead().put( key, val ); } else { ReflectionUtils.beanSet( this.dyExtraHead, key, val ); } } @Override public Object getExHeaderVal( String key ) { if( this.dyExtraHead instanceof Map ) { return this.getMapExtraHead().get( key ); } else { return ReflectionUtils.beanGet( this.dyExtraHead, key ); } } protected UMCHead applyExHead( Map jo ) { if( !( this.dyExtraHead instanceof Map ) && this.dyExtraHead != null ) { throw new IllegalArgumentException( "Current extra head is not dynamic."
); } if( this.getMapExtraHead() == null || this.getMapExtraHead().size() == 0 ) { this.setExtraHead( jo ); } else { if( jo.size() > this.getMapExtraHead().size() ) { jo.putAll( this.getMapExtraHead() ); this.setExtraHead( jo ); } else { this.getMapExtraHead().putAll( jo ); } } return this; } public UMCHead receiveSet( Map joExtraHead ) { this.dyExtraHead = joExtraHead; return this; } @Override public void release() { // Help GC this.dyExtraHead = null; } @Override public String toString() { return this.toJSONString(); } @Override public EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) { return UMCHeadV1.encode( this, extraHeadCoder ); } public static EncodePair encode( UMCHead umcHead, ExtraHeadCoder extraHeadCoder ) { UMCHeadV1 head = (UMCHeadV1) umcHead; head.applyExtraHeadCoder( extraHeadCoder ); head.transApplyExHead(); ByteBuffer byteBuffer = ByteBuffer.allocate( UMCHeadV1.ReadBufferSize + head.getExtraHeadLength() ); byteBuffer.order( UMCHeadV1.BinByteOrder ); int nBufLength = head.getSignatureLength(); byteBuffer.put( head.getSignature().getBytes() ); //byteBuffer.put( (byte) ' ' ); //++nBufLength; byteBuffer.putInt( head.nExtraHeadLength ); nBufLength += Integer.BYTES; byteBuffer.put( head.extraEncode.getByteValue() ); nBufLength += Byte.BYTES; byteBuffer.putLong( head.nBodyLength ); nBufLength += Long.BYTES; byteBuffer.putInt( head.nKeepAlive ); nBufLength += Integer.BYTES; byteBuffer.put( head.method.getByteValue() ); nBufLength += Byte.BYTES; byteBuffer.putShort( head.status.getShortValue() ); nBufLength += Short.BYTES; byteBuffer.putInt( head.controlBits ); nBufLength += Integer.BYTES; byteBuffer.putLong( head.identityId ); nBufLength += Long.BYTES; byteBuffer.putLong( head.sessionId ); nBufLength += Long.BYTES; if( head.extraHead == null ) { byteBuffer.put( Bytes.Empty ); } else { byteBuffer.put( head.extraHead ); } nBufLength += head.getExtraHeadLength(); return new EncodePair( byteBuffer, nBufLength ); } public static UMCHead decode( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException { int nBufSize = ArchUMCProtocol.basicHeadLength( szSignature ); if ( buf.length < nBufSize ) { throw new StreamTerminateException( "StreamEndException:[UMCProtocol] Stream is ended." ); } int nReadAt = szSignature.length(); if ( !Arrays.equals( buf, 0, szSignature.length(), szSignature.getBytes(), 0, szSignature.length() ) ) { throw new IOException( "[UMCProtocol] Illegal protocol signature." 
); } UMCHeadV1 head = new UMCHeadV1(); head.applyExtraHeadCoder( extraHeadCoder ); //nReadAt++; // For ' ' head.nExtraHeadLength = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( UMCHeadV1.BinByteOrder ).getInt(); nReadAt += Integer.BYTES; head.extraEncode = ExtraEncode.asValue( ByteBuffer.wrap( buf, nReadAt, Byte.BYTES ).order( UMCHeadV1.BinByteOrder ).get() ); nReadAt += Byte.BYTES; head.nBodyLength = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( UMCHeadV1.BinByteOrder ).getLong(); nReadAt += Long.BYTES; head.nKeepAlive = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( UMCHeadV1.BinByteOrder ).getInt(); nReadAt += Integer.BYTES; head.method = UMCMethod.values()[ buf[nReadAt] ]; nReadAt += Byte.BYTES; head.status = Status.asValue( ByteBuffer.wrap( buf, nReadAt, Short.BYTES ).order( UMCHeadV1.BinByteOrder ).getShort() ); nReadAt += Short.BYTES; head.controlBits = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( UMCHeadV1.BinByteOrder ).getInt(); nReadAt += Integer.BYTES; head.identityId = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( UMCHeadV1.BinByteOrder ).getLong(); nReadAt += Long.BYTES; head.sessionId = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( UMCHeadV1.BinByteOrder ).getLong(); nReadAt += Long.BYTES; return head; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCMessage.java ================================================ package com.pinecone.hydra.umc.msg; public interface UMCMessage extends Message { UMCHead getHead(); default UMCMethod getMethod(){ return this.getHead().getMethod(); } default Object getExHead() { return this.getHead().getExtraHead(); } default InformMessage evinceInformMessage() { return null; } default TransferMessage evinceTransferMessage() { return null; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCMethod.java ================================================ package com.pinecone.hydra.umc.msg; public enum UMCMethod { UNDEFINED ( 0x00, "Undefined" ), INFORM ( 0x01, "Inform" ), TRANSFER ( 0x02, "Transfer" ); private final int value; private final String name; UMCMethod( int value, String name ){ this.value = value; this.name = name; } public String getName(){ return this.name; } public int getValue() { return this.value; } public byte getByteValue() { return (byte) this.value; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCProtocol.java ================================================ package com.pinecone.hydra.umc.msg; /** * Pinecone Ursus For Java UMCProtocol [ Unified Message Control Protocol ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
*/ public interface UMCProtocol extends MsgProtocol { UMCProtocol applyMessageSource( Medium medium ) ; String getVersion(); String getSignature(); void release(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCReceiver.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; public interface UMCReceiver extends UMCProtocol{ Object readInformMsg() throws IOException; UMCMessage readTransferMsg() throws IOException; UMCMessage readTransferMsgBytes() throws IOException; UMCMessage readMsg() throws IOException; UMCMessage readMsgBytes() throws IOException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCServiceException.java ================================================ package com.pinecone.hydra.umc.msg; public class UMCServiceException extends UMCException { public UMCServiceException() { super(); } public UMCServiceException( String message ) { super(message); } public UMCServiceException( String message, Throwable cause ) { super(message, cause); } public UMCServiceException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCTransmit.java ================================================ package com.pinecone.hydra.umc.msg; import java.io.IOException; import java.io.InputStream; public interface UMCTransmit extends UMCProtocol { void sendInformMsg( Object msg ) throws IOException; void sendInformMsg( Object msg, Status status ) throws IOException; void sendTransferMsg( Object msg, byte[] bytes ) throws IOException; void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException; default void sendTransferMsg( Object msg, String sz ) throws IOException { this.sendTransferMsg( msg, sz.getBytes() ); } void sendTransferMsg( Object msg, InputStream is ) throws IOException; void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException; default void sendMsg( UMCMessage msg ) throws IOException { this.sendMsg( msg, false ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/event/ChannelDataInterceptor.java ================================================ package com.pinecone.hydra.umc.msg.event; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelHandleException; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import io.netty.channel.ChannelHandlerContext; public interface ChannelDataInterceptor extends Pinenut { boolean interceptAfterDataArrived ( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws ChannelHandleException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/event/ChannelEventHandler.java ================================================ package com.pinecone.hydra.umc.msg.event; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import io.netty.channel.ChannelHandlerContext; public interface ChannelEventHandler extends Pinenut { void afterEventTriggered( ChannelControlBlock block, Object context ); } 
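Usage sketch (illustrative only, not a file in this repository): the event contracts above are small, so a pass-through tracing interceptor shows the whole surface. It assumes Pinenut is a marker-style base with no abstract members (consistent with the other implementations in this dump), and that returning false from interceptAfterDataArrived means "not consumed, continue normal dispatch"; verify that convention against the dispatching code in the wolf package before relying on it.

import com.pinecone.hydra.umc.msg.ChannelControlBlock;
import com.pinecone.hydra.umc.msg.ChannelHandleException;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor;
import io.netty.channel.ChannelHandlerContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TracingDataInterceptor implements ChannelDataInterceptor {
    private static final Logger logger = LoggerFactory.getLogger( TracingDataInterceptor.class );

    @Override
    public boolean interceptAfterDataArrived( Medium medium, ChannelControlBlock block, UMCMessage msg,
                                              ChannelHandlerContext ctx, Object rawMsg ) throws ChannelHandleException {
        // Trace the decoded head fields; the head getters used here are all defined in UMCHeadV1 above.
        logger.debug( "[{}] method={}, status={}, controlBits={}",
                medium.sourceName(), msg.getMethod(), msg.getHead().getStatus(), msg.getHead().getControlBits() );
        return false; // assumed convention: false = pass the message on
    }
}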
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/event/ChannelInactiveHandler.java ================================================ package com.pinecone.hydra.umc.msg.event; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelHandleException; import io.netty.channel.ChannelHandlerContext; public interface ChannelInactiveHandler extends ChannelEventHandler { boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException; @Override default void afterEventTriggered( ChannelControlBlock block, Object context ) { } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadCoder.java ================================================ package com.pinecone.hydra.umc.msg.extra; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.ExtraEncode; import java.util.Map; public interface ExtraHeadCoder extends Pinenut { ExtraHeadEncoder getEncoder(); ExtraHeadDecoder getDecoder(); Map newExtraHead(); ExtraEncode getDefaultEncode(); void setDefaultEncode( ExtraEncode encode ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadDecoder.java ================================================ package com.pinecone.hydra.umc.msg.extra; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.UMCHead; public interface ExtraHeadDecoder extends Pinenut { Object decode( UMCHead head, byte[] raw ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadEncoder.java ================================================ package com.pinecone.hydra.umc.msg.extra; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.UMCHead; public interface ExtraHeadEncoder extends Pinenut { byte[] encode( UMCHead head, Object jo ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadMarshalingException.java ================================================ package com.pinecone.hydra.umc.msg.extra; import com.pinecone.framework.system.PineRuntimeException; public class ExtraHeadMarshalingException extends PineRuntimeException { public ExtraHeadMarshalingException() { super(); } public ExtraHeadMarshalingException( String message ) { super( message ); } public ExtraHeadMarshalingException( String message, Throwable cause ) { super( message, cause ); } public ExtraHeadMarshalingException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/GenericExtraHeadCoder.java ================================================ package com.pinecone.hydra.umc.msg.extra; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.umc.msg.ExtraEncode; import java.util.Map; public class GenericExtraHeadCoder implements ExtraHeadCoder { protected ExtraHeadEncoder encoder; protected ExtraHeadDecoder decoder; protected ExtraEncode extraEncode; public GenericExtraHeadCoder () { this( new GenericExtraHeadEncoder(), new GenericExtraHeadDecoder() ); } public GenericExtraHeadCoder ( ExtraHeadEncoder encoder, ExtraHeadDecoder decoder ) { 
this( encoder, decoder, ExtraEncode.JSONString ); } public GenericExtraHeadCoder ( ExtraHeadEncoder encoder, ExtraHeadDecoder decoder, ExtraEncode extraEncode ) { this.encoder = encoder; this.decoder = decoder; this.extraEncode = extraEncode; } @Override public ExtraHeadEncoder getEncoder() { return this.encoder; } @Override public ExtraHeadDecoder getDecoder() { return this.decoder; } @Override public ExtraEncode getDefaultEncode() { return this.extraEncode; } @Override public void setDefaultEncode( ExtraEncode encode ) { this.extraEncode = encode; } @Override public Map newExtraHead() { return new JSONMaptron(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/GenericExtraHeadDecoder.java ================================================ package com.pinecone.hydra.umc.msg.extra; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.ulf.util.bson.UlfJSONDecompiler; import java.io.ByteArrayInputStream; import java.util.Map; public class GenericExtraHeadDecoder implements ExtraHeadDecoder { @Override @SuppressWarnings( "unchecked" ) public Object decode( UMCHead head, byte[] raw ) { ExtraEncode encode = head.getExtraEncode(); switch ( encode ) { case JSONString: { JSONObject jo = new JSONMaptron( head.evalMapExtraHead(), true ); jo.jsonDecode( new String( raw ) ); return jo; } case Binary: { ByteArrayInputStream is = new ByteArrayInputStream( raw ); UlfJSONDecompiler decompiler = new UlfJSONDecompiler( is ); Object o = decompiler.decompile(); if( o instanceof JSONObject ) { return o; } else if( o instanceof Map ) { return new JSONMaptron( (Map)o, true ) ; } throw new ExtraHeadMarshalingException( "Illegal decompiled Binary JSON, requires Map but " + o.getClass().getSimpleName() + " found." ); } case Iussum: case Prototype: { return raw; } } throw new ExtraHeadMarshalingException( "Unsupported encode mode[" + encode.getName() + "]." ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/GenericExtraHeadEncoder.java ================================================ package com.pinecone.hydra.umc.msg.extra; import com.pinecone.framework.util.json.JSON; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.ulf.util.bson.UlfJSONCompiler; import java.io.ByteArrayOutputStream; import java.io.IOException; public class GenericExtraHeadEncoder implements ExtraHeadEncoder { @Override public byte[] encode( UMCHead head, Object raw ) throws ExtraHeadMarshalingException { ExtraEncode encode = head.getExtraEncode(); switch ( encode ) { case JSONString: { return JSON.stringify( raw ).getBytes(); } case Binary: { UlfJSONCompiler compiler = new UlfJSONCompiler(); ByteArrayOutputStream os = new ByteArrayOutputStream(); try{ compiler.compile( raw, os ); } catch ( IOException e ) { throw new ExtraHeadMarshalingException( e ); } return os.toByteArray(); } case Iussum: case Prototype: { return (byte[]) raw; } } throw new ExtraHeadMarshalingException( "Unsupported encode mode[" + encode.getName() + "]."
); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/handler/ErrorMessageAudit.java ================================================ package com.pinecone.hydra.umc.msg.handler; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.UMCMessage; public interface ErrorMessageAudit extends Pinenut { boolean isErrorMessage( UMCMessage message ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/handler/GenericErrorMessageAudit.java ================================================ package com.pinecone.hydra.umc.msg.handler; import com.pinecone.hydra.umc.msg.MessageNodus; import com.pinecone.hydra.umc.msg.UMCMessage; public class GenericErrorMessageAudit implements ErrorMessageAudit { protected MessageNodus mMessageNode; public GenericErrorMessageAudit( MessageNodus node ) { this.mMessageNode = node; } @Override public boolean isErrorMessage( UMCMessage message ) { return message.getHead().getStatus().getValue() >= 500; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/handler/WrappedErrorMessageException.java ================================================ package com.pinecone.hydra.umc.msg.handler; import com.pinecone.framework.system.PineRuntimeException; import com.pinecone.hydra.umc.msg.Status; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.hydra.umc.msg.UMCMessage; public class WrappedErrorMessageException extends PineRuntimeException { protected Status status; protected UMCMessage message; public WrappedErrorMessageException( Status status ) { super(); this.status = status; } public WrappedErrorMessageException( String message, Status status ) { super( message ); this.status = status; } public WrappedErrorMessageException( String message, Throwable cause, Status status ) { super( message, cause ); this.status = status; } public WrappedErrorMessageException( Throwable cause, Status status ) { super(cause); this.status = status; } public Status getStatus() { return this.status; } public UMCMessage getUMCMessage() { return this.message; } public void setUMCMessage( UMCMessage message ) { this.message = message; } public static WrappedErrorMessageException wrap( UMCHead head ) { Object what = head.getExHeaderVal( "What" ); if( what instanceof String ) { return new WrappedErrorMessageException( (String) what, head.getStatus() ); } return new WrappedErrorMessageException( head.getStatus() ); } public static WrappedErrorMessageException wrap( UMCMessage message ) { WrappedErrorMessageException exception = wrap( message.getHead() ); exception.setUMCMessage( message ); return exception; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/vita/HeartbeatControl.java ================================================ package com.pinecone.hydra.umc.vita; import java.io.IOException; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.UMCMessage; public interface HeartbeatControl extends Pinenut { void registerChannels( Collection channels, long intervalMillis ) ; void registerChannel( ChannelControlBlock ccb, long intervalMillis ) ; void deregisterChannel( ChannelControlBlock ccb ) ; void shutdown() ; boolean interceptFeedback( 
ChannelControlBlock block, UMCMessage msg ) throws IOException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/vita/HeartbeatFeedbackor.java ================================================ package com.pinecone.hydra.umc.vita; import java.io.IOException; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.UMCMessage; public interface HeartbeatFeedbackor extends Pinenut { boolean interceptHeartbeat( ChannelControlBlock block, UMCMessage msg ) throws IOException ; void feedback( ChannelControlBlock block, UMCMessage msg ) throws IOException ; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ArchChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.io.IOCounter; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.framework.system.executum.ArchThreadum; import java.io.IOException; import java.util.concurrent.BlockingDeque; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; public abstract class ArchChannelControlBlock extends ArchThreadum implements NettyChannelControlBlock { protected UlfChannel mChannel; protected MessageNode mMessageNode; // For Load Balance. protected IOCounter mIOCounter; protected boolean mbForceSyncMode; protected boolean mbInSyncMode; protected UlfMCTransmit mTransmit; protected UlfMCReceiver mReceiver; protected BlockingDeque mAsyncMsgHandleQueue = new LinkedBlockingDeque<>(); protected ArchChannelControlBlock( MessageNode parentNode, UlfChannel channel, boolean bForceSyncMode ) { super( null, parentNode ); this.mChannel = channel; this.mbForceSyncMode = bForceSyncMode; this.mbInSyncMode = bForceSyncMode; this.mMessageNode = parentNode; } @Override public ArchChannelControlBlock setThreadAffinity( Thread affinity ) { super.setThreadAffinity( affinity ); this.getChannel().setThreadAffinity( affinity ); return this; } public UlfChannel getChannel() { return this.mChannel; } public IOCounter getIOCounter() { return this.mIOCounter; } public boolean getInSyncMode() { return this.mbInSyncMode; } public UlfMCTransmit getTransmit() { return this.mTransmit; } public UlfMCReceiver getReceiver() { return this.mReceiver; } public MessageNode getParentMessageNode() { return this.mMessageNode; } @Override public void sendMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException { this.getChannel().setChannelStatus( UlfChannelStatus.WAITING_FOR_SEND ); this.mTransmit.sendMsg( request, bNoneBuffered ); this.getChannel().setChannelStatus( UlfChannelStatus.WAITING_FOR_RECEIVE ); } protected void afterConnectionArrive( Medium medium, boolean bRenew, Lock forceSyncLock ) { if( this.mbForceSyncMode ) { forceSyncLock.lock(); } try{ if( bRenew ) { /* Renewed connection: rebind the existing transmit/receiver pair to the new medium. */ this.mTransmit.applyMessageSource( medium ); this.mReceiver.applyMessageSource( medium ); } else { /* First connection: create the transmit/receiver pair. */ this.mTransmit = new UlfMCTransmit( medium ); this.mReceiver = new UlfMCReceiver( medium ); } } finally { if( this.mbForceSyncMode ) { forceSyncLock.unlock(); } } } public void release() { this.mChannel.release(); //this.mChannel = null; this.mIOCounter = null; this.mTransmit = null; this.mReceiver = null; } public void close(){
this.mChannel.close(); } public boolean isShutdown() { return this.getChannel().isShutdown(); } public UlfChannelStatus getChannelStatus() { return this.getChannel().getChannelStatus(); } @Override public void kill() { this.close(); this.release(); } @Override public void pushMsgHandle ( UlfAsyncMsgHandleAdapter msgHandle ) { this.mAsyncMsgHandleQueue.add( msgHandle ); } @Override public UlfAsyncMsgHandleAdapter pollMsgHandle ( long nWaitMillis ) throws InterruptedException { return this.mAsyncMsgHandleQueue.poll( nWaitMillis, TimeUnit.MICROSECONDS ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ArchChannelPool.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.wolf.client.MessengerNettyChannelControlBlock; public abstract class ArchChannelPool implements ChannelPool { @Override public boolean isAllChannelsTerminated() { if( this.isEmpty() ) { return true; } //boolean b = true; for ( Object o : this.getPooledChannels() ){ MessengerNettyChannelControlBlock block = (MessengerNettyChannelControlBlock) o; //b = b && block.isShutdown(); if( !block.isShutdown() ) { return false; } } //return b; return true; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ArchUMCChannel.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.MessageNode; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelId; import java.net.SocketAddress; public abstract class ArchUMCChannel implements NettyUMCChannel { protected ChannelId mChannelID ; protected long mIdentityID ; protected Thread mAffiliateThread = Thread.currentThread(); protected MessageNode mParentMessageNode ; protected ChannelFuture mLastChannelFuture ; protected Channel mChannel ; protected SocketAddress mAddress ; protected volatile UlfChannelStatus mChannelStatus = UlfChannelStatus.IDLE; protected ArchUMCChannel( MessageNode node ) { this.mParentMessageNode = node; } protected ArchUMCChannel( MessageNode node, Channel nativeChannel, SocketAddress address ) { this( node ); this.mChannel = nativeChannel; this.mChannelID = this.mChannel.id(); this.mAddress = address; } protected ArchUMCChannel( MessageNode node, Channel nativeChannel ) { this( node, nativeChannel, null ); } public ArchUMCChannel setThreadAffinity( Thread affinity ) { this.mAffiliateThread = affinity; return this; } public ArchUMCChannel bindAffiliateThread( Thread affinity ) { if( this.mAffiliateThread == null ) { return this.setThreadAffinity( affinity ); } return this; } public synchronized ArchUMCChannel bindThisThread() { return this.bindAffiliateThread( Thread.currentThread() ); } public ChannelFuture getLastChannelFuture() { return this.mLastChannelFuture; } @Override public SocketAddress getAddress(){ return this.mAddress; } @Override public SocketAddress remoteAddress() { return this.mChannel.remoteAddress(); } @Override public SocketAddress localAddress() { return this.mChannel.localAddress(); } @Override public Thread getAffiliateThread(){ return this.mAffiliateThread; } @Override public ChannelId getChannelID() { return this.mChannelID; } @Override public long getIdentityID() { return this.mIdentityID; } void setIdentityID( long identityID ) { this.mIdentityID = identityID; } 
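// Note: setIdentityID above is intentionally package-private; ChannelUtils.setChannelIdentityID(..) is the sanctioned mutation path for a channel's identity.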
@Override public Channel getNativeHandle(){ return this.mChannel; } @Override public UlfChannelStatus getChannelStatus() { return this.mChannelStatus; } @Override public void setChannelStatus( UlfChannelStatus status ) { this.mChannelStatus = status; } @Override public MessageNode getParentMessageNode() { return this.mParentMessageNode; } @Override public void release() { this.mAffiliateThread = null; this.mLastChannelFuture = null; // this.mChannel = null; // this.mChannelStatus = null; this.mParentMessageNode = null; } @Override public void close() { this.setChannelStatus( UlfChannelStatus.WAITING_FOR_SHUTDOWN ); this.getNativeHandle().close(); this.setChannelStatus( UlfChannelStatus.SHUTDOWN ); } @Override public boolean isShutdown() { return this.getChannelStatus().isTerminated() || !this.getNativeHandle().isActive(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/AsyncUlfMedium.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.io.ChannelInputStream; import com.pinecone.hydra.umc.io.ChannelOutputStream; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.MessageNode; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import java.io.InputStream; import java.io.OutputStream; public class AsyncUlfMedium implements Medium { protected ChannelHandlerContext mContext; protected ByteBuf mInBuf; protected OutputStream mOutputStream ; protected InputStream mInputStream; protected MessageNode mMessageNode; public AsyncUlfMedium( ChannelHandlerContext context, MessageNode messageNode ) { this.mContext = context; this.mInBuf = null; this.mOutputStream = new ChannelOutputStream( this.mContext ); this.mInputStream = null; this.mMessageNode = messageNode; } public AsyncUlfMedium( ChannelHandlerContext context, ByteBuf byteBuf, MessageNode messageNode ) { this.mContext = context; this.mInBuf = byteBuf; this.mOutputStream = new ChannelOutputStream( this.mContext ); this.mInputStream = new ChannelInputStream( this.mInBuf ); this.mMessageNode = messageNode; } @Override public OutputStream getOutputStream(){ return this.mOutputStream; } @Override public InputStream getInputStream(){ return this.mInputStream; } @Override public Object getNativeMessageSource(){ return this.mContext.channel(); } @Override public String sourceName(){ return "WolfUMC"; } @Override public MessageNode getMessageNode() { return this.mMessageNode; } @Override public void release() { this.mContext = null; this.mInBuf = null; this.mOutputStream = null; this.mInputStream = null; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ChannelUtils.java ================================================ package com.pinecone.hydra.umc.wolf; public final class ChannelUtils { public static void setChannelIdentityID( UlfChannel channel, long nIdentityID ) { channel.setIdentityID( nIdentityID ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/GenericUMCByteMessageDecoder.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.UMCHeadV1; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.ByteToMessageDecoder; 
import com.pinecone.hydra.umc.msg.ArchUMCProtocol; import com.pinecone.hydra.umc.msg.UMCHead; import java.util.List; public class GenericUMCByteMessageDecoder extends ByteToMessageDecoder { private ByteBuf cumulation; private ExtraHeadCoder extraHeadCoder; private long byteSum; private long bodyBytes; private int readAt; private int readBytes; // Each package public GenericUMCByteMessageDecoder( ExtraHeadCoder extraHeadCoder ) { this.extraHeadCoder = extraHeadCoder; this.byteSum = -1; this.bodyBytes = 0; this.readAt = 0; this.readBytes = 0; } private static int countOccurrences( byte[] bfs, byte[] target ) { int count = 0; for ( int i = 0; i <= bfs.length - target.length; ++i ) { boolean match = true; for ( int j = 0; j < target.length; ++j ) { if (bfs[i + j] != target[j]) { match = false; break; } } if (match) { count++; } } return count; } @Override protected void decode( ChannelHandlerContext ctx, ByteBuf in, List out ) throws Exception { // ByteBuf bufs = in.copy(); // byte[] bfs = new byte[ bufs.readableBytes() ]; // bufs.readBytes( bfs ); // int occurrences = countOccurrences(bfs, "UMC/1.1".getBytes()); // int kf = countOccurrences(bfs, "afd".getBytes()); // if ( kf > 0 ) { // IC += occurrences; // Debug.redfs(IC); // } while ( in.readableBytes() > 0 ) { boolean bContinueRead = false; if ( this.byteSum == -1 ) { // For debug reference. // if ( in.readableBytes() > 100 ) { // Debug.traceSyn( in ); // } int nBufSize = ArchUMCProtocol.basicHeadLength( UMCHeadV1.ProtocolSignature ); // Waiting for more data to arrive, and that will be enough to decode the header. if ( in.readableBytes() < nBufSize ) { return; } this.readBytes = 0; byte[] buf = new byte[ nBufSize ]; in.readBytes(buf); // For debug reference. // if ( buf[ 0 ] != 85 ) { // Debug.traceSyn( buf ); // } UMCHead head = ArchUMCProtocol.onlyReadMsgBasicHead( buf, UMCHeadV1.ProtocolSignature, this.extraHeadCoder ); this.bodyBytes = head.getBodyLength(); this.byteSum = nBufSize + head.getExtraHeadLength() + this.bodyBytes; this.readAt += nBufSize; this.readBytes += nBufSize; if ( this.byteSum < 0 ) { throw new IllegalArgumentException( "Invalid byteSum calculation: " + this.byteSum ); } bContinueRead = true; } if ( bContinueRead ) { int startAt = this.readAt - this.readBytes; in.readerIndex( startAt ); this.readAt -= this.readBytes; this.readBytes = 0; } if ( in.readableBytes() >= this.byteSum ) { this.readBytes = (int)this.byteSum; ByteBuf completeMessage = in.readRetainedSlice((int) this.readBytes); this.readAt += this.readBytes; // For debug reference. // byte[] bs = new byte[ (int) this.byteSum ]; // ByteBuf byteBuf = completeMessage.copy(); // byteBuf.readBytes(bs); // byteBuf.release(); // if ( bs[ 0 ] != 85 ) { // Debug.traceSyn( bs, bContinueRead ); // } // head = ArchUMCProtocol.onlyReadMsgBasicHead( bs, UMCHeadV1.ProtocolSignature, this.extraHeadCoder ); // Debug.warnSyn( bs, head ); try { //Debug.bluefs( invokes.getAndIncrement() ); ctx.fireChannelRead(completeMessage); } finally { completeMessage.release(); } this.byteSum = -1; this.bodyBytes = 0; this.readBytes = 0; } else { return; } } if ( this.byteSum == -1 ) { this.readAt = 0; } // Waiting for more data to arrive. 
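// Framing recap: byteSum == -1 marks "between frames". Once a basic head is readable, byteSum = basic head size + extra head length + body length; the reader index is then rewound so the complete frame, head included, is sliced in a single readRetainedSlice and forwarded through fireChannelRead.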
// else { // return; // } } private void resetState() { this.byteSum = -1; this.bodyBytes = 0; this.readAt = 0; this.readBytes = 0; } @Override public void channelInactive( ChannelHandlerContext ctx ) throws Exception { super.channelInactive(ctx); if ( this.cumulation != null ) { this.cumulation.clear(); this.cumulation.release(); this.cumulation = null; } this.resetState(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/InternalErrors.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.Status; import com.pinecone.hydra.umc.msg.UMCMessage; import java.io.IOException; public final class InternalErrors { public static void sendInternalError( ChannelControlBlock channel, Status errorCode ) throws IOException { UMCMessage errorMsg = new UlfInformMessage( (Object) null ); errorMsg.getHead().setStatus( errorCode ); channel.sendMsg( errorMsg, true ); } public static void sendDefaultInternalError( ChannelControlBlock channel ) throws IOException { InternalErrors.sendInternalError( channel, Status.InternalError ); } public static void sendNotImplemented( ChannelControlBlock channel ) throws IOException { InternalErrors.sendInternalError( channel, Status.NotImplemented ); } public static void sendBadGateway( ChannelControlBlock channel ) throws IOException { InternalErrors.sendInternalError( channel, Status.BadGateway ); } public static void sendUnavailable( ChannelControlBlock channel ) throws IOException { InternalErrors.sendInternalError( channel, Status.Unavailable ); } public static void sendGatewayTimeout( ChannelControlBlock channel ) throws IOException { InternalErrors.sendInternalError( channel, Status.GatewayTimeout ); } public static void sendVersionNotSupported( ChannelControlBlock channel ) throws IOException { InternalErrors.sendInternalError( channel, Status.VersionNotSupported ); } public static void sendTooManyConnections( ChannelControlBlock channel ) throws IOException { InternalErrors.sendInternalError( channel, Status.TooManyConnections ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/MCConnectionArguments.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.MsgNodeConfig; public interface MCConnectionArguments extends MsgNodeConfig { String getHost(); void setHost( String host ); short getPort(); void setPort( short port ); int getKeepAliveTimeout(); void setKeepAliveTimeout( int keepAliveTimeout ); int getSocketTimeout(); void setSocketTimeout( int socketTimeout ); boolean isEnableHeartbeat() ; void setHeartbeatState( boolean enable ) ; long getHeartbeatInterval(); void setHeartbeatInterval( long heartbeatIntervalMills ); @Override default long getSyncWaitingMillis() { return this.getKeepAliveTimeout() * 1000L; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/MCSecurityAuthentication.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.framework.system.prototype.Pinenut; public interface MCSecurityAuthentication extends Pinenut { String getUsername(); void setUsername( String username ); String getDomain(); void setDomain( String domain ); String getPassword(); void setPassword( 
String password ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/MCSecurityToken.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.framework.system.prototype.Pinenut; public interface MCSecurityToken extends Pinenut { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/NettyChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ChannelControlBlock; public interface NettyChannelControlBlock extends ChannelControlBlock { @Override NettyUMCChannel getChannel(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/NettyUMCChannel.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.UMCChannel; import io.netty.channel.Channel; import io.netty.channel.ChannelId; public interface NettyUMCChannel extends UMCChannel { @Override Channel getNativeHandle(); @Override ChannelId getChannelID() ; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/SharedConnectionArguments.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger; public abstract class SharedConnectionArguments implements MCConnectionArguments { protected String mszHost; protected short mnPort; protected int mnKeepAliveTimeout; protected int mnSocketTimeout; protected boolean mbEnableHeartbeat; protected long mnHeartbeatInterval; public SharedConnectionArguments( JSONObject args ) { this.mszHost = args.optString( "host", null ); this.mnPort = (short) args.optInt( "port", -1 ); this.mnKeepAliveTimeout = args.optInt( "KeepAliveTimeout" ); this.mnSocketTimeout = args.optInt( "SocketTimeout", 800 ); this.mbEnableHeartbeat = args.optBoolean( "EnableHeartbeat", false ); this.mnHeartbeatInterval = args.optLong( "HeartbeatInterval", 10000 ); // 10s } public SharedConnectionArguments( ArchAsyncMessenger args ) { this( args.getSectionConf() ); } @Override public String getHost() { return this.mszHost; } @Override public void setHost( String host ) { this.mszHost = host; } @Override public short getPort() { return this.mnPort; } @Override public void setPort( short port ) { this.mnPort = port; } @Override public int getKeepAliveTimeout() { return this.mnKeepAliveTimeout; } @Override public void setKeepAliveTimeout( int keepAliveTimeout ) { this.mnKeepAliveTimeout = keepAliveTimeout; } @Override public int getSocketTimeout() { return this.mnSocketTimeout; } @Override public void setSocketTimeout( int socketTimeout ) { this.mnSocketTimeout = socketTimeout; } @Override public boolean isEnableHeartbeat() { return this.mbEnableHeartbeat; } @Override public void setHeartbeatState( boolean enable ) { this.mbEnableHeartbeat = enable; } @Override public void setHeartbeatInterval( long heartbeatIntervalMills ) { this.mnHeartbeatInterval = heartbeatIntervalMills; } @Override public long getHeartbeatInterval() { return this.mnHeartbeatInterval; } } ================================================ FILE: 
Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/StandardRemoteUserAuthentication.java ================================================ package com.pinecone.hydra.umc.wolf; public class StandardRemoteUserAuthentication implements MCSecurityAuthentication { protected String mszUsername; protected String mszDomain; protected String mszPassword; public StandardRemoteUserAuthentication( String szUsername, String szDomain, String szPassword ) { this.mszUsername = szUsername; this.mszDomain = szDomain; this.mszPassword = szPassword; } public StandardRemoteUserAuthentication( String szUsername, String szPassword ) { this( szUsername, "", szPassword ); } @Override public String getUsername() { return this.mszUsername; } @Override public void setUsername( String username ) { this.mszUsername = username; } @Override public String getDomain() { return this.mszDomain; } @Override public void setDomain( String domain ) { this.mszDomain = domain; } @Override public String getPassword() { return this.mszPassword; } @Override public void setPassword( String password ) { this.mszPassword = password; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfAsyncMsgHandleAdapter.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.AsyncMsgHandleAdapter; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import io.netty.channel.ChannelHandlerContext; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umct.UMCTExpressHandler; public interface UlfAsyncMsgHandleAdapter extends AsyncMsgHandleAdapter { default void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { this.onSuccessfulMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } ); } default void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { this.onErrorMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } ); } default void onError( ChannelHandlerContext ctx, Throwable cause ) { this.onError( (Object) ctx, cause ); } static UlfAsyncMsgHandleAdapter wrap( UMCTExpressHandler handler ) { return new UlfAsyncMsgHandleAdapter() { @Override public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { handler.onSuccessfulMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } ); } @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { handler.onSuccessfulMsgReceived( medium, transmit, receiver, msg, args ); } @Override public void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { handler.onErrorMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } ); } @Override public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { 
handler.onErrorMsgReceived( medium, transmit, receiver, msg, args ); } @Override public void onError( ChannelHandlerContext ctx, Throwable cause ) { handler.onError( (Object) ctx, cause ); } @Override public void onError( Object data, Throwable cause ) { handler.onError( data, cause ); } }; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfBytesTransferMessage.java ================================================ package com.pinecone.hydra.umc.wolf; import java.util.Map; import com.pinecone.hydra.umc.msg.ArchBytesTransferMessage; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.UMCHead; public class UlfBytesTransferMessage extends ArchBytesTransferMessage { public UlfBytesTransferMessage( UMCHead head ) { super( head ); } public UlfBytesTransferMessage( UMCHead head, byte[] sBytesBody ) { super( head, sBytesBody ); } public UlfBytesTransferMessage( UMCHead head, String szStringBody ) { this( head, szStringBody.getBytes() ); } public UlfBytesTransferMessage( Map joExHead, byte[] sBytesBody, int controlBits ) { super( joExHead, sBytesBody, controlBits ); } public UlfBytesTransferMessage( Map joExHead, String szStringBody, int controlBits ) { this( joExHead, szStringBody.getBytes(), controlBits ); } public UlfBytesTransferMessage( Map joExHead, byte[] sBytesBody ) { this( joExHead, sBytesBody, 0 ); } public UlfBytesTransferMessage( Map joExHead, String szStringBody ) { this( joExHead, szStringBody.getBytes(), 0 ); } public UlfBytesTransferMessage( Object exHead, ExtraEncode encode, byte[] sBytesBody, int controlBits ) { super( exHead, encode, sBytesBody, controlBits ); } public UlfBytesTransferMessage( Object exHead, ExtraEncode encode, String szStringBody, int controlBits ) { this( exHead, encode, szStringBody.getBytes(), controlBits ); } public UlfBytesTransferMessage( Object exHead, byte[] sBytesBody ) { this( exHead, ExtraEncode.Prototype, sBytesBody, 0 ); } public UlfBytesTransferMessage( Object exHead, String szStringBody ) { this( exHead, ExtraEncode.Prototype, szStringBody, 0 ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfChannel.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.framework.system.Nullable; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umc.wolf.client.WolfMCClient; import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import io.netty.channel.EventLoopGroup; import io.netty.util.AttributeKey; import java.io.IOException; import java.net.SocketAddress; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; public class UlfChannel extends ArchUMCChannel { protected EventLoopGroup mExecutorGroup ; protected Bootstrap mBootstrap ; public UlfChannel( MessageNode node ) { super( node ); if( node instanceof WolfMCClient) { WolfMCClient messenger = (WolfMCClient) node; this.mExecutorGroup = messenger.getEventLoopGroup(); this.mBootstrap = messenger.getBootstrap(); } } // Auto set address while connection. 
public UlfChannel( MessageNode node, Channel nativeChannel, @Nullable SocketAddress address ) { super( node, nativeChannel, address ); } public UlfChannel( MessageNode node, Channel nativeChannel ) { this( node, nativeChannel, null ); } public EventLoopGroup getExecutorGroup() { return this.mExecutorGroup; } public Bootstrap getBootstrap() { return this.mBootstrap; } @Override public void reconnect( long mils ) throws IOException { if ( this.isShutdown() ) { ChannelFuture future = this.toConnect( this.getAddress() ).getLastChannelFuture(); CompletableFuture completableFuture = new CompletableFuture<>(); future.addListener(new ChannelFutureListener() { @Override public void operationComplete( ChannelFuture channelFuture ) throws Exception { try { completableFuture.complete( null ); } catch (Exception e) { completableFuture.completeExceptionally( e ); } } }); try { if ( mils != -1 ) { future.get( mils, TimeUnit.MILLISECONDS ); } else { future.get(); } } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); throw new IOException( e ); } catch ( TimeoutException | ExecutionException e ) { throw new IOException( e.getCause() ); } try{ ( (Slf4jTraceable) this.getParentMessageNode() ).getLogger().info( "[ChannelReconnect] ", this.getNativeHandle().id(), this.getAddress() ); } catch ( ClassCastException ignore ) { // Ignore them. } } } @Override public void reconnect() throws IOException { this.reconnect( -1 ); } public static void copyChannelAttr( Channel leg, Channel neo, String key ) { Object val = leg.attr( AttributeKey.valueOf( key ) ).get(); if ( val != null ) { neo.attr( AttributeKey.valueOf( key ) ).set( val ); } } public ArchUMCChannel toConnect( SocketAddress address ) { this.mAddress = address; this.mLastChannelFuture = this.getBootstrap().connect( address ); Channel channel = this.getLastChannelFuture().channel(); if ( this.mChannel != null ) { // Reconnect Object ccb = this.mChannel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get(); channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).set( ccb ); WolfMCStandardConstants.copyChannelStandardAttrs( this.mChannel, channel ); } this.mChannel = channel; this.mChannelID = this.mChannel.id(); return this; } @Override public void release() { super.release(); this.mExecutorGroup = null; this.mBootstrap = null; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfChannelStatus.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ChannelStatus; public enum UlfChannelStatus implements ChannelStatus { IDLE ( 0x00, "Idle" ), WAITING_FOR_SEND ( 0x01, "WaitingSend" ), WAITING_FOR_RECEIVE ( 0x02, "WaitingReceive" ), WAITING_FOR_RECALL_FUN ( 0x03, "WaitingRecallFun" ), WAITING_THREAD_RESUME ( 0x04, "WaitingThreadResume" ), FORCE_SYNCHRONIZED ( 0x05, "ForceSynchronized" ), WAITING_FOR_SHUTDOWN ( 0x06, "WaitingShutdown" ), SHUTDOWN ( 0x07, "Shutdown" ), WAITING_PASSIVE_SEND ( 0xA1, "WaitingPassiveSend" ), WAITING_PASSIVE_RECEIVE ( 0xA2, "WaitingPassiveReceive" ), ; public static final int PassiveStatusMask = 0xA0; private final int value; private final String name; UlfChannelStatus( int value, String name ){ this.value = value; this.name = name; } @Override public String getName(){ return this.name; } @Override public int getValue() { return this.value; } @Override public byte getByteValue() { return (byte) this.value; } 
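// Predicates below: WAITING_FOR_SHUTDOWN and SHUTDOWN are the terminal statuses; statuses carrying the PassiveStatusMask bits (the WAITING_PASSIVE_* pair) are treated as passive and excluded by isAsynAvailable.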
@Override public boolean isIdle() { return this == UlfChannelStatus.IDLE; } @Override public boolean isTerminated() { /* Both shutdown phases are terminal. */ return this == UlfChannelStatus.WAITING_FOR_SHUTDOWN || this == UlfChannelStatus.SHUTDOWN; } @Override public boolean isWaitingForIOCompleted(){ return this.value >= UlfChannelStatus.WAITING_FOR_SEND.value && this.value <= UlfChannelStatus.WAITING_FOR_RECEIVE.value; } @Override public boolean isWaitingForLocalCompleted(){ return this.value >= UlfChannelStatus.WAITING_FOR_RECALL_FUN.value && this.value <= UlfChannelStatus.WAITING_THREAD_RESUME.value; } @Override public boolean isAsynAvailable() { return !this.isTerminated() && this != UlfChannelStatus.FORCE_SYNCHRONIZED && this != UlfChannelStatus.WAITING_FOR_SEND && ( (this.value & PassiveStatusMask) != PassiveStatusMask ); } @Override public boolean isSyncAvailable() { return !this.isTerminated() || this.isIdle(); } @Override public String toString() { return this.getName(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfIOLoadBalanceStrategy.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.io.IOLoadBalanceStrategy; import com.pinecone.hydra.umc.msg.ChannelControlBlock; public interface UlfIOLoadBalanceStrategy extends IOLoadBalanceStrategy { boolean match( ChannelControlBlock ccb ); UlfIOLoadBalanceStrategy clone(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfIdleFirstBalanceStrategy.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.IdleFirstBalanceStrategy; public class UlfIdleFirstBalanceStrategy extends IdleFirstBalanceStrategy implements UlfIOLoadBalanceStrategy { public UlfIdleFirstBalanceStrategy() { super(); } @Override public boolean match( ChannelControlBlock ccb ) { return ccb.getChannelStatus().isIdle(); } @Override public boolean matched( Object condition ) { return this.match( (ChannelControlBlock) condition ); } @Override public UlfIdleFirstBalanceStrategy clone() { return (UlfIdleFirstBalanceStrategy)super.clone(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfInformMessage.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ArchInformMessage; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.UMCHead; import java.util.Map; public class UlfInformMessage extends ArchInformMessage { public UlfInformMessage( UMCHead head ) { super(head); } public UlfInformMessage( Map joExHead, int controlBits ) { super( joExHead, controlBits ); } public UlfInformMessage( Object protoExHead , int controlBits ) { super( protoExHead, controlBits ); } public UlfInformMessage( Map joExHead ) { super( joExHead ); } public UlfInformMessage( Object protoExHead, ExtraEncode encode ) { super( protoExHead, encode ); } public UlfInformMessage( Object protoExHead ) { super( protoExHead ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfInstructMessage.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ArchUMCMessage; import com.pinecone.hydra.umc.msg.ExtraEncode;
import com.pinecone.hydra.umc.msg.InformMessage; import com.pinecone.hydra.umc.msg.UMCMethod; public class UlfInstructMessage extends ArchUMCMessage implements InformMessage { public UlfInstructMessage( UMCMethod method, int controlBits ) { super( (Object) null, ExtraEncode.Iussum, method, controlBits ); } public UlfInstructMessage( int controlBits ) { this( UMCMethod.UNDEFINED, controlBits ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMCReceiver.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ArchUMCReceiver; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.TransferMessage; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.hydra.umc.msg.UMCMessage; import java.io.IOException; public class UlfMCReceiver extends ArchUMCReceiver { public UlfMCReceiver( Medium messageSource ) { super( messageSource ); } public UMCMessage readTransferMsg( boolean bAllBytes ) throws IOException { UMCHead head = this.readTransferHead(); TransferMessage message; if( bAllBytes ) { message = new UlfBytesTransferMessage( head ); } else { message = new UlfStreamTransferMessage( head ); } this.onlyReadTransferBody( message, bAllBytes ); return message; } @Override public UMCMessage readTransferMsg() throws IOException { return this.readTransferMsg( false ); } @Override public UMCMessage readTransferMsgBytes() throws IOException { return this.readTransferMsg( true ); } @Override public UMCMessage readMsg() throws IOException { return this.readMsg( false, UlfMessageStereotypes.Default ); } @Override public UMCMessage readMsgBytes() throws IOException { return this.readMsg( true, UlfMessageStereotypes.Default ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMCTransmit.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ArchUMCTransmit; import com.pinecone.hydra.umc.msg.Medium; public class UlfMCTransmit extends ArchUMCTransmit { public UlfMCTransmit( Medium messageSource ) { super( messageSource ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMessageNode.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.CascadeMessageNode; import com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor; import com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler; public interface UlfMessageNode extends CascadeMessageNode { ChannelPool getChannelPool(); void close(); UlfMessageNode registerChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException; UlfMessageNode deregisterChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException; UlfMessageNode registerArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException; UlfMessageNode deregisterArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMessageStereotypes.java ================================================ package com.pinecone.hydra.umc.wolf; import 
com.pinecone.hydra.umc.msg.MessageStereotypes; public class UlfMessageStereotypes implements MessageStereotypes { public static final MessageStereotypes Default = new UlfMessageStereotypes(); protected Class putType = UlfInformMessage.class; protected Class postBytesType = UlfBytesTransferMessage.class; protected Class postStreamType = UlfStreamTransferMessage.class; @Override public Class putType() { return this.putType; } @Override public Class postBytesType() { return this.postBytesType; } @Override public Class postStreamType() { return this.postStreamType; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfStreamTransferMessage.java ================================================ package com.pinecone.hydra.umc.wolf; import java.io.InputStream; import java.util.Map; import com.pinecone.hydra.umc.msg.ArchStreamTransferMessage; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.UMCHead; public class UlfStreamTransferMessage extends ArchStreamTransferMessage { public UlfStreamTransferMessage( UMCHead head ) { super( head ); } public UlfStreamTransferMessage( UMCHead head, InputStream inStream ) { super( head, inStream ); } public UlfStreamTransferMessage( Map joExHead, InputStream inStream, int controlBits ) { super( joExHead, inStream, controlBits ); } public UlfStreamTransferMessage( Map joExHead, InputStream inStream ) { super( joExHead, inStream, 0 ); } public UlfStreamTransferMessage(Object exHead, ExtraEncode encode, InputStream inStream, int controlBits ) { super( exHead, encode, inStream, controlBits ); } public UlfStreamTransferMessage( Object exHead, InputStream inStream ) { this( exHead, ExtraEncode.Prototype, inStream, 0 ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UnsetUlfAsyncMsgHandleAdapter.java ================================================ package com.pinecone.hydra.umc.wolf; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import io.netty.channel.ChannelHandlerContext; /** * UnsetUlfAsyncMsgHandleAdapter * Dummy UlfAsyncMsgHandleAdapter */ public final class UnsetUlfAsyncMsgHandleAdapter implements UlfAsyncMsgHandleAdapter { private MessageNode mMessageNode; private Logger mLogger; public UnsetUlfAsyncMsgHandleAdapter( MessageNode node ) { this.mMessageNode = node; if ( this.mMessageNode instanceof Slf4jTraceable ) { this.mLogger = ((Slf4jTraceable) this.mMessageNode).getLogger(); } else { this.mLogger = LoggerFactory.getLogger( this.getClass() ); } } @Override public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) { this.mLogger.warn( "Warning, MsgHandleAdapter is unset. Info => {}, {}", block.getChannel().getChannelID(), msg ); } @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { this.mLogger.warn( "Warning, MsgHandleAdapter is unset. 
Info => {}", msg ); } @Override public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { this.mLogger.warn( "Warning, MsgHandleAdapter is unset. Info => {}", msg ); } @Override public void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) { this.mLogger.warn( "Warning, MsgHandleAdapter is unset. Info => {}", msg ); } @Override public void onError( ChannelHandlerContext ctx, Throwable cause ) { this.onError( (Object) ctx, cause ); } @Override public void onError( Object data, Throwable cause ) { this.mLogger.error( "UnsetMsgHandleAdapter. Error => {}, {}", cause.getMessage(), cause.toString() ); if( !( cause instanceof Exception ) ) { throw new ProvokeHandleException( cause ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCInitializationException.java ================================================ package com.pinecone.hydra.umc.wolf; public class WolfMCInitializationException extends WolfMCServiceException { public WolfMCInitializationException() { super(); } public WolfMCInitializationException( String message ) { super(message); } public WolfMCInitializationException( String message, Throwable cause ) { super(message, cause); } public WolfMCInitializationException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCNode.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.regimentation.CascadeNodus; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelHandleException; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder; import com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit; import com.pinecone.hydra.umc.msg.handler.GenericErrorMessageAudit; import com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler; import com.pinecone.hydra.umct.UMCTExpressHandler; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; import io.netty.channel.ChannelHandlerContext; public abstract class WolfMCNode extends WolfNettyServgram implements UlfMessageNode { protected ExtraHeadCoder mExtraHeadCoder ; protected final ReentrantLock mMajorIOLock = new ReentrantLock(); protected ErrorMessageAudit mErrorMessageAudit ; protected UlfMessageNode mParentNode ; protected Namespace mNodeNamespace ; protected long mnMessageNodeId ; protected List mChannelInactiveHandlers ; protected List mArrivedDataInterceptors ; public WolfMCNode( long nodeId, String szName, Processum parentProcess, UlfMessageNode 
parent, Map joConf, @Nullable ExtraHeadCoder extraHeadCoder ) { super( szName, parentProcess, joConf ); this.mExtraHeadCoder = extraHeadCoder; this.mErrorMessageAudit = new GenericErrorMessageAudit( this ); this.mParentNode = parent; this.mnMessageNodeId = nodeId; this.mChannelInactiveHandlers = new ArrayList<>(); this.mArrivedDataInterceptors = new ArrayList<>(); this.setTargetingName( szName ); } public WolfMCNode(long nodeId, String szName, Hydrogen system, Map joConf, @Nullable ExtraHeadCoder extraHeadCoder ) { this( nodeId, szName, system, null, joConf, extraHeadCoder ); } protected void checkDeregisterHandlerStatus() throws IllegalStateException { if ( !this.isShutdown() ) { throw new IllegalStateException( "Service is already running." ); } } @Override public UlfMessageNode registerChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mChannelInactiveHandlers.add( handler ); return this; } @Override public UlfMessageNode deregisterChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mChannelInactiveHandlers.remove( handler ); return this; } @Override public UlfMessageNode registerArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mArrivedDataInterceptors.add( handler ); return this; } @Override public UlfMessageNode deregisterArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mArrivedDataInterceptors.remove( handler ); return this; } protected boolean tryInvokeOrInterceptArrivedData( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws ChannelHandleException { for( ChannelDataInterceptor h : this.mArrivedDataInterceptors ) { if ( h.interceptAfterDataArrived( medium, block, msg, ctx, rawMsg ) ){ return true; } } return false; } @Override public CascadeNodus parent() { return this.mParentNode; } @Override public Namespace getTargetingName() { return this.mNodeNamespace; } @Override public void setTargetingName( Namespace name ) { this.mNodeNamespace = name; } @Override public ExtraHeadCoder getExtraHeadCoder() { return this.mExtraHeadCoder; } @Override public long getMessageNodeId() { return this.mnMessageNodeId; } public ReentrantLock getMajorIOLock() { return this.mMajorIOLock; } public WolfMCNode apply( Map joConf ) { this.setConfig( joConf ); try{ if( this.mExtraHeadCoder == null ) { String szExtraHeadCoder = (String) joConf.get( "ExtraHeadCoder" ); if( StringUtils.isEmpty( szExtraHeadCoder ) ) { this.mExtraHeadCoder = new GenericExtraHeadCoder() ; } else { this.mExtraHeadCoder = (ExtraHeadCoder) DynamicFactory.DefaultFactory.loadInstance( szExtraHeadCoder, null, null ); } String szDefaultExtraEncode = (String) joConf.get( "DefaultExtraEncode" ); if( StringUtils.isEmpty( szDefaultExtraEncode ) ) { this.mExtraHeadCoder.setDefaultEncode( ExtraEncode.JSONString ); } else { this.mExtraHeadCoder.setDefaultEncode( ExtraEncode.valueOf( szDefaultExtraEncode ) ); } } } catch ( ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { throw new ProxyProvokeHandleException( e ); } return this; } public abstract WolfMCNode apply( UlfAsyncMsgHandleAdapter fnRecipientMsgHandler ); public WolfMCNode apply( UMCTExpressHandler handler ){ this.apply( 
UlfAsyncMsgHandleAdapter.wrap( handler ) ); return this; } @Override public ErrorMessageAudit getErrorMessageAudit() { return this.mErrorMessageAudit; } @Override public void setErrorMessageAudit( ErrorMessageAudit audit ) { this.mErrorMessageAudit = audit; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCServiceException.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.hydra.umc.msg.UMCServiceException; public class WolfMCServiceException extends UMCServiceException { public WolfMCServiceException() { super(); } public WolfMCServiceException( String message ) { super(message); } public WolfMCServiceException( String message, Throwable cause ) { super(message, cause); } public WolfMCServiceException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCStandardConstants.java ================================================ package com.pinecone.hydra.umc.wolf; import io.netty.channel.Channel; public abstract class WolfMCStandardConstants { public static final String CB_CONTROL_BLOCK_KEY = "ControlBlock"; public static final String CB_ASYNC_MSG_HANDLE_KEY = "AsyncMsgHandle"; public static final String CB_ASY_EXCLUSIVE_HANDLE_KEY = "AsyncExclusiveHandle"; public static final String CB_EXTERNAL_CHANNEL_KEY = "ExternalChannel"; public static void copyChannelStandardAttrs( Channel leg, Channel neo ) { UlfChannel.copyChannelAttr( leg, neo, WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ); UlfChannel.copyChannelAttr( leg, neo, WolfMCStandardConstants.CB_ASY_EXCLUSIVE_HANDLE_KEY ); UlfChannel.copyChannelAttr( leg, neo, WolfMCStandardConstants.CB_EXTERNAL_CHANNEL_KEY ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfNettyServgram.java ================================================ package com.pinecone.hydra.umc.wolf; import com.pinecone.framework.system.IrrationalProvokedException; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.servgram.ArchServgramium; import com.pinecone.framework.system.RedirectRuntimeException; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.umc.msg.UMCException; import com.pinecone.hydra.umc.msg.UMCServiceException; import java.io.IOException; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; public abstract class WolfNettyServgram extends ArchServgramium { protected JSONObject mjoSectionConf; protected final Object mPrimaryThreadJoinMutex = new Object(); // Joins the primary thread, waiting for the client sub-system to terminate. protected final Object mOuterThreadDetachMutex = new Object(); // Waits for the primary thread to initialize. [Outer refers to the invoking thread, e.g.
usually the main thread] protected ReentrantLock mStateMutex = new ReentrantLock(); public WolfNettyServgram( String szName, Processum parentProcess, Map joConf ) { super( szName, parentProcess ); this.setConfig( joConf ); } public JSONObject getSectionConf() { return this.mjoSectionConf; } @Override public Hydrogen parentSystem() { return (Hydrogen) super.parentSystem(); } public abstract boolean isShutdown() ; @Override public abstract boolean isTerminated() ; protected void setConfig( Map joConf ) { if( joConf instanceof JSONObject ) { this.mjoSectionConf = (JSONObject) joConf; } else { this.mjoSectionConf = new JSONMaptron( joConf, true ); } } protected void unlockOuterThreadDetachMutex() { synchronized ( this.mOuterThreadDetachMutex ) { this.mOuterThreadDetachMutex.notify(); } } protected void preparePrimaryThread( Thread primaryThread ) { primaryThread.setName( ( this.className() + "-primary-" + primaryThread.getName() ).toLowerCase() ); this.setThreadAffinity( primaryThread ); } protected void joinOuterThread() { synchronized ( this.mOuterThreadDetachMutex ) { try { this.mOuterThreadDetachMutex.wait(); // Waiting for the primary thread to initialize. // This mutex will not lock the parent thread; if you wish to lock it, add more locks. // If the primary thread executed successfully, do nothing and go back to the parent thread. // If the primary thread threw an exception, redirect it to the parent thread. } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); throw new ProvokeHandleException( e ); } } } protected void redirectException2ParentThread( Exception previousException ) throws IOException, UMCServiceException { if( previousException instanceof RuntimeException ) { throw new RedirectRuntimeException( previousException ); } else if( previousException instanceof IOException ) { throw (IOException) previousException; } else if( previousException instanceof UMCServiceException ) { throw (UMCServiceException) previousException; } else if( previousException instanceof UMCException ) { throw new UMCServiceException( previousException ); } else if( previousException != null ){ throw new IrrationalProvokedException( previousException ); // This should never happen.
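// Illustrative call pattern (a sketch, mirroring WolfMCClient::execute further below): the primary
// thread stores its failure into a captured one-slot array, and the outer thread rethrows it after joining:
//   Exception[] lastException = new Exception[] { null };
//   ... the primary thread runs and assigns lastException[0] on failure ...
//   this.joinOuterThread();
//   this.redirectException2ParentThread( lastException[0] );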
} } @Override public String toString() { return String.format( "[object %s(0x%s)<\uD83D\uDC3A>]", this.className() , Integer.toHexString( this.hashCode() ) ); } @Override public String toJSONString() { return "\"" + this.toString() + "\""; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ArchAsyncMessenger.java ================================================ package com.pinecone.hydra.umc.wolf.client; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelId; import io.netty.channel.EventLoop; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.umc.msg.AsyncMessenger; import com.pinecone.hydra.umc.msg.ChannelAllocateException; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.MediumTerminationException; import com.pinecone.hydra.umc.msg.Messenger; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfIdleFirstBalanceStrategy; import com.pinecone.hydra.umc.wolf.UlfMessageNode; import com.pinecone.hydra.umc.wolf.WolfMCNode; import java.io.IOException; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; public abstract class ArchAsyncMessenger extends WolfMCNode implements AsyncMessenger, UlfMessageNode { protected final ReentrantLock mSynRequestLock = new ReentrantLock(); protected ProactiveParallelFairSyncChannelPool mChannelPool ; //protected BlockingDeque mSyncRetMsgQueue = new LinkedBlockingDeque<>(); public ArchAsyncMessenger( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map joConf, ExtraHeadCoder extraHeadCoder ) { super( nodeId, szName, parentProcess, parent, joConf, extraHeadCoder ); this.mChannelPool = new ProactiveParallelFairSyncChannelPool<>( this.mSynRequestLock, new UlfIdleFirstBalanceStrategy() ); //TODO //this.makeNameAndId(); } public ArchAsyncMessenger(long nodeId, String szName, Hydrogen system, Map joConf, ExtraHeadCoder extraHeadCoder ) { this( nodeId, szName, system, null, joConf, extraHeadCoder ); } @Override public ProactiveParallelFairSyncChannelPool getChannelPool() { return this.mChannelPool; } Lock getSynRequestLock() { return this.mSynRequestLock; } protected long getSyncWaitingMillis() { return ArchAsyncMessenger.getSyncWaitingMillis( this ); } UlfAsyncMessengerChannelControlBlock nextSynChannelCB() throws IOException { UlfAsyncMessengerChannelControlBlock block = (UlfAsyncMessengerChannelControlBlock) this.getChannelPool().nextSyncChannel( this.getChannelPool().getMajorWaitTimeout() * 2 ); if( block == null ) { throw new ChannelAllocateException( "Channel allocate failed." ); } reconnect( block, this.getSyncWaitingMillis() ); return block; } UlfAsyncMessengerChannelControlBlock nextAsyChannelCB() throws IOException { UlfAsyncMessengerChannelControlBlock block = (UlfAsyncMessengerChannelControlBlock) this.getChannelPool().nextAsynChannel( this.getChannelPool().getMajorWaitTimeout() * 2 ); if( block == null ) { throw new ChannelAllocateException( "Channel allocate failed." 
); } reconnect( block, this.getSyncWaitingMillis() ); return block; } // BlockingDeque getSyncRetMsgQueue() { // return this.mSyncRetMsgQueue; // } @Override public UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered, long nWaitTime ) throws IOException { return this.nextSynChannelCB().sendSyncMsg( request, bNoneBuffered, nWaitTime ); } @Override public void sendAsynMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException { this.nextAsyChannelCB().sendAsynMsg( request, bNoneBuffered ); } @Override public void sendAsynMsg( UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException { UlfAsyncMessengerChannelControlBlock cb = this.nextAsyChannelCB(); if ( handler != null ) { // If the handler is null, do not set it; otherwise, it will disrupt the subsequent handler-setting pipeline. // Additionally, a no-response request will not affect the later pipeline. cb.pushMsgHandle( handler ); //cb.getChannel().getNativeHandle().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( handler ); } cb.sendAsynMsg( request, bNoneBuffered ); } protected static void reconnect( ChannelControlBlock block, long mils ) throws IOException { if( block.isShutdown() ) { block.getChannel().reconnect( mils ); ( (UlfMessageNode)block.getParentMessageNode() ).getChannelPool().setIdleChannel( block ); } } protected static long getSyncWaitingMillis( Messenger messenger ) { return messenger.getConnectionArguments().getSyncWaitingMillis(); } public static void reconnect( ChannelControlBlock block, Messenger messenger ) throws IOException { long mils = ArchAsyncMessenger.getSyncWaitingMillis( messenger ); ArchAsyncMessenger.reconnect( block, mils ); } public static void reconnect( ChannelControlBlock block, Messenger messenger, Object context ) throws IOException, MediumTerminationException { if ( context instanceof ChannelHandlerContext ) { ChannelHandlerContext ctx = (ChannelHandlerContext) context; EventLoop loop = ctx.channel().eventLoop(); if ( !loop.isShuttingDown() ) { reconnect( block, messenger ); } else { throw new MediumTerminationException( "Medium has already terminated."
); } } else { reconnect( block, messenger ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ClientConnectArguments.java ================================================ package com.pinecone.hydra.umc.wolf.client; import com.pinecone.hydra.umc.wolf.MCConnectionArguments; public interface ClientConnectArguments extends MCConnectionArguments { int getParallelChannels(); void setParallelChannels( int parallelChannels ); boolean isAutoReconnect(); void setAutoReconnect( boolean autoReconnect ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ClientConnectionArguments.java ================================================ package com.pinecone.hydra.umc.wolf.client; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.wolf.SharedConnectionArguments; public class ClientConnectionArguments extends SharedConnectionArguments implements ClientConnectArguments { protected int mnParallelChannels; protected boolean mbAutoReconnect; public ClientConnectionArguments( JSONObject args ) { super( args ); this.mnParallelChannels = args.optInt( "ParallelChannels", 1 ); this.mbAutoReconnect = args.optBoolean( "AutoReconnect", false ); } public ClientConnectionArguments( ArchAsyncMessenger args ) { this( args.getSectionConf() ); } @Override public int getParallelChannels() { return this.mnParallelChannels; } @Override public void setParallelChannels( int parallelChannels ) { this.mnParallelChannels = parallelChannels; } @Override public boolean isAutoReconnect() { return this.mbAutoReconnect; } @Override public void setAutoReconnect( boolean autoReconnect ) { this.mbAutoReconnect = autoReconnect; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/MessengerNettyChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.wolf.client; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.wolf.*; import io.netty.channel.Channel; import java.io.IOException; import java.util.concurrent.BlockingDeque; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; public class MessengerNettyChannelControlBlock extends ArchChannelControlBlock implements UlfAsyncMessengerChannelControlBlock { protected ArchAsyncMessenger mParentMessenger; protected BlockingDeque mSyncRetMsgQueue = new LinkedBlockingDeque<>(); public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, UlfChannel channel, boolean bForceSyncMode ) { super( messenger, channel, bForceSyncMode ); this.mParentMessenger = messenger; } public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, Channel nativeChannel, boolean bForceSyncMode ) { this( messenger, new UlfChannel( messenger, nativeChannel ), bForceSyncMode ); } public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, Channel nativeChannel ) { this( messenger, nativeChannel, false ); } public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, boolean bForceSyncMode ) { this( messenger, new UlfChannel( messenger ), bForceSyncMode ); } public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger ) { this( messenger, false ); } 
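// Synchronous round-trip, sketched (grounded in WolfMCClient's channelRead handler further below):
// 1. sendSyncMsg() marks the channel FORCE_SYNCHRONIZED and transmits the request;
// 2. the Netty read handler sees FORCE_SYNCHRONIZED and pushes the reply into mSyncRetMsgQueue;
// 3. onlySendSyncMsg() polls that queue with a timeout, and on timeout closes and reconnects the
//    channel so that a late reply cannot poison the queue for the next request.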
protected void afterConnectionArrive( Medium medium, boolean bRenew ) { super.afterConnectionArrive( medium, bRenew, this.getSynRequestLock() ); } BlockingDeque getSyncRetMsgQueue() { return this.mSyncRetMsgQueue; } @Override public Lock getSynRequestLock() { return this.getParentMessageNode().getSynRequestLock(); } @Override public ArchAsyncMessenger getParentMessageNode() { return (ArchAsyncMessenger) super.getParentMessageNode(); } protected UMCMessage onlySendSyncMsg( UMCMessage message, boolean bNoneBuffered, long nWaitTime ) throws IOException { UMCMessage msg; this.mTransmit.sendMsg( message, bNoneBuffered ); try{ //msg = this.getParentMessageNode().getSyncRetMsgQueue().poll( nWaitTime, TimeUnit.MILLISECONDS ); msg = this.getSyncRetMsgQueue().poll( nWaitTime, TimeUnit.MILLISECONDS ); if( msg == null ) { // Close the channel, preventing the server from sending late messages that could disrupt the sync deque. try{ this.getChannel().close(); ArchAsyncMessenger.reconnect( this, nWaitTime ); } catch ( ProvokeHandleException e ) { if( e.getCause() instanceof IOException ) { throw new IOException( e ); } } throw new IOException( "Waiting to receive synchronization message timed out [Max -> " + nWaitTime + " millis]." ); } } catch ( InterruptedException e ) { msg = null; } return msg; } @Override public UMCMessage sendSyncMsg( UMCMessage message, boolean bNoneBuffered, long nWaitTime ) throws IOException { if( this.mbForceSyncMode ) { return this.onlySendSyncMsg( message, bNoneBuffered, nWaitTime ); } else { this.getSynRequestLock().lock(); UMCMessage msg = null; try{ this.mbInSyncMode = true; this.getChannel().setChannelStatus( UlfChannelStatus.FORCE_SYNCHRONIZED ); msg = this.onlySendSyncMsg( message, bNoneBuffered, nWaitTime ); this.getParentMessageNode().getChannelPool().setIdleChannel( this ); // This will set the channel status. this.mbInSyncMode = false; } finally { this.getSynRequestLock().unlock(); } return msg; } } @Override public void sendAsynMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException { super.sendMsg( request, bNoneBuffered ); } @Override public void release() { super.release(); this.mParentMessenger = null; this.mSyncRetMsgQueue = null; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ProactiveParallelFairChannelPool.java ================================================ package com.pinecone.hydra.umc.wolf.client; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.FairChannelPool; import com.pinecone.hydra.umc.msg.MappedChannelPool; import com.pinecone.hydra.umc.msg.UMCChannel; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.hydra.umc.wolf.ArchChannelPool; import com.pinecone.hydra.umc.wolf.UlfChannelStatus; import com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy; import java.util.Map; import java.util.Queue; import java.util.concurrent.locks.ReentrantReadWriteLock; public class ProactiveParallelFairChannelPool extends ArchChannelPool implements FairChannelPool, MappedChannelPool { protected ReentrantReadWriteLock mPoolIOLock = new ReentrantReadWriteLock(); protected ChannelControlBlock mExclusiveSyncChannelCB; // The exclusive channel is for synchronized messages only.
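// Bookkeeping sketch: mChannelMapQueue holds every pooled channel keyed by its channel ID, while
// mChannelIdleQueue holds only the idle subset. setIdleChannel() re-inserts a block into the idle
// queue; queryNextChannel() pops the idle queue first and falls back to the load-balance strategy.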
protected UlfIOLoadBalanceStrategy mLoadBalanceStrategy; protected long mnMajorWaitTimeout = 5000; protected LinkedTreeMap mChannelMapQueue; protected LinkedTreeMap mChannelIdleQueue; protected final Object mPullQueryLock = new Object(); public ProactiveParallelFairChannelPool( UlfIOLoadBalanceStrategy strategy ) { this.mLoadBalanceStrategy = strategy; this.mChannelMapQueue = new LinkedTreeMap<>(); this.mChannelIdleQueue = new LinkedTreeMap<>(); } public ProactiveParallelFairChannelPool setExclusiveSyncChannel( ChannelControlBlock exclusiveSyncChannelCB ) { this.mExclusiveSyncChannelCB = exclusiveSyncChannelCB; return this; } public UMCChannel getExclusiveSyncChannel() { if( this.mExclusiveSyncChannelCB != null ) { return this.mExclusiveSyncChannelCB.getChannel(); } return null; } @SuppressWarnings( "unchecked" ) protected ID warpKey( Object id ) { return (ID)id; } @Override public ChannelControlBlock queryChannelById( Object id ) { return this.mChannelMapQueue.get( this.warpKey( id ) ); } @Override public void onlyRemove( Object id ) { this.mChannelMapQueue.remove ( this.warpKey( id ) ); this.mChannelIdleQueue.remove( this.warpKey( id ) ); } @Override public long getMajorWaitTimeout() { return this.mnMajorWaitTimeout; } @Override public ProactiveParallelFairChannelPool setMajorWaitTimeout( long nMillisTimeout ){ this.mnMajorWaitTimeout = nMillisTimeout; return this; } // [1, 2] -> [1, 2, 3] @Override public ProactiveParallelFairChannelPool pushBack( ChannelControlBlock channel ) { ID id = this.warpKey( channel.getChannel().getChannelID() ); this.mChannelMapQueue.put( id, channel ); this.mChannelIdleQueue.put( id, channel ); return this; } // [1, 2, 3] -> [2, 3] public ChannelControlBlock pop() { return this.mChannelMapQueue.pop().getValue(); } @Override public ChannelControlBlock depriveIdleChannel() { this.mPoolIOLock.writeLock().lock(); try{ ChannelControlBlock qualified = null; for ( Map.Entry kv : this.mChannelMapQueue.entrySet() ) { ChannelControlBlock block = kv.getValue(); if( block.getChannelStatus().isIdle() ) { qualified = block; break; } } if ( qualified != null ) { this.onlyRemove( qualified.getChannel().getChannelID() ); } return qualified; } finally { this.mPoolIOLock.writeLock().unlock(); } } @Override public ProactiveParallelFairChannelPool setIdleChannel( ChannelControlBlock block ) { this.mPoolIOLock.writeLock().lock(); try{ block.getChannel().setChannelStatus( UlfChannelStatus.IDLE ); this.mChannelIdleQueue.put( this.warpKey( block.getChannel().getChannelID() ), block ); //Debug.trace( this.mChannelIdleQueue, this.mChannelIdleQueue.size(), block ); } finally { this.mPoolIOLock.writeLock().unlock(); } return this; } @Override public ChannelPool add( ChannelControlBlock block ) { this.mPoolIOLock.writeLock().lock(); try{ this.pushBack( block ); } finally { this.mPoolIOLock.writeLock().unlock(); } return this; } protected ChannelControlBlock queryNextChannel( long nMillisTimeout, boolean bEager, boolean bSync ) { ChannelControlBlock nextChannel = null; this.mPoolIOLock.readLock().lock(); try { if( this.mChannelMapQueue.isEmpty() ) { return null; } } finally { this.mPoolIOLock.readLock().unlock(); } long nLastTime = System.currentTimeMillis(); while ( true ) { boolean bIsIdleEmpty = this.mChannelIdleQueue.isEmpty(); if( !bIsIdleEmpty ) { // Condition 1: If there is an idle channel, just use it.
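// Double-checked under the write lock below: the emptiness test above ran without the lock, so it
// is re-verified before popping (a check-then-act race guard).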
this.mPoolIOLock.writeLock().lock(); try{ bIsIdleEmpty = this.mChannelIdleQueue.isEmpty(); if ( !bIsIdleEmpty ) { nextChannel = this.mChannelIdleQueue.pop().getValue(); } } finally { this.mPoolIOLock.writeLock().unlock(); } } if ( nextChannel == null ) { // Condition 2: If there are no idle channels, wait and find a balanced channel. // Notice: In the asynchronous case, the producer could over-allocate messages and dump them into the queue of one channel, so that consumers mismatch the produced messages. // Use the LinkedTreeMap queue to sift out repetitive idle channels and keep the queue ordered. try { this.mPoolIOLock.readLock().lock(); if( bSync ) { for ( Map.Entry kv : this.mChannelMapQueue.entrySet() ) { ChannelControlBlock block = kv.getValue(); if( this.mLoadBalanceStrategy.matched( block ) || block.isShutdown() ) { nextChannel = block; break; } } } else { for ( Map.Entry kv : this.mChannelMapQueue.entrySet() ) { ChannelControlBlock block = kv.getValue(); boolean bFirstStrategyMatched = this.mLoadBalanceStrategy.matched( block ); if( bFirstStrategyMatched || block.getChannelStatus().isAsynAvailable() || block.isShutdown() ) { nextChannel = block; break; } } } } finally { this.mPoolIOLock.readLock().unlock(); } } if( nextChannel != null ) { this.mPoolIOLock.writeLock().lock(); try{ ID id = this.warpKey( nextChannel.getChannel().getChannelID() ); this.mChannelMapQueue.remove( id ); this.mChannelMapQueue.put( id, nextChannel ); // push back to queue tail } finally { this.mPoolIOLock.writeLock().unlock(); } break; } if( !bEager ) { try{ // wait() requires owning the monitor of mPullQueryLock. synchronized ( this.mPullQueryLock ) { this.mPullQueryLock.wait( 10 ); } } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); // Just return null. break; } } if( nMillisTimeout > 0 && System.currentTimeMillis() - nLastTime > nMillisTimeout ) { break; } } return nextChannel; } @Override public ChannelControlBlock nextAsynChannel( long nMillisTimeout, boolean bEager ) { return this.queryNextChannel( nMillisTimeout, bEager, false ); } @Override public ChannelControlBlock nextAsynChannel( long nMillisTimeout ) { return this.nextAsynChannel( nMillisTimeout, true ); } @Override public ChannelControlBlock nextAsynChannel() { return this.nextAsynChannel( this.mnMajorWaitTimeout ); } @Override public boolean isEmpty() { this.mPoolIOLock.readLock().lock(); try { return this.mChannelMapQueue.isEmpty(); } finally { this.mPoolIOLock.readLock().unlock(); } } @Override public int size() { this.mPoolIOLock.readLock().lock(); try { return this.mChannelMapQueue.size(); } finally { this.mPoolIOLock.readLock().unlock(); } } @Override public void clear() { this.mPoolIOLock.writeLock().lock(); try { for( ChannelControlBlock block : this.mChannelMapQueue.values() ) { block.close(); block.release(); } this.mChannelMapQueue.clear(); this.mChannelIdleQueue.clear(); } finally { this.mPoolIOLock.writeLock().unlock(); } } @Override public Map getPooledMap() { return this.mChannelMapQueue; } @Override public Queue getMajorQueue() { return this.mChannelMapQueue.toQueue(); } @Override public void remove( ChannelControlBlock ccb ) { this.mPoolIOLock.writeLock().lock(); try { ID id = this.warpKey( ccb.getChannel().getChannelID() ); this.onlyRemove( id ); } finally { this.mPoolIOLock.writeLock().unlock(); } } @Override public void deactivate( ChannelControlBlock ccb ) { this.mPoolIOLock.writeLock().lock(); try { ID id = this.warpKey( ccb.getChannel().getChannelID() ); if( !ccb.getChannel().isShutdown() ) { ccb.close(); ccb.release(); this.onlyRemove( id ); } } finally { this.mPoolIOLock.writeLock().unlock(); } } @Override public ChannelControlBlock
terminate( Object id ) throws InterruptedException { this.mPoolIOLock.writeLock().lock(); ChannelControlBlock block = null; try{ block = this.queryChannelById( id ); if( block != null ) { block.close(); block.release(); } this.onlyRemove( id ); } finally { this.mPoolIOLock.writeLock().unlock(); } return block; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ProactiveParallelFairSyncChannelPool.java ================================================ package com.pinecone.hydra.umc.wolf.client; import java.util.concurrent.locks.Lock; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.SyncFairChannelPool; import com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy; public class ProactiveParallelFairSyncChannelPool extends ProactiveParallelFairChannelPool implements SyncFairChannelPool { protected Lock mSynRequestLock; public ProactiveParallelFairSyncChannelPool( Lock synRequestLock, UlfIOLoadBalanceStrategy strategy ) { super(strategy); this.mSynRequestLock = synRequestLock; } @Override public ChannelControlBlock nextSyncChannel( long nMillisTimeout, boolean bEager ) { this.mSynRequestLock.lock(); try{ if( this.mExclusiveSyncChannelCB != null ) { return this.mExclusiveSyncChannelCB; } return this.queryNextChannel( nMillisTimeout, bEager, true ); } finally { this.mSynRequestLock.unlock(); // Always unlock here, so the exclusive-channel path cannot leak the lock. } } @Override public ChannelControlBlock nextSyncChannel( long nMillisTimeout ) { return this.nextSyncChannel( nMillisTimeout, true ); } @Override public ChannelControlBlock nextSyncChannel() { return this.nextSyncChannel( this.mnMajorWaitTimeout ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/UlfAsyncMessengerChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.wolf.client; import com.pinecone.hydra.umc.msg.AsyncMessengerChannelControlBlock; import com.pinecone.hydra.umc.wolf.NettyChannelControlBlock; import com.pinecone.hydra.umc.wolf.UlfChannel; public interface UlfAsyncMessengerChannelControlBlock extends AsyncMessengerChannelControlBlock, NettyChannelControlBlock { @Override UlfChannel getChannel(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/UlfClient.java ================================================ package com.pinecone.hydra.umc.wolf.client; import java.io.IOException; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.event.ChannelEventHandler; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfMessageNode; public interface UlfClient extends UlfMessageNode { ClientConnectArguments getConnectionArguments(); UMCMessage sendSyncMsg( UMCMessage request ) throws IOException; UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException ; void sendAsynMsg( UMCMessage request ) throws IOException ; void sendAsynMsg( UMCMessage request, UlfAsyncMsgHandleAdapter handler ) throws IOException; UlfClient registerChannelConnectedHandler ( ChannelEventHandler handler ) throws IllegalStateException ; UlfClient deregisterChannelConnectedHandler( ChannelEventHandler handler ) throws IllegalStateException ; } ================================================ FILE:
Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/WolfMCClient.java ================================================ package com.pinecone.hydra.umc.wolf.client; import io.netty.bootstrap.Bootstrap; import io.netty.buffer.ByteBuf; import io.netty.channel.EventLoopGroup; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelOption; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.channel.ChannelFutureListener; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioSocketChannel; import io.netty.handler.timeout.ReadTimeoutHandler; import io.netty.util.AttributeKey; import com.pinecone.framework.system.IrrationalProvokedException; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.umc.msg.Messagus; import com.pinecone.hydra.umc.msg.UMCServiceException; import com.pinecone.hydra.umc.msg.event.ChannelEventHandler; import com.pinecone.hydra.umc.wolf.AsyncUlfMedium; import com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler; import com.pinecone.hydra.umc.wolf.ChannelUtils; import com.pinecone.hydra.umc.wolf.GenericUMCByteMessageDecoder; import com.pinecone.hydra.umc.wolf.MCSecurityAuthentication; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfChannel; import com.pinecone.hydra.umc.wolf.UlfChannelStatus; import com.pinecone.hydra.umc.wolf.UlfMCReceiver; import com.pinecone.hydra.umc.wolf.UlfMessageNode; import com.pinecone.hydra.umc.wolf.UnsetUlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.WolfMCInitializationException; import com.pinecone.hydra.umc.wolf.WolfMCStandardConstants; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umct.UMCTExpressHandler; import java.io.IOException; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; /** * Pinecone Ursus For Java WolfClient [ Wolf, Uniform Message Control Protocol Client ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family. * Uniform Message Control Protocol (UMC) * UMC is a simple TCP/IP-based binary transmission protocol. * It supports methods similar to PUT/POST (HTTP), which are designed to fulfill uniform message control. * * Uniform Message Control Protocol for WolfMC Service [Client/Server] (Ulf UMC) * ***************************************************************************************** */ public class WolfMCClient extends ArchAsyncMessenger implements UlfClient { protected EventLoopGroup mExecutorGroup; protected Bootstrap mBootstrap; protected ClientConnectArguments mConnectionArguments; protected MCSecurityAuthentication mSecurityAuthentication; //TODO protected UlfAsyncMsgHandleAdapter mPrimeAsyncMessageHandler = new UnsetUlfAsyncMsgHandleAdapter( this ); // For all channels. 
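// A minimal usage sketch (illustrative only, not part of the original source). The "ParallelChannels"
// and "AutoReconnect" keys are defined in ClientConnectionArguments above; the "Host"/"Port" keys and
// the `payload` value are assumptions, since SharedConnectionArguments is not shown in this extract:
//   Map<String, Object> conf = new HashMap<>();
//   conf.put( "Host", "127.0.0.1" );   // assumed key
//   conf.put( "Port", 9000 );          // assumed key
//   conf.put( "ParallelChannels", 4 );
//   WolfMCClient client = new WolfMCClient.Builder().setName( "demo" ).setJoConf( conf ).build();
//   client.execute();                  // spawns the primary thread and connects
//   UMCMessage reply = client.sendSyncMsg( new UlfInformMessage( payload ) );
//   client.close();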
protected List mChannelConnectedHandlers = new ArrayList<>(); public WolfMCClient( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map joConf, ExtraHeadCoder extraHeadCoder ){ super( nodeId, szName, parentProcess, parent, joConf, extraHeadCoder ); this.apply( joConf ); } public WolfMCClient( String szName, Processum parentProcess, UlfMessageNode parent, Map joConf, ExtraHeadCoder extraHeadCoder ){ super( Messagus.nextLocalId(), szName, parentProcess, parent, joConf, extraHeadCoder ); this.apply( joConf ); } public WolfMCClient( long nodeId, String szName, Processum parentProcess, Map joConf, ExtraHeadCoder extraHeadCoder ){ this( nodeId, szName, parentProcess, null, joConf, extraHeadCoder ); } public WolfMCClient( String szName, Processum parentProcess, Map joConf, ExtraHeadCoder extraHeadCoder ){ this( Messagus.nextLocalId(), szName, parentProcess, null, joConf, extraHeadCoder ); } public WolfMCClient( long nodeId, String szName, Processum parentProcess, Map joConf ){ this( nodeId, szName, parentProcess, joConf, null ); } public WolfMCClient( String szName, Processum parentProcess, Map joConf ){ this( Messagus.nextLocalId(), szName, parentProcess, joConf, null ); } public WolfMCClient( long nodeId, String szName, UlfMessageNode parent, Processum parentProcess, Map joConf ){ this( nodeId, szName, parentProcess, parent, joConf, null ); } public WolfMCClient( String szName, UlfMessageNode parent, Processum parentProcess, Map joConf ){ this( Messagus.nextLocalId(), szName, parentProcess, parent, joConf, null ); } protected WolfMCClient( Builder builder ){ this( builder.nodeId, builder.szName, builder.parentProcess, builder.parent, builder.joConf, builder.extraHeadCoder ); } @Override public UlfClient registerChannelConnectedHandler( ChannelEventHandler handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mChannelConnectedHandlers.add( handler ); return this; } @Override public UlfClient deregisterChannelConnectedHandler( ChannelEventHandler handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mChannelConnectedHandlers.remove( handler ); return this; } @Override public WolfMCClient apply( Map joConf ) { super.apply( joConf ); this.mConnectionArguments = new ClientConnectionArguments( this.getSectionConf() ); return this; } @Override public WolfMCClient apply( UlfAsyncMsgHandleAdapter fnAsyncMessageAdapter ) { this.mPrimeAsyncMessageHandler = fnAsyncMessageAdapter; return this; } @Override public UMCTExpressHandler getAsyncMsgHandler() { return this.mPrimeAsyncMessageHandler; } @Override public ClientConnectArguments getConnectionArguments() { return this.mConnectionArguments; } @Override public ClientConnectArguments getMessageNodeConfig() { return this.getConnectionArguments(); } public EventLoopGroup getEventLoopGroup() { return this.mExecutorGroup; } public Bootstrap getBootstrap() { return this.mBootstrap; } public int getParallelChannels() { return this.getConnectionArguments().getParallelChannels(); } protected void clear(){ this.mChannelPool.clear(); } @Override public void close() throws ProvokeHandleException { this.mStateMutex.lock(); try { if( this.mExecutorGroup != null ) { this.mExecutorGroup.shutdownGracefully(); this.clear(); this.mExecutorGroup = null; } } finally { this.mStateMutex.unlock(); } try { synchronized ( this.mPrimaryThreadJoinMutex ) { WolfMCClient.this.mPrimaryThreadJoinMutex.notify(); } } catch ( IllegalMonitorStateException e ) { throw new ProvokeHandleException( 
"IllegalMonitorStateException [WolfMCClient::close], this exception has been redirected to parent thread.", e ); } } @Override public void kill() { try { this.close(); } catch ( ProvokeHandleException e ) { super.kill(); // Kill master thread forcefully. this.clear(); } } @Override public boolean isShutdown() { if ( this.mExecutorGroup == null ) { return true; } return this.mExecutorGroup.isShutdown(); } @Override public boolean isTerminated() { if ( this.mExecutorGroup == null ) { return true; } return this.mExecutorGroup.isTerminated(); } protected void notifyChannelConnected( ChannelControlBlock block, ChannelHandlerContext ctx ) { for( ChannelEventHandler h : this.mChannelConnectedHandlers ) { h.afterEventTriggered( block, ctx ); } } protected MessengerNettyChannelControlBlock syncSpawnSoloChannel() throws IOException, UMCServiceException { MessengerNettyChannelControlBlock ccb = null; ccb = new MessengerNettyChannelControlBlock( this ); ChannelFuture future = ccb.getChannel().toConnect( new InetSocketAddress( this.getConnectionArguments().getHost(), this.getConnectionArguments().getPort() ) ).getLastChannelFuture(); UlfChannel channel = ccb.getChannel(); channel.getNativeHandle().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).set( ccb ); ChannelUtils.setChannelIdentityID( channel, this.mnMessageNodeId ); this.getTaskManager().add( ccb ); future.addListener(new ChannelFutureListener() { @Override public void operationComplete( ChannelFuture channelFuture ) throws Exception { synchronized ( WolfMCClient.this.mPrimaryThreadJoinMutex ) { // if ( WolfMCClient.this.isShutdown() ) { // WolfMCClient.this.mShutdown = !channelFuture.isSuccess(); // } WolfMCClient.this.mPrimaryThreadJoinMutex.notify(); } } }); //channel.closeFuture().sync(); this.getChannelPool().pushBack( ccb ); synchronized ( this.mPrimaryThreadJoinMutex ) { try { this.mPrimaryThreadJoinMutex.wait( this.getConnectionArguments().getSocketTimeout() ); if( WolfMCClient.this.isShutdown() ) { throw new UnknownHostException( "Connect failed with '" + this.getConnectionArguments().getHost() + ":" + this.getConnectionArguments().getPort() + "'" ); } } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); throw new WolfMCInitializationException( e ); } } this.notifyChannelConnected( ccb, null ); return ccb; } protected void syncSpawnChannels() throws IOException, UMCServiceException { int n = this.getConnectionArguments().getParallelChannels(); for ( int i = 0; i < n; i++ ) { MessengerNettyChannelControlBlock block = this.syncSpawnSoloChannel(); this.infoLifecycle( String.format( "Channel%d(%s)", i, block.getChannel().getChannelID() ), "Spawned" ); } } protected void invokeChannelOwnedOnError( ChannelHandlerContext ctx, Throwable cause ) { try { UlfAsyncMsgHandleAdapter handle = (UlfAsyncMsgHandleAdapter)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).get(); if( handle == null ) { ChannelControlBlock ccb = (ChannelControlBlock)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get(); handle = ccb.pollMsgHandle( ArchAsyncMessenger.getSyncWaitingMillis( this ) ); if( handle == null ) { handle = WolfMCClient.this.mPrimeAsyncMessageHandler; } } handle.onError( ctx, cause ); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); } } protected void handleArrivedMessage( UlfAsyncMsgHandleAdapter handle, Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object 
rawMsg ) throws Exception { if( this.getErrorMessageAudit().isErrorMessage( msg ) ) { handle.onErrorMsgReceived( medium, block, msg, ctx, msg ); } else { handle.onSuccessfulMsgReceived( medium, block, msg, ctx, msg ); } } protected void shutdownIfAllChannelDetached ( MessengerNettyChannelControlBlock ccb ) { if ( !WolfMCClient.this.getConnectionArguments().isAutoReconnect() ) { if( WolfMCClient.this.getChannelPool().isAllChannelsTerminated() ) { try{ WolfMCClient.this.getLogger().warn( " All channels are terminated, client terminating." ); WolfMCClient.this.close(); } catch ( ProvokeHandleException e ) { throw new IrrationalProvokedException( e ); // This should never happen. } return; } WolfMCClient.this.getChannelPool().deactivate( ccb ); WolfMCClient.this.getMajorIOLock().lock(); try{ WolfMCClient.this.getTaskManager().erase( ccb ); } finally { WolfMCClient.this.getMajorIOLock().unlock(); } } } protected void initNettySubsystem() throws IOException, UMCServiceException { this.mExecutorGroup = new NioEventLoopGroup(); this.mBootstrap = new Bootstrap(); Bootstrap bootstrap = this.mBootstrap; bootstrap.group ( this.mExecutorGroup ); bootstrap.channel( NioSocketChannel.class ); bootstrap.option ( ChannelOption.CONNECT_TIMEOUT_MILLIS, this.getConnectionArguments().getSocketTimeout() ); bootstrap.handler( new ChannelInitializer() { @Override protected void initChannel( SocketChannel sc ) throws Exception { sc.pipeline().addLast( new ReadTimeoutHandler( WolfMCClient.this.getConnectionArguments().getKeepAliveTimeout(), TimeUnit.SECONDS ) ); sc.pipeline().addLast( new GenericUMCByteMessageDecoder( WolfMCClient.this.getExtraHeadCoder() ) ); sc.pipeline().addLast( new ChannelInboundHandlerAdapter (){ @Override public void channelActive( ChannelHandlerContext ctx ) throws Exception { super.channelActive(ctx); //UlfChannelControlBlock channel = WolfMCClient.this.getChannelPool().queryChannelById( ctx.channel().id() ); MessengerNettyChannelControlBlock channel = (MessengerNettyChannelControlBlock)ctx.channel().attr( AttributeKey.valueOf(WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY) ).get(); channel.afterConnectionArrive( new AsyncUlfMedium( ctx, null, WolfMCClient.this ), false ); channel.setThreadAffinity( Thread.currentThread() ); synchronized ( WolfMCClient.this.mPrimaryThreadJoinMutex ) { WolfMCClient.this.mPrimaryThreadJoinMutex.notify(); } } @Override public void channelRead( ChannelHandlerContext ctx, Object msg ) throws Exception { Medium medium = new AsyncUlfMedium( ctx, (ByteBuf) msg, WolfMCClient.this ); UlfMCReceiver receiver = new UlfMCReceiver( medium ); UMCMessage message = receiver.readMsg(); MessengerNettyChannelControlBlock channelControlBlock = (MessengerNettyChannelControlBlock)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get(); //Debug.trace( channelControlBlock.getChannel().getChannelID() ); if( channelControlBlock.getChannelStatus() == UlfChannelStatus.FORCE_SYNCHRONIZED ){ channelControlBlock.getSyncRetMsgQueue().add( message ); //WolfMCClient.this.mSyncRetMsgQueue.add( message ); } else { if ( !WolfMCClient.this.tryInvokeOrInterceptArrivedData( medium, channelControlBlock, message, ctx, msg ) ) { UlfAsyncMsgHandleAdapter handle = (UlfAsyncMsgHandleAdapter)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).get(); if ( handle == null ) { handle = channelControlBlock.pollMsgHandle( WolfMCClient.this.getSyncWaitingMillis() ); // Try pipeline.
} if( handle != null ) { WolfMCClient.this.handleArrivedMessage( handle, medium, channelControlBlock, message, ctx, msg ); // Preserving binding-status for exclusive handler-binding channel. Object dyAsynExclusiveHandle = ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASY_EXCLUSIVE_HANDLE_KEY ) ).get(); if ( dyAsynExclusiveHandle == null || !(Boolean) dyAsynExclusiveHandle ){ ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( null ); // Reset, so the channel can be rebound by another request, like AJAX. } } else { WolfMCClient.this.handleArrivedMessage( WolfMCClient.this.mPrimeAsyncMessageHandler, medium, channelControlBlock, message, ctx, msg ); } } Object dyExternalChannel = ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_EXTERNAL_CHANNEL_KEY ) ).get(); if ( dyExternalChannel == null || !(Boolean) dyExternalChannel ){ WolfMCClient.this.getChannelPool().setIdleChannel( channelControlBlock ); } } medium.release(); medium = new AsyncUlfMedium( ctx, null, WolfMCClient.this ); channelControlBlock.afterConnectionArrive( medium, true ); } @Override public void channelInactive( ChannelHandlerContext ctx ) throws Exception { MessengerNettyChannelControlBlock ccb = (MessengerNettyChannelControlBlock)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get(); if ( !WolfMCClient.this.mChannelInactiveHandlers.isEmpty() ) { boolean bBlocked = false; for ( ChannelInactiveHandler handler : WolfMCClient.this.mChannelInactiveHandlers ) { if ( handler.afterChannelInactive( ccb, ctx ) ) { bBlocked = true; } } if ( bBlocked ) { WolfMCClient.this.shutdownIfAllChannelDetached( ccb ); return; } } WolfMCClient.this.shutdownIfAllChannelDetached( ccb ); } @Override public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception { WolfMCClient.this.invokeChannelOwnedOnError( ctx, cause ); } } ); } @Override public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception { WolfMCClient.this.invokeChannelOwnedOnError( ctx, cause ); } }); this.syncSpawnChannels(); this.infoLifecycle( "Wolf<\uD83D\uDC3A>::initNettySubsystem", "Ready" ); } public void connect() throws IOException, UMCServiceException { this.mStateMutex.lock(); try{ if( this.isShutdown() ) { this.initNettySubsystem(); // If an exception is thrown here, it truncates the next detach-mutex release and is redirected to the primary thread. } } finally { this.mStateMutex.unlock(); WolfMCClient.this.unlockOuterThreadDetachMutex(); // This lock shouldn't be released in `finally`; it waits for the primary thread to process. } synchronized ( this.mPrimaryThreadJoinMutex ) { try { this.mPrimaryThreadJoinMutex.wait( ); // Join the primary thread. } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); throw new WolfMCInitializationException( e ); } } } @Override public void execute() throws UMCServiceException { if ( !this.isShutdown() ) { this.mLogger.info( "WolfMCClient [{}:{}] is already started. 
", this.getName(), this.hashCode() ); return; } Exception[] lastException = new Exception[] { null }; Thread primaryThread = new Thread( new Runnable() { @Override public void run() { WolfMCClient.this.getTaskManager().notifyExecuting( WolfMCClient.this ); try{ WolfMCClient.this.connect(); } catch ( Exception e ) { lastException[0] = e; WolfMCClient.this.kill(); } finally { WolfMCClient.this.getTaskManager().notifyFinished( WolfMCClient.this ); WolfMCClient.this.unlockOuterThreadDetachMutex(); } } }); this.preparePrimaryThread( primaryThread ); primaryThread.start(); this.joinOuterThread(); try { this.redirectException2ParentThread( lastException[0] ); } catch ( IOException e ) { throw new WolfMCInitializationException( e ); } } @Override public UMCMessage sendSyncMsg( UMCMessage request ) throws IOException { return this.sendSyncMsg( request, false ); } @Override public UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException { return this.sendSyncMsg( request, bNoneBuffered, this.getConnectionArguments().getSyncWaitingMillis() ); } @Override public void sendAsynMsg( UMCMessage request ) throws IOException { this.sendAsynMsg( request, false ); } @Override public void sendAsynMsg( UMCMessage request, UlfAsyncMsgHandleAdapter handler ) throws IOException { this.sendAsynMsg( request, false, handler ); } public static class Builder { private long nodeId = -1; private String szName; private Processum parentProcess; private UlfMessageNode parent; private Map joConf; private ExtraHeadCoder extraHeadCoder; public Builder setNodeId( long nodeId ) { this.nodeId = nodeId; return this; } public Builder setName( String szName ) { this.szName = szName; return this; } public Builder setParentProcess( Processum parentProcess ) { this.parentProcess = parentProcess; return this; } public Builder setParent( UlfMessageNode parent ) { this.parent = parent; return this; } public Builder setJoConf( Map joConf ) { this.joConf = joConf; return this; } public Builder setExtraHeadCoder( ExtraHeadCoder extraHeadCoder ) { this.extraHeadCoder = extraHeadCoder; return this; } public WolfMCClient build() { this.validate(); return new WolfMCClient(this); } private void validate() { if ( this.szName == null || this.szName.isEmpty() ) { long nId = this.nodeId; if ( nId == -1 ) { nId = System.nanoTime(); } this.szName = WolfMCClient.class.getSimpleName() + "_" + nId; } if ( this.joConf == null ) { throw new IllegalArgumentException( "Configuration (Conf) cannot be null" ); } } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/AbstractTimerTask.java ================================================ package com.pinecone.hydra.umc.wolf.server; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import io.netty.util.Timeout; import io.netty.util.TimerTask; /** * @Description * @Author welsir * @Date 2024/6/11 23:34 */ public abstract class AbstractTimerTask implements TimerTask { @Override public void run( Timeout timeout ) { // Collection allChannels = NettyServerChannelRecordPool.getAllChannels(); // for ( UlfRecipientChannelControlBlock channel : allChannels ) { // if (!channel.isShutdown()) { // doTask(channel); // } // } } protected abstract void doTask( ChannelControlBlock channel ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/IdleChannelTimerTask.java ================================================ package 
com.pinecone.hydra.umc.wolf.server; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.umc.msg.ChannelControlBlock; /** * @Description * @Author welsir * @Date 2024/6/11 23:35 */ public class IdleChannelTimerTask extends AbstractTimerTask { private final int idleTimeout; public IdleChannelTimerTask( int idleTimeout ) { this.idleTimeout = idleTimeout; } @Override protected void doTask( ChannelControlBlock channel ) { try { if(channel.isShutdown()){ return; } long now = System.currentTimeMillis(); boolean isReadTimeout = isReadTimeout(channel, now); boolean isWriteTimeout = isWriteTimeout(channel, now); if (isReadTimeout || isWriteTimeout) { Debug.echo("Connection timed out, trying to close the connection...."); channel.close(); //NettyServerChannelRecordPool.removeChannel(channel); } } catch (Throwable t){ throw new RuntimeException(t); } } protected boolean isReadTimeout( ChannelControlBlock channel, long now ) { Long lastRead = lastRead(channel); return lastRead != null && now - lastRead > idleTimeout; } protected boolean isWriteTimeout( ChannelControlBlock channel, long now ) { Long lastWrite = lastWrite(channel); return lastWrite != null && now - lastWrite > idleTimeout; } public Long lastRead( ChannelControlBlock channel ){ return 0L; //return channel.getAttribute( IdleChannelHandler.KEY_READ_TIMESTAMP, Long.class ); } public Long lastWrite( ChannelControlBlock channel ){ return 0L; //return channel.getAttribute( IdleChannelHandler.KEY_WRITE_TIMESTAMP, Long.class ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/PassiveRegisterChannelPool.java ================================================ package com.pinecone.hydra.umc.wolf.server; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.RegisterChannelPool; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.wolf.ArchChannelPool; import com.pinecone.hydra.umc.wolf.InternalErrors; import com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy; import java.io.IOException; import java.util.Map; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * @Description PassiveRegisterChannelPool * @Author DragonKing, welsir * @Date 2024/6/30 16:41 */ public class PassiveRegisterChannelPool<ID> extends ArchChannelPool implements RegisterChannelPool { protected LinkedTreeMap<ID, ChannelControlBlock> mChannelMapPool; protected UlfIOLoadBalanceStrategy mLoadBalanceStrategy; protected final int mnMaximumPoolSize ; protected long mnMajorWaitTimeout = 5000; protected WolfMCServer mRecipient; protected ReentrantReadWriteLock mPoolIOLock = new ReentrantReadWriteLock(); public PassiveRegisterChannelPool( WolfMCServer recipient, UlfIOLoadBalanceStrategy strategy, int nMaximumPoolSize ) { this.mRecipient = recipient; this.mLoadBalanceStrategy = strategy; this.mChannelMapPool = new LinkedTreeMap<>(); this.mnMaximumPoolSize = nMaximumPoolSize; } protected ChannelControlBlock addChannel( ChannelControlBlock channel ){ try { this.mPoolIOLock.writeLock().lock(); if( this.size() >= this.mnMaximumPoolSize ){ try{ InternalErrors.sendTooManyConnections( channel ); channel.close(); } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } return null; } ID channelId = this.warpKey( channel.getChannel().getChannelID() ) ; this.mChannelMapPool.put( channelId, channel ); return channel; } finally { this.mPoolIOLock.writeLock().unlock(); } } 
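
Note on the admission path above: `addChannel` folds the capacity check, the rejection path and the registration into a single write-locked critical section, so the pool can never oversubscribe between the size test and the `put`. Below is a minimal self-contained analogue of that pattern; `BoundedPool` and its members are hypothetical names for illustration, with plain JDK types standing in for `LinkedTreeMap` and the channel control blocks, not part of the Hydra API.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Hypothetical sketch of the write-locked, capacity-checked admission pattern.
final class BoundedPool<ID, CH> {
    private final Map<ID, CH> mPool = new LinkedHashMap<>();
    private final ReentrantReadWriteLock mLock = new ReentrantReadWriteLock();
    private final int mnMaximumPoolSize;

    BoundedPool( int nMaximumPoolSize ) {
        this.mnMaximumPoolSize = nMaximumPoolSize;
    }

    // Mirrors PassiveRegisterChannelPool.addChannel: the size test and the
    // mutation happen atomically under one write lock; a full pool rejects
    // the candidate (the real code sends "too many connections" and closes).
    CH add( ID id, CH ch, Runnable onReject ) {
        this.mLock.writeLock().lock();
        try {
            if ( this.mPool.size() >= this.mnMaximumPoolSize ) {
                onReject.run();
                return null; // Admission denied; caller must not use the channel.
            }
            this.mPool.put( id, ch );
            return ch;
        } finally {
            this.mLock.writeLock().unlock();
        }
    }
}

Keeping the rejection inside the critical section, as the original does, guarantees that concurrent registrations observe a consistent size and that at most `mnMaximumPoolSize` channels are ever admitted.
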
@Override public ChannelControlBlock depriveIdleChannel() { throw new UnsupportedOperationException( "Method `depriveIdleChannel` is inapplicable for `PassiveRegisterChannelPool`." ); } @SuppressWarnings( "unchecked" ) protected ID warpKey( Object id ) { return (ID)id; } @Override public ChannelControlBlock queryChannelById( Object id ) { return this.mChannelMapPool.get( this.warpKey( id ) ); } @Override public void onlyRemove( Object id ) { this.mChannelMapPool.remove( this.warpKey( id ) ); } @Override public int size() { return this.mChannelMapPool.size(); } @Override public void clear() { this.mPoolIOLock.writeLock().lock(); try{ for( ChannelControlBlock block : this.mChannelMapPool.values() ) { block.close(); block.release(); } this.mChannelMapPool.clear(); } finally { this.mPoolIOLock.writeLock().unlock(); } } @Override public boolean isEmpty() { return this.mChannelMapPool.isEmpty(); } @Override public Map getPooledMap() { return this.mChannelMapPool; } @Override public void remove(ChannelControlBlock ccb) { this.mPoolIOLock.writeLock().lock(); try { ID id = this.warpKey( ccb.getChannel().getChannelID() ); this.onlyRemove( id ); } finally { this.mPoolIOLock.writeLock().unlock(); } } @Override public void deactivate( ChannelControlBlock ccb ) { this.mPoolIOLock.writeLock().lock(); try { ID id = this.warpKey( ccb.getChannel().getChannelID() ); if( !ccb.getChannel().isShutdown() ) { ccb.close(); ccb.release(); } this.onlyRemove( id ); } finally { this.mPoolIOLock.writeLock().unlock(); } } @Override public ChannelControlBlock terminate( Object id ) throws InterruptedException { this.mPoolIOLock.writeLock().lock(); ChannelControlBlock block; try { block = this.queryChannelById( id ); if( block != null ) { block.close(); block.release(); this.onlyRemove( id ); } } finally { this.mPoolIOLock.writeLock().unlock(); } return block; } @Override public long getMajorWaitTimeout() { return this.mnMajorWaitTimeout; } @Override public PassiveRegisterChannelPool setMajorWaitTimeout( long nMillisTimeout ){ this.mnMajorWaitTimeout = nMillisTimeout; return this; } @Override public int getMaximumPoolSize() { return this.mnMaximumPoolSize; } @Override public PassiveRegisterChannelPool setIdleChannel( ChannelControlBlock block ) { this.addChannel( block ); // TODO return this; } @Override public ChannelPool add( ChannelControlBlock block ) { this.addChannel( block ); return null; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/RecipientNettyChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.wolf.server; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.wolf.ArchChannelControlBlock; import com.pinecone.hydra.umc.wolf.UlfChannel; import io.netty.channel.Channel; import java.util.concurrent.locks.Lock; public class RecipientNettyChannelControlBlock extends ArchChannelControlBlock implements UlfRecipientChannelControlBlock { protected WolfMCServer mParentRecipient; public RecipientNettyChannelControlBlock( WolfMCServer recipient, UlfChannel channel, boolean bForceSyncMode ) { super( recipient, channel, bForceSyncMode ); this.mParentRecipient = recipient; } public RecipientNettyChannelControlBlock( WolfMCServer recipient, Channel nativeChannel, boolean bForceSyncMode ) { this( recipient, new UlfChannel( recipient, nativeChannel ), bForceSyncMode ); } public RecipientNettyChannelControlBlock( WolfMCServer recipient, Channel nativeChannel ) { this( 
recipient, nativeChannel, false ); } public RecipientNettyChannelControlBlock( WolfMCServer recipient, boolean bForceSyncMode ) { this( recipient, new UlfChannel( recipient ), bForceSyncMode ); } public RecipientNettyChannelControlBlock( WolfMCServer recipient ) { this( recipient, false ); } @Override public WolfMCServer getParentMessageNode() { return (WolfMCServer) super.getParentMessageNode(); } protected void afterConnectionArrive( Medium medium, boolean bRenew ) { super.afterConnectionArrive( medium, bRenew, this.getSynRequestLock() ); } protected Lock getSynRequestLock() { return this.getParentMessageNode().getSynRequestLock(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/ServerConnectArguments.java ================================================ package com.pinecone.hydra.umc.wolf.server; import com.pinecone.hydra.umc.wolf.MCConnectionArguments; public interface ServerConnectArguments extends MCConnectionArguments { int getMaximumClients() ; void setMaximumClients( int mnMaximumClients ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/ServerConnectionArguments.java ================================================ package com.pinecone.hydra.umc.wolf.server; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.wolf.SharedConnectionArguments; import com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger; public class ServerConnectionArguments extends SharedConnectionArguments implements ServerConnectArguments { protected int mnMaximumClients; // <= 0 for unlimited clients public ServerConnectionArguments( JSONObject args ) { super( args ); this.mnMaximumClients = args.optInt( "MaximumClients", 0 ); } public ServerConnectionArguments( ArchAsyncMessenger args ) { this( args.getSectionConf() ); } @Override public int getMaximumClients() { return this.mnMaximumClients; } @Override public void setMaximumClients( int mnMaximumClients ) { this.mnMaximumClients = mnMaximumClients; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/UlfRecipientChannelControlBlock.java ================================================ package com.pinecone.hydra.umc.wolf.server; import com.pinecone.hydra.umc.msg.RecipientChannelControlBlock; import com.pinecone.hydra.umc.wolf.NettyChannelControlBlock; import com.pinecone.hydra.umc.wolf.UlfChannel; public interface UlfRecipientChannelControlBlock extends RecipientChannelControlBlock, NettyChannelControlBlock { @Override UlfChannel getChannel(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/UlfServer.java ================================================ package com.pinecone.hydra.umc.wolf.server; import com.pinecone.hydra.umc.msg.Recipient; import com.pinecone.hydra.umc.msg.event.ChannelEventHandler; import com.pinecone.hydra.umc.wolf.UlfMessageNode; import com.pinecone.hydra.umc.wolf.WolfMCNode; import com.pinecone.hydra.umct.UMCTExpressHandler; public interface UlfServer extends UlfMessageNode, Recipient { WolfMCNode apply( UMCTExpressHandler handler ); UlfServer registerDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException; UlfServer deregisterDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException; } 
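
For orientation before the `WolfMCServer` implementation that follows: a server is normally assembled through its nested `Builder` and started with `execute()`, with data-arrived handlers registered beforehand. The sketch below is illustrative only; the `Host`/`Port` configuration keys, the lambda form of `ChannelEventHandler`, and `myParentProcess` are assumptions rather than confirmed API, while `MaximumClients` and `MaximumConnections` mirror the keys read by `ServerConnectionArguments` and `WolfMCServer.apply`.

import java.util.HashMap;
import java.util.Map;
import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.umc.msg.UMCServiceException;
import com.pinecone.hydra.umc.wolf.server.WolfMCServer;

final class WolfServerDemo {
    // Hypothetical bootstrap routine; myParentProcess is an existing Processum.
    static void startDemoServer( Processum myParentProcess ) throws UMCServiceException {
        Map<String, Object> conf = new HashMap<>();
        conf.put( "Host", "0.0.0.0" );           // Assumed key, cf. MCConnectionArguments.getHost().
        conf.put( "Port", 7700 );                // Assumed key, cf. MCConnectionArguments.getPort().
        conf.put( "MaximumClients", 0 );         // <= 0 for unlimited clients.
        conf.put( "MaximumConnections", 1024 );  // Upper bound of the passive channel pool.

        WolfMCServer server = new WolfMCServer.Builder()
                .setName( "DemoWolfServer" )
                .setParentProcess( myParentProcess )
                .setJoConf( conf )
                .build();

        // Handlers are registered before the server starts; registering later
        // may raise IllegalStateException (see checkDeregisterHandlerStatus).
        server.registerDataArrivedEventHandlers( ( block, ctx ) -> {
            // Invoked after each inbound message has been dispatched.
        } );

        server.execute(); // Spawns the primary thread, binds and serves.
    }
}
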
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/WolfMCServer.java ================================================ package com.pinecone.hydra.umc.wolf.server; import io.netty.bootstrap.ServerBootstrap; import io.netty.buffer.ByteBuf; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelId; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.handler.timeout.ReadTimeoutHandler; import io.netty.util.AttributeKey; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.msg.Messagus; import com.pinecone.hydra.umc.msg.RecipientChannelControlBlock; import com.pinecone.hydra.umc.msg.UMCServiceException; import com.pinecone.hydra.umc.msg.event.ChannelEventHandler; import com.pinecone.hydra.umc.wolf.AsyncUlfMedium; import com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler; import com.pinecone.hydra.umc.wolf.ChannelUtils; import com.pinecone.hydra.umc.wolf.GenericUMCByteMessageDecoder; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfIdleFirstBalanceStrategy; import com.pinecone.hydra.umc.wolf.UlfMCReceiver; import com.pinecone.hydra.umc.wolf.UlfMessageNode; import com.pinecone.hydra.umc.wolf.UnsetUlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.WolfMCInitializationException; import com.pinecone.hydra.umc.wolf.WolfMCNode; import com.pinecone.hydra.umc.wolf.WolfMCStandardConstants; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.net.BindException; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; /** * Pinecone Ursus For Java WolfServer [ Wolf, Uniform Message Control Protocol Server ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family. * Uniform Message Control Protocol (UMC) * UMC is a simple TCP/IP-based binary transmission protocol. * It supports methods similar to PUT/POST (HTTP), which are designed to fulfill uniform message control. 
* * Uniform Message Control Protocol for WolfMC Service [Client/Server] (Ulf UMC) * ***************************************************************************************** */ public class WolfMCServer extends WolfMCNode implements UlfServer { protected ServerConnectArguments mConnectionArguments ; protected EventLoopGroup mMasterEventGroup ; protected EventLoopGroup mWorkersEventGroup ; protected ServerBootstrap mBootstrap ; protected ChannelFuture mPrimaryBindFuture ; protected SocketAddress mPrimaryBindAddress ; protected PassiveRegisterChannelPool mChannelPool ; protected UlfAsyncMsgHandleAdapter mRecipientMsgHandler ; protected List mDataArrivedEventHandlers ; private final ReentrantLock mSynRequestLock = new ReentrantLock(); // For inner purposes. public WolfMCServer( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map joConf, ExtraHeadCoder extraHeadCoder ) { super( nodeId, szName, parentProcess, parent, joConf, extraHeadCoder ); this.mDataArrivedEventHandlers = new ArrayList<>(); this.apply( joConf ); } public WolfMCServer( String szName, Processum parentProcess, UlfMessageNode parent, Map joConf, ExtraHeadCoder extraHeadCoder ) { this( Messagus.nextLocalId(), szName, parentProcess, parent, joConf, extraHeadCoder ); } public WolfMCServer( long nodeId, String szName, Processum parentProcess, Map joConf, ExtraHeadCoder extraHeadCoder ) { this( nodeId, szName, parentProcess, null, joConf, extraHeadCoder ); } public WolfMCServer( String szName, Processum parentProcess, Map joConf, ExtraHeadCoder extraHeadCoder ) { this( Messagus.nextLocalId(), szName, parentProcess, null, joConf, extraHeadCoder ); } public WolfMCServer( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map joConf ) { this( nodeId, szName, parentProcess, parent, joConf, null ); } public WolfMCServer( String szName, Processum parentProcess, UlfMessageNode parent, Map joConf ) { this( Messagus.nextLocalId(), szName, parentProcess, parent, joConf, null ); } public WolfMCServer( long nodeId, String szName, Processum parentProcess, Map joConf ) { this( nodeId, szName, parentProcess, null, joConf ); } public WolfMCServer( String szName, Processum parentProcess, Map joConf ) { this( Messagus.nextLocalId(), szName, parentProcess, null, joConf ); } protected WolfMCServer( Builder builder ){ this( builder.nodeId, builder.szName, builder.parentProcess, builder.parent, builder.joConf, builder.extraHeadCoder ); } @Override public WolfMCServer apply( Map conf ) { super.apply( conf ); JSONObject joConf = this.getSectionConf(); this.mConnectionArguments = new ServerConnectionArguments( joConf ); this.mChannelPool = new PassiveRegisterChannelPool<>( this, new UlfIdleFirstBalanceStrategy(), joConf.optInt( "MaximumConnections", (int)1e7 ) ); try{ String szRecipientMsgHandler = joConf.optString( "RecipientMsgHandler" ); if( StringUtils.isEmpty( szRecipientMsgHandler ) ) { this.mRecipientMsgHandler = new UnsetUlfAsyncMsgHandleAdapter( this ) ; } else { this.mRecipientMsgHandler = (UlfAsyncMsgHandleAdapter) DynamicFactory.DefaultFactory.loadInstance( szRecipientMsgHandler, null, null ); } } catch ( ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { throw new ProxyProvokeHandleException( e ); } return this; } @Override public WolfMCServer apply( UlfAsyncMsgHandleAdapter fnRecipientMsgHandler ) { this.mRecipientMsgHandler = fnRecipientMsgHandler; return this; } @Override public UlfServer 
registerDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mDataArrivedEventHandlers.add( handler ); return this; } @Override public UlfServer deregisterDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException { this.checkDeregisterHandlerStatus(); this.mDataArrivedEventHandlers.remove( handler ); return this; } protected void notifyDataArrivedEventHandlers( RecipientChannelControlBlock block, ChannelHandlerContext ctx ) { for( ChannelEventHandler h : this.mDataArrivedEventHandlers ) { h.afterEventTriggered( block, ctx ); } } @Override public int getMaximumConnections() { return this.mChannelPool.getMaximumPoolSize(); } @Override public void close() throws ProvokeHandleException { this.mStateMutex.lock(); try { if( this.mMasterEventGroup != null ) { this.mMasterEventGroup.shutdownGracefully(); this.mMasterEventGroup = null; //this.clear(); //this.mShutdown = true; } if( this.mWorkersEventGroup != null ) { this.mWorkersEventGroup.shutdownGracefully(); } } finally { this.mStateMutex.unlock(); } try { synchronized ( this.mPrimaryThreadJoinMutex ) { WolfMCServer.this.mPrimaryThreadJoinMutex.notify(); } } catch ( IllegalMonitorStateException e ) { throw new ProvokeHandleException( "IllegalMonitorStateException [WolfMCServer::close], this exception has been redirected to parent thread.", e ); } } @Override public void kill() { try{ this.close(); } catch ( ProvokeHandleException e ) { super.kill(); // Kill master thread forcefully. //this.clear(); } } @Override public boolean isShutdown() { if ( this.mMasterEventGroup == null ) { return true; } return this.mMasterEventGroup.isShutdown(); } @Override public boolean isTerminated() { if ( this.mMasterEventGroup == null ) { return true; } return this.mMasterEventGroup.isTerminated(); } protected void handleArrivedMessage(UlfAsyncMsgHandleAdapter handle, Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { if( this.getErrorMessageAudit().isErrorMessage( msg ) ) { handle.onErrorMsgReceived( medium, block, msg, ctx, rawMsg ); } else { handle.onSuccessfulMsgReceived( medium, block, msg, ctx, rawMsg ); } } protected void initNettySubsystem() throws IOException, UMCServiceException { this.mMasterEventGroup = new NioEventLoopGroup(); this.mWorkersEventGroup = new NioEventLoopGroup(); this.mBootstrap = new ServerBootstrap(); this.mBootstrap.group ( this.mMasterEventGroup , this.mWorkersEventGroup ); this.mBootstrap.channel ( NioServerSocketChannel.class ); this.mBootstrap.option ( ChannelOption.SO_BACKLOG, 1024 ); this.mBootstrap.childHandler( new ChannelInitializer<SocketChannel>() { @Override protected void initChannel( SocketChannel sc ) throws Exception { sc.pipeline().addLast( new ReadTimeoutHandler( 1000, TimeUnit.SECONDS ) ); sc.pipeline().addLast( new GenericUMCByteMessageDecoder( WolfMCServer.this.getExtraHeadCoder() ) ); sc.pipeline().addLast( new ChannelInboundHandlerAdapter(){ @Override public void channelActive( ChannelHandlerContext ctx ) throws Exception { super.channelActive(ctx); RecipientNettyChannelControlBlock ccb = new RecipientNettyChannelControlBlock( WolfMCServer.this, ctx.channel(), false ); ccb.getChannel().getNativeHandle().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).set( ccb ); Channel channel = ctx.channel(); SocketAddress remote = channel.remoteAddress(); String ipInfo = "??"; if ( remote instanceof InetSocketAddress ) { InetSocketAddress inet = 
(InetSocketAddress) remote; String ip = inet.getAddress().getHostAddress(); int port = inet.getPort(); ipInfo = ip + ":" + port; } WolfMCServer.this.getLogger().info( "[MessengerConnected] {} {}", ctx.channel().id(), ipInfo ); ccb.afterConnectionArrive( new AsyncUlfMedium( ctx, null, WolfMCServer.this ), false ); ccb.setThreadAffinity( Thread.currentThread() ); WolfMCServer.this.getTaskManager().add( ccb ); WolfMCServer.this.mChannelPool.setIdleChannel( ccb ); } @Override public void channelRead( ChannelHandlerContext ctx, Object msg ) throws Exception { Medium medium = new AsyncUlfMedium( ctx, (ByteBuf) msg, WolfMCServer.this ); UlfMCReceiver receiver = new UlfMCReceiver( medium ); UMCMessage message = receiver.readMsg(); RecipientNettyChannelControlBlock channelControlBlock = (RecipientNettyChannelControlBlock)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get(); ChannelUtils.setChannelIdentityID( channelControlBlock.getChannel(), message.getHead().getIdentityId() ); if ( !WolfMCServer.this.tryInvokeOrInterceptArrivedData( medium, channelControlBlock, message, ctx, msg ) ) { WolfMCServer.this.handleArrivedMessage( WolfMCServer.this.mRecipientMsgHandler, medium, channelControlBlock, message, ctx, msg ); WolfMCServer.this.notifyDataArrivedEventHandlers( channelControlBlock, ctx ); } medium.release(); medium = new AsyncUlfMedium( ctx, null, WolfMCServer.this ); channelControlBlock.afterConnectionArrive( medium, true ); } @Override public void channelInactive( ChannelHandlerContext ctx ) throws Exception { RecipientNettyChannelControlBlock ccb = (RecipientNettyChannelControlBlock)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get(); if ( !WolfMCServer.this.mChannelInactiveHandlers.isEmpty() ) { boolean bBlocked = false; for ( ChannelInactiveHandler handler : WolfMCServer.this.mChannelInactiveHandlers ) { if ( handler.afterChannelInactive( ccb, ctx ) ) { bBlocked = true; } } if ( bBlocked ) { return; } } WolfMCServer.this.mChannelPool.deactivate( ccb ); WolfMCServer.this.getMajorIOLock().lock(); try{ WolfMCServer.this.getTaskManager().erase( ccb ); } finally { WolfMCServer.this.getMajorIOLock().unlock(); } WolfMCServer.this.getLogger().info( "[MessengerDetached] {}", ctx.channel().id() ); } @Override public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception { WolfMCServer.this.mRecipientMsgHandler.onError( ctx, cause ); } } ); } @Override public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception { WolfMCServer.this.mRecipientMsgHandler.onError( ctx, cause ); } }); String szHost = this.getConnectionArguments().getHost(); short nPort = this.getConnectionArguments().getPort(); if( StringUtils.isEmpty( szHost ) ) { this.mPrimaryBindAddress = new InetSocketAddress( nPort ); } else { this.mPrimaryBindAddress = new InetSocketAddress( szHost, nPort ); } this.mPrimaryBindFuture = this.mBootstrap.bind( this.mPrimaryBindAddress ); this.mPrimaryBindFuture.addListener( new ChannelFutureListener() { @Override public void operationComplete( ChannelFuture channelFuture ) throws Exception { synchronized ( WolfMCServer.this.mPrimaryThreadJoinMutex ) { // if ( WolfMCServer.this.isShutdown() ) { // WolfMCServer.this.mShutdown = !channelFuture.isSuccess(); // } WolfMCServer.this.mPrimaryThreadJoinMutex.notify(); } } } ); synchronized ( this.mPrimaryThreadJoinMutex ) { try { this.mPrimaryThreadJoinMutex.wait( this.getConnectionArguments().getSocketTimeout() ); if( 
this.isShutdown() ) { throw new BindException( String.format( "%s [Serve], binding `%s` compromised.", this.className(), this.mPrimaryBindAddress.toString() ) ); } } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); throw new WolfMCInitializationException( e ); } } /*try { this.mPrimaryBindFuture.channel().closeFuture().sync(); } catch ( InterruptedException e ) { throw new RuntimeException(e); }*/ } public void serve() throws UMCServiceException { this.mStateMutex.lock(); try{ if( this.isShutdown() ) { try { this.initNettySubsystem(); // An exception thrown here truncates the next detach-mutex release and is redirected to the primary thread. } catch ( IOException e ) { throw new WolfMCInitializationException( e ); } } } finally { this.mStateMutex.unlock(); WolfMCServer.this.unlockOuterThreadDetachMutex(); // This lock shouldn't be released in `finally`; it waits for the primary thread to process. } synchronized ( this.mPrimaryThreadJoinMutex ) { try { this.mPrimaryThreadJoinMutex.wait(); // Join the primary thread. } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); } } } @Override public void execute() throws UMCServiceException { if ( !this.isShutdown() ) { this.mLogger.info( "WolfMCServer [{}:{}] is already started. ", this.getName(), this.hashCode() ); return; } Exception[] lastException = new Exception[] { null }; Thread primaryThread = new Thread( new Runnable() { @Override public void run() { WolfMCServer.this.getTaskManager().notifyExecuting( WolfMCServer.this ); try{ WolfMCServer.this.serve(); } catch ( Exception e ) { lastException[0] = e; WolfMCServer.this.kill(); } finally { WolfMCServer.this.getTaskManager().notifyFinished( WolfMCServer.this ); WolfMCServer.this.unlockOuterThreadDetachMutex(); } } }); this.preparePrimaryThread( primaryThread ); primaryThread.start(); this.joinOuterThread(); if( !this.isShutdown() ) { this.infoLifecycle( String.format( "Wolf<\uD83D\uDC3A>::BindServer(%s)", this.mPrimaryBindAddress.toString() ), "Ready" ); } try { this.redirectException2ParentThread( lastException[0] ); } catch ( IOException e ) { throw new WolfMCInitializationException( e ); } } protected Lock getSynRequestLock() { return this.mSynRequestLock; } @Override public ServerConnectArguments getConnectionArguments() { return this.mConnectionArguments; } @Override public ServerConnectArguments getMessageNodeConfig() { return this.getConnectionArguments(); } @Override public ChannelPool getChannelPool() { return null; } public static class Builder { private long nodeId = -1; private String szName; private Processum parentProcess; private UlfMessageNode parent; private Map joConf; private ExtraHeadCoder extraHeadCoder; public Builder setNodeId( long nodeId ) { this.nodeId = nodeId; return this; } public Builder setName( String szName ) { this.szName = szName; return this; } public Builder setParentProcess( Processum parentProcess ) { this.parentProcess = parentProcess; return this; } public Builder setParent( UlfMessageNode parent ) { this.parent = parent; return this; } public Builder setJoConf( Map joConf ) { this.joConf = joConf; return this; } public Builder setExtraHeadCoder( ExtraHeadCoder extraHeadCoder ) { this.extraHeadCoder = extraHeadCoder; return this; } public WolfMCServer build() { this.validate(); return new WolfMCServer( this ); } private void validate() { if ( this.szName == null || this.szName.isEmpty() ) { long nId = this.nodeId; if ( nId == -1 ) { nId = System.nanoTime(); } this.szName = WolfMCServer.class.getSimpleName() + "_" + nId; } if ( 
this.joConf == null ) { throw new IllegalArgumentException( "Configuration (Conf) cannot be null" ); } } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/AddressMapping.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.umc.msg.UMCMethod; import java.lang.annotation.*; @Target({ElementType.METHOD, ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface AddressMapping { String name() default ""; String[] value() default {}; boolean relative() default true; // Only for methods. UMCMethod[] method() default {}; boolean selfMappable() default true; // True for using self-method name if value is not given. } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMessagelet.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.JSONMaptron; import java.io.IOException; import java.util.Map; public abstract class ArchMessagelet implements Messagelet { protected Map mConfig; protected ArchMessagram mMessagelet; protected UMCConnection mMsgPackage; protected UMCTransmit mUMCTransmit; protected UMCReceiver mUMCReceiver; public ArchMessagelet( UMCConnection msgPackage, ArchMessagram servtron ) { this.mMsgPackage = msgPackage; this.mMessagelet = servtron; this.mConfig = new JSONMaptron(); //TODO } protected abstract Map $_MSG(); @Override public MessageDeliver getMessageDeliver() { return this.getMessagePackage().getDeliver(); } @Override public UMCConnection getMessagePackage() { return this.mMsgPackage; } @Override public UMCTransmit getTransmit(){ return this.mUMCTransmit; } @Override public UMCReceiver getReceiver(){ return this.mUMCReceiver; } @Override public ArchMessagram getMessagelet() { return this.mMessagelet; } protected void sendDefaultConfirmResponse() throws IOException { JSONObject jo = new JSONMaptron(); jo.put( "Messagelet", "ReceiveConfirm" ); this.getTransmit().sendInformMsg( jo ); } @Override public Map getConfig() { return this.mConfig; } @Override public abstract void dispatch(); @Override public String serviceName() { return this.className(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMessagram.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.hydra.servgram.ArchServgramium; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.framework.system.executum.Processum; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.Map; public abstract class ArchMessagram extends ArchServgramium implements Messagram { public static final String DefaultServiceKey = "__NAME__"; protected Map mExpresses; protected Map mProtoConfig; public ArchMessagram( String szName, Processum parent, Map config ) { super( szName, parent ); this.mExpresses = new LinkedTreeMap<>(); this.mProtoConfig = config; } @Override public Map getProtoConfig() { return this.mProtoConfig; } @Override public Messagram addExpress( MessageExpress express ) { this.mExpresses.put( express.getName(), express ); return this; } @Override 
public MessageExpress getExpressByName( String name ) { return this.mExpresses.get( name ); } @Override public Messagram removeExpress( String name ) { this.mExpresses.remove( name ); return this; } public abstract String getLetsNamespace() ; protected Messagelet contriveByClassName( String szClassName, UMCConnection UMCConnection) { Messagelet obj = null; try { Class pVoid = Class.forName( szClassName ); try{ Constructor constructor = pVoid.getConstructor( UMCConnection.class, ArchMessagram.class ); obj = (Messagelet) constructor.newInstance(UMCConnection, this ); } catch (NoSuchMethodException | InvocationTargetException e1){ e1.printStackTrace(); } } catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){ return null; } return obj; } public Messagelet contriveByScheme( String szSchemeName, UMCConnection UMCConnection) throws IllegalArgumentException { String szClassName = this.getLetsNamespace() + szSchemeName ; Messagelet obj = this.contriveByClassName( szClassName, UMCConnection); if( obj == null ){ throw new IllegalArgumentException( "[Messagelet] Fantasy scheme with no crew member." ); } return obj; } @Override public Hydrogen parentSystem() { return (Hydrogen) super.parentSystem(); } @Override public void execute() { this.infoLifecycle( "toSeek", "Can do !" ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMsgDeliver.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.unit.trie.TrieSegmentor; import com.pinecone.framework.unit.trie.UniTrieMaptron; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.StringUtils; import com.pinecone.hydra.express.Package; import com.pinecone.hydra.umc.msg.Status; import com.pinecone.hydra.umc.msg.UMCHead; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umct.decipher.HeaderDecipher; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Supplier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public abstract class ArchMsgDeliver implements MessageDeliver { protected String mszName; protected MessageExpress mExpress; protected MessageJunction mJunction; protected Map<String, MessageHandler> mRoutingTable; protected HeaderDecipher mHeaderDecipher; protected String mszServicePathKey; protected Logger mLogger; public ArchMsgDeliver( String szName, MessageExpress express, HeaderDecipher headerDecipher, String szServicePathKey, Supplier<Map<String, MessageHandler>> routingTableSupplier ) { this.mszName = szName; this.mExpress = express; this.mJunction = this.mExpress.getJunction(); this.mHeaderDecipher = headerDecipher; this.mszServicePathKey = szServicePathKey; this.mLogger = LoggerFactory.getLogger( this.getClass() ); this.mRoutingTable = routingTableSupplier.get(); } public ArchMsgDeliver( String szName, MessageExpress express, HeaderDecipher headerDecipher, String szServicePathKey ) { this( szName, express, headerDecipher, szServicePathKey, HashMap::new ); } public ArchMsgDeliver( String szName, MessageExpress express, HeaderDecipher headerDecipher, String szServicePathKey, boolean bUsingTrie ) { this( szName, express, headerDecipher, szServicePathKey, () -> { if ( bUsingTrie ) { return new UniTrieMaptron<>( HashMap::new, new TrieSegmentor() { @Override public String[] segments( String szPathKey ) { return szPathKey.split( "\\.|\\/" ); } @Override public String getSeparator() { return 
StringUtils.FOLDER_SEPARATOR; } }); } else { return new HashMap<>(); } }); } @Override public String getServiceKeyword() { return this.mszServicePathKey; } @Override public String getName() { return this.mszName; } @Override public MessageExpress getExpress() { return this.mExpress; } public MessageJunction getJunction(){ return this.mJunction; } @Override public Map getRoutingTable() { return this.mRoutingTable; } @Override public void registerHandler( String addr, MessageHandler controller ){ this.mRoutingTable.put( addr, controller ); } protected UMCConnection wrap( Package that ) { return (UMCConnection) that; } protected abstract void prepareDispatch( Package that ) throws IOException; protected abstract boolean sift( Package that ); protected boolean isMyJob( Package that, String szServiceKey ) { return szServiceKey != null; } protected UMCMessage processResponse( UMCMessage request, UMCMessage response ) { MessageExpress me = this.getExpress(); try{ UMCTExpress ue = (UMCTExpress) me; return ue.processResponse( request, response ); } catch ( ClassCastException e ) { return response; } } protected void messageDispatch( Package that ) throws IOException, ServiceException { boolean bDenialService = false; try{ UMCConnection connection = this.wrap( that ); UMCMessage request = connection.getMessage(); if ( request.getHead().getStatus() != Status.OK ) { throw new ServiceInternalException( "Error response." ); } if( this.sift( that ) ) { connection.getTransmit().sendInformMsg( Bytes.Empty, Status.IllegalMessage ); return; } UMCHead head = request.getHead(); Object exHead = head.getExtraHead(); String szAddr = this.mHeaderDecipher.getServicePath( exHead ); if( szAddr == null ) { this.mHeaderDecipher.sendIllegalMessage( connection ); return; } MessageHandler controller = this.mRoutingTable.get( szAddr ); if( controller != null ) { connection.entrust( this ); Object[] args; if( controller.isArgsIndexed() ) { args = this.mHeaderDecipher.values( exHead, controller.getArgumentsDescriptor(), controller.getArgumentTemplate() ).toArray(); } else { List keys = controller.getArgumentsKey(); args = this.mHeaderDecipher.evals( exHead, controller.getArgumentsDescriptor(), keys, controller.getArgumentTemplate() ); } try { Object ret = controller.invoke( args ); UMCMessage response = this.mHeaderDecipher.assembleReturnMsg( ret, controller.getReturnDescriptor() ); connection.getTransmit().sendMsg( this.processResponse( request, response ) ); } catch ( Exception e ) { this.mLogger.warn( "MessageDeliver has handled an invocation exception, what => ", e ); this.mHeaderDecipher.sendInternalError( connection ); } } else { if ( this.mJunction != null ) { this.doMessagelet( szAddr, that ); } bDenialService = true; } } catch ( RuntimeException e ) { throw new ServiceInternalException( e ); } if ( bDenialService ) { throw new DenialServiceException( "It's none of my business." 
); } } protected abstract void doMessagelet( String szMessagelet, Package that ) ; @Override public void toDispatch( Package that ) throws IOException, ServiceException { this.prepareDispatch( that ); this.messageDispatch( that ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMsgExpress.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.express.Deliver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReadWriteLock; /** * Pinecone Ursus For Java MessageExpress [Archetype] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * MessageExpress vs Messenger * Messenger is the abstract domestic producer. * Messenger is the message reproducer, which imports commodities (Messages) from abroad (inbound, from the real producer). * * MessageExpress is the delivery center, which delivers the package (Message) through a deliverer to the consumer (e.g. Messagelet). * [Server -> send message -> (out)] -> [(in) -> Messenger -> Express -> Deliver -> Messagelet ] * ***************************************************************************************** */ public abstract class ArchMsgExpress implements MessageExpress, Slf4jTraceable { protected String mszName ; protected MessageJunction mJunction ; protected Logger mLogger ; protected Map<String, MessageDeliver> mDeliverPool = new LinkedHashMap<>(); protected ReadWriteLock mPoolLock = new ReentrantReadWriteLock(); public ArchMsgExpress( String name, MessageJunction junction, Logger logger ) { this.mszName = name; this.mLogger = logger; this.mJunction = junction; if( this.mszName == null ){ this.mszName = this.className(); } } public ArchMsgExpress( String name, MessageJunction junction ) { this( name, junction, null ); this.mLogger = LoggerFactory.getLogger( this.className() ); } public ArchMsgExpress( ArchMessagram messagram ) { this( null, messagram ); } @Override public String getName() { return this.mszName; } @Override public MessageJunction getJunction() { return this.mJunction; } @Override public Logger getLogger() { return this.mLogger; } protected ReadWriteLock getPoolLock() { return this.mPoolLock; } protected Map<String, MessageDeliver> getDeliverPool() { return this.mDeliverPool; } protected MessageDeliver wrap( Deliver deliver ) { return (MessageDeliver) deliver; } protected abstract MessageDeliver spawn( String szName ); @Override public MessageDeliver recruit ( String szName ) { if( this.getDeliverPool().containsKey( szName ) ) { return this.getDeliverPool().get( szName ); } MessageDeliver deliver = this.spawn( szName ); this.register( deliver ); return deliver; } @Override public ArchMsgExpress register ( Deliver deliver ) { this.getDeliverPool().put( deliver.getName(), this.wrap( deliver ) ); return this; } @Override public ArchMsgExpress fired ( Deliver deliver ) { this.getDeliverPool().remove( deliver.getName(), this.wrap( deliver ) ); return this; } @Override public MessageDeliver getDeliver ( String szName ) { return this.getDeliverPool().get( szName ); } public ArchMsgExpress syncRegister( Deliver deliver ) { this.getPoolLock().writeLock().lock(); try{ this.register( deliver ); } finally { 
this.getPoolLock().writeLock().unlock(); } return this; } public ArchMsgExpress syncFired ( Deliver deliver ) { this.getPoolLock().writeLock().lock(); try{ this.fired( deliver ); } finally { this.getPoolLock().writeLock().unlock(); } return this; } @Override public boolean hasOwnDeliver( Deliver deliver ) { return this.hasOwnDeliver( deliver.getName() ); } @Override public boolean hasOwnDeliver( String deliverName ) { return this.getDeliverPool().containsKey( deliverName ); } @Override public int size() { return this.getDeliverPool().size(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchUMCConnection.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.express.Deliver; public abstract class ArchUMCConnection implements UMCConnection { protected MessageDeliver mDeliver; protected Medium mMessageSource; protected UMCMessage mUMCMessage; protected UMCTransmit mUMCTransmit; protected UMCReceiver mUMCReceiver; public ArchUMCConnection( Medium medium, UMCMessage message, UMCTransmit transmit, UMCReceiver receiver ) { this.mMessageSource = medium; this.mUMCMessage = message; this.mUMCTransmit = transmit; this.mUMCReceiver = receiver; } @Override public MessageDeliver getDeliver() { return this.mDeliver; } @Override public UMCMessage getMessage() { return this.mUMCMessage; } @Override public UMCTransmit getTransmit() { return this.mUMCTransmit; } @Override public UMCReceiver getReceiver() { return this.mUMCReceiver; } @Override public Medium getMessageSource() { return this.mMessageSource; } @Override public ArchUMCConnection entrust( Deliver deliver ) { this.mDeliver = (MessageDeliver)deliver; return this; } @Override public void release() { this.mMessageSource.release(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/DenialServiceException.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.system.prototype.Pinenut; public class DenialServiceException extends ServiceException implements Pinenut { public DenialServiceException() { super(); } public DenialServiceException( String message ) { super(message); } public DenialServiceException( String message, Throwable cause ) { super(message, cause); } public DenialServiceException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/DuplexExpress.java ================================================ package com.pinecone.hydra.umct; import java.io.IOException; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelPool; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.uma.AsynMsgHandler; public interface DuplexExpress extends UMCTExpress { void afterChannelInactive( ChannelControlBlock controlBlock ) ; ChannelPool getPoolByClientId( long clientId ) ; void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException; void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, 
AsynMsgHandler handler ) throws IOException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/GenericMessagramScanner.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.ClassScopeNSProtocolIteratorsFactory; import com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import com.pinecone.ulf.util.lang.PooledClassCandidateScanner; import javassist.ClassPool; public class GenericMessagramScanner extends PooledClassCandidateScanner implements MessagramScanner { protected HierarchyClassInspector mClassInspector ; public GenericMessagramScanner ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) { super( searchScope, classLoader, iteratorsFactory, classPool ); this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool ); } public GenericMessagramScanner ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) { this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool ); } public GenericMessagramScanner ( ClassScope searchScope, ClassLoader classLoader ) { this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/IlleagalResponseException.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.system.prototype.Pinenut; public class IlleagalResponseException extends ServiceException implements Pinenut { public IlleagalResponseException() { super(); } public IlleagalResponseException( String message ) { super(message); } public IlleagalResponseException( String message, Throwable cause ) { super(message, cause); } public IlleagalResponseException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/IntegratedMessagram.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import java.util.Map; public class IntegratedMessagram extends ArchMessagram implements Messagram { protected String mszExpressFactory; protected Map mExpressesConf; protected DynamicFactory mExpressFactory; public IntegratedMessagram(String szName, Processum parent, Map config ) { super( szName, parent, config ); this.prepareConfig(); this.prepareExpresses(); } @SuppressWarnings( "unchecked" ) protected void prepareConfig() { this.mszExpressFactory = (String) this.getProtoConfig().get( "ExpressFactory" ); this.mExpressesConf = (Map) this.getProtoConfig().get( "Expresses" ); } protected void prepareExpresses() { if( StringUtils.isEmpty( this.mszExpressFactory ) ) { this.mExpressFactory = new GenericDynamicFactory( this.parentSystem().getTaskManager().getClassLoader() ); } else 
{ this.mExpressFactory = (DynamicFactory) DynamicFactory.DefaultFactory.optLoadInstance( this.mszExpressFactory, null, new Object[] { this.parentSystem().getTaskManager().getClassLoader() } ); } try{ if( this.mExpressesConf != null ) { for( Map.Entry kv : this.mExpressesConf.entrySet() ) { Object v = kv.getValue(); Map map = (Map) v; String szEngine = (String) map.get( "Engine" ); Object node = this.mExpressFactory.loadInstance( szEngine, null, new Object[] { kv.getKey(), this } ); if( node instanceof MessageExpress ){ MessageExpress express = (MessageExpress) node; this.mExpresses.put( express.getName(), express ); } else { throw new IllegalArgumentException( "Illegal message express engine, should be `MessageExpress`: " + szEngine ); } } } } catch ( Exception e ) { throw new ProvokeHandleException( e ); } } @Override public Map getExpressesConfig() { return this.mExpressesConf; } @Override public String getLetsNamespace() { return ""; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/InvokeEntity.java ================================================ package com.pinecone.hydra.umct; import java.util.List; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.prototype.Pinenut; public interface InvokeEntity extends Pinenut { String getAddress(); String getEntityName(); /** * Invocation Path | Invocation Entity Path * Address + EntityName * e.g. Package + MethodName => com.dragonking.method * e.g. Namespace + FunctionName => std::printf * e.g. Mapping + TargetName => /admin/audit/methodName */ String getInvocationPath(); List > getParameters(); /** * Invoked Interface (defaulted, [::function], anonymous global namespace) * Usually a class, struct, namespace or interface */ Object getInvokeIface(); /** * Invoked Entity * Usually a method, function or API */ Object getInvokeEntity(); Object invoke( Object... args ) throws Exception; default Object call( Object... args ) { try{ return this.invoke( args ); } catch ( Exception e ) { throw new ProxyProvokeHandleException( e ); } } void execute( Object... args ) throws Exception; default void enforce( Object... 
args ) { try{ this.execute( args ); } catch ( Exception e ) { throw new ProxyProvokeHandleException( e ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/JSONLetMsgDeliver.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.express.Package; import com.pinecone.hydra.umct.decipher.JSONHeaderDecipher; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.function.Supplier; public class JSONLetMsgDeliver extends ArchMsgDeliver { public JSONLetMsgDeliver( String name, MessageExpress express ) { this( name, express, ArchMessagram.DefaultServiceKey ); } public JSONLetMsgDeliver( String name, MessageExpress express, String szServiceKey, Supplier<Map<String, MessageHandler>> routingTableSupplier ) { super( name, express, new JSONHeaderDecipher( szServiceKey ), szServiceKey, routingTableSupplier ); } public JSONLetMsgDeliver( String name, MessageExpress express, String szServiceKey ) { this( name, express, szServiceKey, HashMap::new ); } public JSONLetMsgDeliver( MessageExpress express ) { this( JSONLetMsgDeliver.class.getSimpleName(), express ); } @Override protected void prepareDispatch( Package that ) throws IOException { } @Override protected boolean sift( Package that ) { return false; } @Override protected void doMessagelet( String szMessagelet, Package that ) { if ( this.getJunction() instanceof ArchMessagram ) { ( (ArchMessagram)this.getJunction() ).contriveByScheme( szMessagelet, (UMCConnection) that ).dispatch(); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageDeliver.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.express.Package; import java.io.IOException; import java.util.Map; public interface MessageDeliver extends Deliver { MessageExpress getExpress(); void toDispatch( Package that ) throws IOException, ServiceException; String getServiceKeyword(); Map<String, MessageHandler> getRoutingTable(); void registerHandler( String addr, MessageHandler controller ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageExpress.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.express.Express; /** * Pinecone Ursus For Java UMCT Message Express [ Uniform Message Control Transmit ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
* ********************************************************** * Uniform Message Control Transmit Integrated Model - Express * Uniform Message Control and Data Transmission Integrated Model - Bus Dispatch Scheduler * ********************************************************** */ public interface MessageExpress extends Express { String getName(); MessageJunction getJunction(); MessageDeliver recruit ( String szName ); MessageExpress register ( Deliver deliver ); MessageExpress fired ( Deliver deliver ); MessageDeliver getDeliver ( String szName ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageHandler.java ================================================ package com.pinecone.hydra.umct; import java.util.List; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.framework.system.functions.Function; public interface MessageHandler extends Function { String getAddressMapping(); @Override Object invoke( Object... args ) throws Exception; List getArgumentsKey(); default boolean isArgsIndexed() { return this.getArgumentsKey() == null; } Object getReturnDescriptor(); String getReturnGenericLabel(); Object getArgumentsDescriptor(); FieldEntity[] getArgumentTemplate(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageJunction.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.servgram.Servgram; public interface MessageJunction extends Servgram { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/Messagelet.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Map; public interface Messagelet extends Pinenut { UMCMessage getReceivedMessage(); UMCTransmit getTransmit(); UMCReceiver getReceiver(); MessageDeliver getMessageDeliver(); UMCConnection getMessagePackage(); ArchMessagram getMessagelet(); Map getConfig(); Object getLetLocal(); void dispatch(); void terminate(); String serviceName(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/Messagram.java ================================================ package com.pinecone.hydra.umct; import java.util.Map; public interface Messagram extends MessageJunction { Messagram addExpress( MessageExpress express ); MessageExpress getExpressByName( String name ); Messagram removeExpress( String name ); Map getProtoConfig(); Map getExpressesConfig(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessagramScanner.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.util.lang.ClassScanner; public interface MessagramScanner extends ClassScanner { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ProtoletMsgDeliver.java ================================================ package com.pinecone.hydra.umct; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.function.Supplier; import com.pinecone.hydra.express.Package; import 
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ProtoletMsgDeliver.java ================================================ package com.pinecone.hydra.umct; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.function.Supplier; import com.pinecone.hydra.express.Package; import com.pinecone.hydra.umct.decipher.PrototypeDecipher; import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder; import com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery; public class ProtoletMsgDeliver extends ArchMsgDeliver { protected CompilerEncoder mCompilerEncoder; protected PMCTContextMachinery mPMCTContextMachinery; public ProtoletMsgDeliver( String name, MessageExpress express, PMCTContextMachinery machinery, CompilerEncoder encoder ) { this( name, express, ArchMessagram.DefaultServiceKey, machinery, encoder ); } public ProtoletMsgDeliver( String name, MessageExpress express, String szServiceKey, PMCTContextMachinery machinery, CompilerEncoder encoder, Supplier<Map<String, MessageHandler>> routingTableSupplier ) { super( name, express, new PrototypeDecipher( szServiceKey, encoder, machinery.getFieldProtobufDecoder() ), szServiceKey, routingTableSupplier ); this.mCompilerEncoder = encoder; this.mPMCTContextMachinery = machinery; } public ProtoletMsgDeliver( String name, MessageExpress express, String szServiceKey, PMCTContextMachinery machinery, CompilerEncoder encoder ) { this( name, express, szServiceKey, machinery, encoder, HashMap::new ); } public ProtoletMsgDeliver( MessageExpress express, PMCTContextMachinery machinery, CompilerEncoder encoder ) { this( ProtoletMsgDeliver.class.getSimpleName(), express, machinery, encoder ); } @Override protected void prepareDispatch( Package that ) throws IOException { } @Override protected boolean sift( Package that ) { return false; } @Override protected void doMessagelet( String szMessagelet, Package that ) { if ( this.getJunction() instanceof ArchMessagram ) { ( (ArchMessagram)this.getJunction() ).contriveByScheme( szMessagelet, (UMCConnection) that ).dispatch(); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ServiceException.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.system.prototype.Pinenut; import java.io.IOException; public class ServiceException extends IOException implements Pinenut { public ServiceException() { super(); } public ServiceException( String message ) { super( message ); } public ServiceException( String message, Throwable cause ) { super( message, cause ); } public ServiceException( Throwable cause ) { super( cause ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ServiceInternalException.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.system.prototype.Pinenut; public class ServiceInternalException extends ServiceException implements Pinenut { public ServiceInternalException() { super(); } public ServiceInternalException( String message ) { super( message ); } public ServiceInternalException( String message, Throwable cause ) { super( message, cause ); } public ServiceInternalException( Throwable cause ) { super( cause ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCConnection.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.express.Package; public interface UMCConnection extends 
Package { MessageDeliver getDeliver(); UMCMessage getMessage(); UMCTransmit getTransmit(); UMCReceiver getReceiver(); Medium getMessageSource(); @Override default String getConsignee() { Object e = this.getMessage().getHead().getExHeaderVal( this.getDeliver().getServiceKeyword() ); if( e instanceof String ) { return (String) e; } return e.toString(); } UMCConnection entrust(Deliver deliver ); void release(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCTExpress.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.umc.msg.UMCMessage; public interface UMCTExpress extends MessageExpress, UMCTExpressHandler { UMCMessage processResponse( UMCMessage request, UMCMessage response ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCTExpressHandler.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; public interface UMCTExpressHandler extends Pinenut { default void onSuccessfulMsgReceived ( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { } default void onErrorMsgReceived ( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { } default void onError ( Object data, Throwable cause ) { } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCTNode.java ================================================ package com.pinecone.hydra.umct; public interface UMCTNode extends MessageJunction { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UlfConnection.java ================================================ package com.pinecone.hydra.umct; import com.pinecone.hydra.umc.msg.*; public class UlfConnection extends ArchUMCConnection { protected Object[] mArguments; UlfConnection( Medium medium, UMCMessage message, UMCTransmit transmit, UMCReceiver receiver ) { super( medium, message, transmit, receiver ); } public UlfConnection( Medium medium, UMCMessage message, UMCTransmit transmit, UMCReceiver receiver, Object[] args ) { this( medium, message, transmit, receiver ); this.mArguments = args; } public Object[] getExArguments() { return this.mArguments; } @Override public void release() { super.release(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UlfMessageHandler.java ================================================ package com.pinecone.hydra.umct; import java.util.List; import com.pinecone.framework.lang.field.FieldEntity; public class UlfMessageHandler implements MessageHandler { @Override public String getAddressMapping() { return null; } @Override public Object invoke( Object... 
args ) throws Exception { return null; } @Override public List<String> getArgumentsKey() { return null; } @Override public Object getReturnDescriptor() { return null; } @Override public String getReturnGenericLabel() { return null; } @Override public Object getArgumentsDescriptor() { return null; } @Override public FieldEntity[] getArgumentTemplate() { return new FieldEntity[0]; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/WolfMCExpress.java ================================================ package com.pinecone.hydra.umct; import java.util.Map; import org.slf4j.Logger; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umct.husky.HuskyServiceErrorMessages; /** * Pinecone Ursus For Java Hydra Ulfar, Wolf Express * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. */ public class WolfMCExpress extends ArchMsgExpress implements UMCTExpress { public WolfMCExpress( String name, MessageJunction messagram, Logger logger ) { super( name, messagram, logger ); } public WolfMCExpress( String name, MessageJunction messagram ) { super( name, messagram ); } public WolfMCExpress( MessageJunction messagram ) { this( null, messagram ); } @Override protected MessageDeliver spawn( String szName ) { // TODO if( szName.equals( "Messagelet" ) ) { return new JSONLetMsgDeliver( this ); } return null; } @Override public UMCMessage processResponse( UMCMessage request, UMCMessage response ) { return response; } @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { UlfConnection connection = new UlfConnection( medium, msg, transmit, receiver, args ); this.onSuccessfulMsgReceived( connection, args ); } protected void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception { int c = 0; for( Map.Entry<String, MessageDeliver> kv : this.mDeliverPool.entrySet() ) { try{ MessageDeliver deliver = kv.getValue(); deliver.toDispatch( connection ); } catch ( DenialServiceException e ) { // Just continue. If you will not take the job, plenty of others will.
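// Count the refusal; after the loop, if every deliver in the pool has refused,
// the express replies HCTP_MAPPING_NOT_FOUND below before releasing the connection.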
++c; } } if( c == this.mDeliverPool.size() ) { connection.getTransmit().sendMsg( HuskyServiceErrorMessages.HCTP_MAPPING_NOT_FOUND ); } connection.release(); } @Override public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { } @Override public void onError( Object ctx, Throwable cause ) { if( cause instanceof Exception ) { this.getLogger().error( "Express error, {}, {}", cause.getMessage(), cause.toString(), cause ); } else { throw new ProvokeHandleException( cause ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/bind/ArgParam.java ================================================ package com.pinecone.hydra.umct.bind; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.springframework.core.annotation.AliasFor; @Target({ElementType.PARAMETER}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface ArgParam { @AliasFor("name") String value() default ""; @AliasFor("value") String name() default ""; boolean required() default true; String defaultValue() default "\n\t\t\n\t\t\n\ue000\ue001\ue002\n\t\t\t\t\n"; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/decipher/HeaderDecipher.java ================================================ package com.pinecone.hydra.umct.decipher; import java.io.IOException; import java.util.Collection; import java.util.List; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umct.UMCConnection; import com.pinecone.hydra.umct.husky.HuskyServiceErrorMessages; public interface HeaderDecipher extends Pinenut { Object eval( Object that, @Nullable Object descriptor, String key ); default String evalString( Object that, @Nullable Object descriptor, String key ) { return this.eval( that, descriptor, key ).toString(); } default Number evalNumber( Object that, @Nullable Object descriptor, String key ) { Object o = this.eval( that, descriptor, key ); if( o instanceof Number ) { return (Number) o; } return null; } Collection<Object> values( Object that, @Nullable Object descriptor, @Nullable Object argTpl ); Object[] evals( Object that, @Nullable Object descriptor, List<String> keys, @Nullable Object argTpl ); String getServicePath( Object that ); default void sendIllegalMessage( UMCConnection connection ) throws IOException { connection.getTransmit().sendMsg( HuskyServiceErrorMessages.HCTP_ILLEGAL_MESSAGE ); } default void sendInternalError( UMCConnection connection ) throws IOException { connection.getTransmit().sendMsg( HuskyServiceErrorMessages.HCTP_INTERNAL_ERROR ); } UMCMessage assembleReturnMsg( Object that, Object descriptor ); }
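A hypothetical service method showing how the @ArgParam annotation above binds named message fields to method parameters; DemoService and the field names are illustrative only, and @Iface is the stereotype annotation from com.pinecone.hydra.umct.stereotype used elsewhere in this module:

import com.pinecone.hydra.umct.bind.ArgParam;
import com.pinecone.hydra.umct.stereotype.Iface;

public class DemoService {
    @Iface
    public String greet( @ArgParam( "who" ) String who,
                         @ArgParam( name = "greeting", required = false ) String greeting ) {
        return ( greeting != null ? greeting : "hello" ) + ", " + who;
    }
}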
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/decipher/JSONHeaderDecipher.java ================================================ package com.pinecone.hydra.umct.decipher; import java.util.Collection; import java.util.List; import java.util.Map; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.wolf.UlfInformMessage; public class JSONHeaderDecipher implements HeaderDecipher { protected String mszServicePathKey; public JSONHeaderDecipher( String szServicePathKey ) { this.mszServicePathKey = szServicePathKey; } @Override public String getServicePath( Object that ) { return this.evalString( that, null, this.mszServicePathKey ); } @Override public Object eval( Object that, Object descriptor, String key ) { return ( (Map) that ).get( key ); } @Override @SuppressWarnings("unchecked") public Collection<Object> values( Object that, Object descriptor, Object argTpl ) { return ( (Map) that ).values(); } @Override public Object[] evals( Object that, Object descriptor, List<String> keys, Object argTpl ) { Map map = (Map) that; Object[] ret = new Object[ keys.size() ]; int i = 0; for( String k : keys ) { ret[ i ] = map.get( k ); ++i; } return ret; } @Override public UMCMessage assembleReturnMsg( Object that, Object descriptor ) { if ( that instanceof UMCMessage ) { return (UMCMessage) that; } if ( that == null ) { return new UlfInformMessage( null, ExtraEncode.JSONString ); } JSONObject jo = new JSONMaptron(); jo.put( "__RESPONSE__", that ); return new UlfInformMessage( jo ); // TODO, Transfer } }
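A small usage sketch of JSONHeaderDecipher above; the service-path key "__SERVICE__" and the header map are hypothetical stand-ins for a decoded JSON message head (the real key is ArchMessagram.DefaultServiceKey):

import java.util.HashMap;
import java.util.Map;

Map<String, Object> head = new HashMap<>();
head.put( "__SERVICE__", "demo/echo" );
head.put( "retry", 3 );
HeaderDecipher decipher = new JSONHeaderDecipher( "__SERVICE__" );
String path  = decipher.getServicePath( head );             // "demo/echo"
Number retry = decipher.evalNumber( head, null, "retry" );  // 3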
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/decipher/PrototypeDecipher.java ================================================ package com.pinecone.hydra.umct.decipher; import java.util.ArrayList; import java.util.Collection; import java.util.List; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.umc.msg.ExtraEncode; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.wolf.UlfInformMessage; import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; public class PrototypeDecipher implements HeaderDecipher { protected volatile static Descriptors.Descriptor PathDescriptor = null; protected static Descriptors.FieldDescriptor PathFieldDescriptor = null; protected String mszServicePathKey; protected CompilerEncoder mCompilerEncoder; protected FieldProtobufDecoder mFieldProtobufDecoder; public PrototypeDecipher( String szServicePathKey, CompilerEncoder encoder, FieldProtobufDecoder decoder ) { this.mszServicePathKey = szServicePathKey; this.mCompilerEncoder = encoder; this.mFieldProtobufDecoder = decoder; } @Override public String getServicePath( Object that ) { try{ Descriptors.Descriptor des = PrototypeDecipher.getPathDescriptor( this.mszServicePathKey ); if ( !( that instanceof byte[] ) ) { Debug.warnSyn( that ); } DynamicMessage ms = DynamicMessage.parseFrom( des, (byte[]) that ); return (String) ms.getField( PrototypeDecipher.PathFieldDescriptor ); } catch ( InvalidProtocolBufferException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public Object eval( Object that, Object descriptor, String key ) { byte[] data = (byte[]) that; Descriptors.Descriptor messageDescriptor = (Descriptors.Descriptor) descriptor; try { DynamicMessage message = DynamicMessage.parseFrom( messageDescriptor, data ); Descriptors.FieldDescriptor fieldDescriptor = messageDescriptor.findFieldByName( key ); if ( fieldDescriptor == null ) { throw new IllegalArgumentException( "Field '" + key + "' not found in the descriptor" ); } return message.getField( fieldDescriptor ); } catch ( InvalidProtocolBufferException e ) { throw new IllegalArgumentException( e ); } } @Override public Collection<Object> values( Object that, Object descriptor, Object argTpl ) { byte[] data = (byte[]) that; Descriptors.Descriptor messageDescriptor = (Descriptors.Descriptor) descriptor; try { DynamicMessage message = DynamicMessage.parseFrom( messageDescriptor, data ); Collection<Object> fieldValues = new ArrayList<>(); Object[] decodes = this.mFieldProtobufDecoder.decodeValues( (FieldEntity[]) argTpl, messageDescriptor, message, this.mCompilerEncoder.getExceptedKeys(), this.mCompilerEncoder.getOptions() ); int i = 0; for ( Object val : decodes ) { if ( i != 0 ) { fieldValues.add( val ); } ++i; } return fieldValues; } catch ( InvalidProtocolBufferException e ) { throw new IllegalArgumentException( e ); } } @Override public Object[] evals( Object that, Object descriptor, List<String> keys, Object argTpl ) { byte[] data = (byte[]) that; Descriptors.Descriptor messageDescriptor = (Descriptors.Descriptor) descriptor; try { DynamicMessage message = DynamicMessage.parseFrom( messageDescriptor, data ); Object[] results = new Object[ keys.size() ]; Object[] decodes = this.mFieldProtobufDecoder.decodeValues( (FieldEntity[]) argTpl, messageDescriptor, message, this.mCompilerEncoder.getExceptedKeys(), this.mCompilerEncoder.getOptions() ); for ( int i = 1; i < keys.size(); ++i ) { String key = keys.get( i ); Descriptors.FieldDescriptor fieldDescriptor = messageDescriptor.findFieldByName( key ); if ( fieldDescriptor == null ) { results[i] = null; // Field not found } else { results[i] = decodes[i]; } } return results; } catch ( InvalidProtocolBufferException e ) { throw new IllegalArgumentException( e ); } } @Override public UMCMessage assembleReturnMsg( Object that, Object descriptor ) { if ( that instanceof UMCMessage ) { return (UMCMessage) that; } if ( that == null ) { return new UlfInformMessage( null, ExtraEncode.Prototype ); } Descriptors.Descriptor retDes = (Descriptors.Descriptor) descriptor; DynamicMessage retMsg = this.mCompilerEncoder.getEncoder().encode( retDes, that, this.mCompilerEncoder.getExceptedKeys(), this.mCompilerEncoder.getOptions() ); return new UlfInformMessage( retMsg.toByteArray() ); // TODO, Transfer } public static Descriptors.Descriptor getPathDescriptor( String fieldName ) { if ( PrototypeDecipher.PathDescriptor == null ) { synchronized ( PrototypeDecipher.class ) { if ( PrototypeDecipher.PathDescriptor == null ) { PrototypeDecipher.PathDescriptor = PrototypeDecipher.createPathDescriptor( "PathDescriptor", fieldName ); PrototypeDecipher.PathFieldDescriptor = PrototypeDecipher.PathDescriptor.findFieldByName( fieldName ); } } } return PrototypeDecipher.PathDescriptor; } public static 
Descriptors.Descriptor createPathDescriptor( String messageName, String fieldName ) { try { DescriptorProtos.FieldDescriptorProto fieldDescriptorProto = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName(fieldName) .setNumber(1) .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING) .build(); DescriptorProtos.DescriptorProto descriptorProto = DescriptorProtos.DescriptorProto.newBuilder() .setName(messageName) .addField(fieldDescriptorProto) .build(); DescriptorProtos.FileDescriptorProto fileDescriptorProto = DescriptorProtos.FileDescriptorProto.newBuilder() .setName(messageName + ".proto") .addMessageType(descriptorProto) .build(); Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom( fileDescriptorProto, new Descriptors.FileDescriptor[]{}); return fileDescriptor.findMessageTypeByName(messageName); } catch ( Descriptors.DescriptorValidationException e ) { throw new ProxyProvokeHandleException( "Failed to create descriptor", e ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/AddressedEntity.java ================================================ package com.pinecone.hydra.umct.husky; import com.pinecone.framework.system.prototype.Pinenut; public interface AddressedEntity extends Pinenut { /** * Full Name / Path */ String getInterceptedPath(); /** * Function / Method / Mapping */ String getInterceptorName(); /** * Namespace / Domain / Package */ String getAddressPath(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ArchAddressedEntity.java ================================================ package com.pinecone.hydra.umct.husky; public abstract class ArchAddressedEntity implements MessagePackage { protected String mszInterceptedPath; public ArchAddressedEntity( String szInterceptedPath ) { this.mszInterceptedPath = szInterceptedPath; } @Override public String getInterceptedPath() { return this.mszInterceptedPath; } @Override public String getInterceptorName() { String[] debris = this.mszInterceptedPath.split(StandardPathSeparator); if( debris.length > 1 ) { return debris [ debris.length - 1 ]; } return this.mszInterceptedPath; } @Override public String getAddressPath() { String interceptor = this.getInterceptorName(); int lastIndexof = this.mszInterceptedPath.lastIndexOf( interceptor ); if( lastIndexof > 0 ) { return this.mszInterceptedPath.substring( 0, lastIndexof ); } return this.mszInterceptedPath; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ArchRequestPackage.java ================================================ package com.pinecone.hydra.umct.husky; public abstract class ArchRequestPackage extends ArchAddressedEntity { public ArchRequestPackage( String szInterceptedPath ) { super( szInterceptedPath ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ArchResponsePackage.java ================================================ package com.pinecone.hydra.umct.husky; public abstract class ArchResponsePackage extends ArchAddressedEntity { public ArchResponsePackage( String szInterceptedPath ) { super( szInterceptedPath ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/HuskyCTPConstants.java 
================================================ package com.pinecone.hydra.umct.husky; /** * Pinecone Ursus For Java Wolf-Husky Control Transmission Protocol * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family. * HCTP is an archetypal implementation of the Uniform Message Control Transmission Protocol (UMCT) * ***************************************************************************************** */ public final class HuskyCTPConstants { public static final String HCTP_DUP_PASSIVE_CHANNEL_KEY = "HCTPPassiveChannel"; public static final int HCTP_CONTROL_MASK = 0xFFB00000; public static final int HCTP_DUP_CONTROL_MASK = 0xFFBEA000; public static final int HCTP_DUP_CONTROL_REGISTER = HCTP_DUP_CONTROL_MASK | 0x00000001; public static final int HCTP_DUP_CONTROL_ALIVE = HCTP_DUP_CONTROL_MASK | 0x00000002; public static final int HCTP_DUP_CONTROL_PASSIVE_REQUEST = HCTP_DUP_CONTROL_MASK | 0x00000010; public static final int HCTP_DUP_CONTROL_PASSIVE_RESPONSE = HCTP_DUP_CONTROL_MASK | 0x00000011; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/HuskyServiceErrorMessages.java ================================================ package com.pinecone.hydra.umct.husky; import com.pinecone.framework.util.Bytes; import com.pinecone.hydra.umc.msg.Status; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.wolf.UlfInformMessage; public final class HuskyServiceErrorMessages { public static final UMCMessage HCTP_INTERNAL_ERROR; public static final UMCMessage HCTP_ILLEGAL_MESSAGE; public static final UMCMessage HCTP_MAPPING_NOT_FOUND; static { HCTP_INTERNAL_ERROR = new UlfInformMessage( Bytes.Empty ); HCTP_INTERNAL_ERROR.getHead().setStatus( Status.InternalError ); HCTP_ILLEGAL_MESSAGE = new UlfInformMessage( Bytes.Empty ); HCTP_ILLEGAL_MESSAGE.getHead().setStatus( Status.IllegalMessage ); HCTP_MAPPING_NOT_FOUND = new UlfInformMessage( Bytes.Empty ); HCTP_MAPPING_NOT_FOUND.getHead().setStatus( Status.MappingNotFound ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/Interceptor.java ================================================ package com.pinecone.hydra.umct.husky; public interface Interceptor extends AddressedEntity { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/MessagePackage.java ================================================ package com.pinecone.hydra.umct.husky; public interface MessagePackage extends AddressedEntity { String StandardPathSeparator = "\\.|\\/|::"; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/RequestPackage.java ================================================ package com.pinecone.hydra.umct.husky; public interface RequestPackage extends MessagePackage { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ResponsePackage.java ================================================ package com.pinecone.hydra.umct.husky; public interface ResponsePackage extends MessagePackage { }
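MessagePackage.StandardPathSeparator above treats '.', '/', and '::' as address separators; a short sketch of the resulting decomposition, using the concrete GenericArgumentRequest defined under husky/function further below (the path itself is a hypothetical example):

MessagePackage pkg = new GenericArgumentRequest( "com.example.DemoService::echo", new Object[]{ "hi" } );
String name = pkg.getInterceptorName(); // "echo"
String addr = pkg.getAddressPath();     // "com.example.DemoService::"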
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ArchIfaceCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import com.pinecone.hydra.umct.stereotype.Iface; import com.pinecone.hydra.umct.stereotype.IfaceUtils; import javassist.ClassPool; import javassist.CtClass; import javassist.CtMethod; import javassist.NotFoundException; public abstract class ArchIfaceCompiler extends ArchIfaceInspector implements IfaceCompiler { public ArchIfaceCompiler( ClassPool classPool, ClassLoader classLoader ) { super( classPool, classLoader ); } protected IfaceMethodSignature resolveMethodSignature( CtMethod method ) { try{ Class[] parameters = this.getParameters( method ); CtClass retType; try { retType = method.getReturnType(); } catch ( NotFoundException e ) { retType = null; } Class returnType; if ( retType != null ) { returnType = this.reinterpretClass( retType.getName() ); } else { returnType = null; } String[] parameterTypes = ArchIfaceCompiler.evalGenericParameterTypes( method ); String returnGType = ArchIfaceCompiler.evalGenericReturnType( method ); return new IfaceMethodSignature( parameters, parameterTypes, returnType, returnGType ); } catch ( ClassNotFoundException e ) { throw new CompileException( e ); } } protected MethodDigest compile ( ClassDigest classDigest, CtMethod method ) { try { IfaceMethodSignature signature = this.resolveMethodSignature( method ); Class[] parameters = signature.getParameters(); Class returnType = signature.getReturnType(); String[] parameterTypes = signature.getParameterGenericTypes(); String returnGType = signature.getReturnGenericType(); MethodDigest ret = new GenericMethodDigest( classDigest, this.getIfaceMethodName( method ), method.getName(), parameters, parameterTypes, returnType, returnGType, null ); List<IfaceParamsDigest> ifaceParamsDigests = this.inspectArgIfaceParams( ret, method ); ret.apply( ifaceParamsDigests ); return ret; } catch ( ClassNotFoundException e ) { throw new CompileException( e ); } } protected String evalLogicClassName( String className ) throws NotFoundException { String szLogicClassName = className; CtClass ctClass = this.mClassPool.get( className ); if ( ctClass != null ) { Iface cIface = this.getAnnotation( ctClass, Iface.class ); String szLogicCN = IfaceUtils.queryIfaceLogicClassName( cIface ); if ( szLogicCN != null ) { szLogicClassName = szLogicCN; } } return szLogicClassName; } @Override public ClassDigest compile( Class clazz, boolean bAsIface ) { return this.compile( clazz.getName(), bAsIface ); } @Override public ClassDigest compile( String className, boolean bAsIface ) { try { List<CtMethod> ifaceMethods = this.inspect( className, bAsIface ); if ( ifaceMethods.isEmpty() ) { return null; } String szLogicClassName = this.evalLogicClassName( className ); ClassDigest classDigest = new GenericClassDigest( szLogicClassName, className ); for ( CtMethod ctMethod : ifaceMethods ) { MethodDigest methodDigest = this.compile( classDigest, ctMethod ); classDigest.addMethod( methodDigest ); } return classDigest; } catch ( NotFoundException e ) { throw new CompileException( e ); } } public static final class IfaceMethodSignature { protected final Class[] mParameters; protected final String[] mParameterGenericTypes; protected final Class mReturnType; protected final String mszReturnGenericType; public IfaceMethodSignature( Class[] parameters, String[] parameterGenericTypes, Class returnType, String returnGenericType ) { this.mParameters = parameters; 
this.mParameterGenericTypes = parameterGenericTypes; this.mReturnType = returnType; this.mszReturnGenericType = returnGenericType; } public Class[] getParameters() { return this.mParameters; } public String[] getParameterGenericTypes() { return this.mParameterGenericTypes; } public Class getReturnType() { return this.mReturnType; } public String getReturnGenericType() { return this.mszReturnGenericType; } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ArchIfaceInspector.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; import com.pinecone.framework.util.StringUtils; import com.pinecone.hydra.umct.mapping.ArchMappingInspector; import com.pinecone.hydra.umct.mapping.ParamsDigest; import com.pinecone.hydra.umct.stereotype.Iface; import com.pinecone.hydra.umct.stereotype.IfaceUtils; import javassist.ClassPool; import javassist.CtClass; import javassist.CtMethod; import javassist.NotFoundException; public abstract class ArchIfaceInspector extends ArchMappingInspector implements IfaceInspector { public ArchIfaceInspector( ClassPool classPool, ClassLoader classLoader ) { super( classPool, classLoader ); } @Override public List<CtMethod> inspect( Class clazz, boolean bAsIface ) throws NotFoundException { return this.inspect( clazz.getName(), bAsIface ); } @Override public List<CtMethod> inspect( String className, boolean bAsIface ) throws NotFoundException { List<CtMethod> ifaceMethods = new ArrayList<>(); CtClass ctClass = this.mClassPool.get( className ); boolean classHasIfaceAnnotation = this.hasOwnAnnotation( ctClass, Iface.class ); for ( CtMethod method : ctClass.getDeclaredMethods() ) { if ( Modifier.isPublic( method.getModifiers() ) ) { if ( bAsIface || classHasIfaceAnnotation || this.methodHasAnnotation( method, Iface.class ) ) { ifaceMethods.add( method ); } } } return ifaceMethods; } @Override public String getIfaceMethodName( CtMethod method ) throws ClassNotFoundException { String ifaceName = method.getName(); Object annotation = method.getAnnotation( Iface.class ); if ( annotation != null ) { Iface iface = (Iface) annotation; String name = IfaceUtils.getIfaceNameFieldVal( iface ); if ( StringUtils.isNoneEmpty( name ) ) { ifaceName = name; } } return ifaceName; } @SuppressWarnings( "unchecked" ) protected List<IfaceParamsDigest> inspectArgIfaceParams( Object methodDigest, CtMethod method ) { return (List<IfaceParamsDigest>) (List<?>) this.inspectArgParams( methodDigest, method ); } @Override protected ParamsDigest newParamsDigest( Object methodDigest, int parameterIndex, String name, String value, String defaultValue, boolean required ) { return new GenericIfaceParamsDigest( (MethodDigest) methodDigest, parameterIndex, this.annotationKeyNormalize( name ), this.annotationKeyNormalize( value ), this.annotationKeyNormalize( defaultValue ), required ); } }
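A sketch of running the inspector above over a hypothetical class name; GenericIfaceInspector is the concrete subclass defined later in this package, and DemoService is illustrative only:

import java.util.List;
import javassist.ClassPool;
import javassist.CtMethod;
import javassist.NotFoundException;

static List<CtMethod> probe( ClassPool pool ) throws NotFoundException {
    GenericIfaceInspector inspector = new GenericIfaceInspector( pool );
    // Public methods carrying @Iface; passing bAsIface = true would take every public method instead.
    return inspector.inspect( "com.example.DemoService", false );
}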
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ArchProtoIfaceCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import javassist.ClassPool; import javassist.CtMethod; import javassist.NotFoundException; public abstract class ArchProtoIfaceCompiler extends ArchIfaceCompiler implements ProtoIfaceCompiler { protected CompilerEncoder mCompilerEncoder; public ArchProtoIfaceCompiler( ClassPool classPool, ClassLoader classLoader, CompilerEncoder encoder ) { super( classPool, classLoader ); this.mCompilerEncoder = encoder; } public ArchProtoIfaceCompiler( ClassPool classPool, ClassLoader classLoader ) { this( classPool, classLoader, CompilerEncoder.DefaultMethodArgumentsCompilerEncoder ); } protected MethodDigest compile ( ClassDigest classDigest, CtMethod method, CompilerEncoder encoder ) { try { IfaceMethodSignature signature = this.resolveMethodSignature( method ); Class[] parameters = signature.getParameters(); Class returnType = signature.getReturnType(); String[] parameterTypes = signature.getParameterGenericTypes(); String returnGType = signature.getReturnGenericType(); MethodDigest ret; if( encoder != null ) { ret = new DynamicMethodPrototype( classDigest, this.getIfaceMethodName( method ), method.getName(), parameters, parameterTypes, returnType, returnGType, encoder, null ); } else { ret = new GenericMethodDigest( classDigest, this.getIfaceMethodName( method ), method.getName(), parameters, parameterTypes, returnType, returnGType, null ); } List<IfaceParamsDigest> ifaceParamsDigests = this.inspectArgIfaceParams( ret, method ); ret.apply( ifaceParamsDigests ); return ret; } catch ( ClassNotFoundException e ) { throw new CompileException( e ); } } @Override public ClassDigest compile ( String className, boolean bAsIface ) { return this.compile( className, bAsIface, this.mCompilerEncoder ); } @Override public ClassDigest compile ( Class clazz, boolean bAsIface ) { return this.compile( clazz.getName(), bAsIface ); } @Override public ClassDigest compile( Class clazz, boolean bAsIface, CompilerEncoder encoder ) { return this.compile( clazz.getName(), bAsIface, encoder ); } @Override public ClassDigest reinterpret( Class clazz, boolean bAsIface ) { return this.compile( clazz, bAsIface, null ); } @Override public ClassDigest reinterpret( String className, boolean bAsIface ) { return this.compile( className, bAsIface, null ); } @Override public ClassDigest compile( String className, boolean bAsIface, CompilerEncoder encoder ) { try { List<CtMethod> ifaceMethods = this.inspect( className, bAsIface ); if ( ifaceMethods.isEmpty() ) { return null; } String szLogicClassName = this.evalLogicClassName( className ); ClassDigest classDigest = new GenericClassDigest( szLogicClassName, className ); for ( CtMethod ctMethod : ifaceMethods ) { MethodDigest methodDigest = this.compile( classDigest, ctMethod, encoder ); classDigest.addMethod( methodDigest ); } return classDigest; } catch ( NotFoundException e ) { throw new CompileException( e ); } } @Override public CompilerEncoder getCompilerEncoder() { return this.mCompilerEncoder; } }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/BytecodeIfaceCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.ArrayList; import java.util.List; import com.pinecone.hydra.umct.mapping.MappingDigest; import javassist.ClassPool; public class BytecodeIfaceCompiler extends ArchProtoIfaceCompiler implements ProtoInterfacialCompiler { public BytecodeIfaceCompiler( ClassPool classPool, ClassLoader classLoader, CompilerEncoder encoder ) { super( classPool, classLoader, encoder ); } public BytecodeIfaceCompiler( ClassPool classPool, ClassLoader classLoader ) { super( classPool, classLoader ); } public BytecodeIfaceCompiler( ClassPool classPool ) { super( classPool, Thread.currentThread().getContextClassLoader() ); } @Override public IfaceMappingDigest compile( MappingDigest digest ) { return new GenericIfaceMappingDigest( digest, this.mCompilerEncoder ); } @Override public List<IfaceMappingDigest> compile( List<MappingDigest> digests ) { List<IfaceMappingDigest> result = new ArrayList<>( digests.size() ); for ( MappingDigest digest : digests ) { result.add( this.compile( digest ) ); } return result; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ClassDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; public interface ClassDigest extends Pinenut { String getClassName(); String getPhyClassName(); void addMethod( MethodDigest methodDigest ); List<MethodDigest> getMethodDigests(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/CompileException.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import com.pinecone.hydra.umct.mapping.InspectException; public class CompileException extends InspectException { public CompileException () { super(); } public CompileException ( String message ) { super( message ); } public CompileException ( String message, Throwable cause ) { super( message, cause ); } public CompileException ( Throwable cause ) { super( cause ); } protected CompileException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/CompilerEncoder.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.Set; import com.google.protobuf.Descriptors; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; import com.pinecone.ulf.util.protobuf.Options; public interface CompilerEncoder extends Pinenut { CompilerEncoder DefaultMethodArgumentsCompilerEncoder = new GenericCompilerEncoder( "_Arguments" ); FieldProtobufEncoder getEncoder(); Options getOptions(); String getEntityExtend(); Set<String> getExceptedKeys(); Descriptors.Descriptor transform( DataStructureEntity entity ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/DigestIfaceCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.ArrayList; import java.util.List; import com.pinecone.hydra.umct.mapping.MappingDigest; import javassist.ClassPool; public class DigestIfaceCompiler extends ArchIfaceCompiler implements InterfacialCompiler { public DigestIfaceCompiler( ClassPool classPool, ClassLoader classLoader ) { super( classPool, classLoader ); } public DigestIfaceCompiler( ClassPool classPool ) { super( classPool, Thread.currentThread().getContextClassLoader() ); } @Override public IfaceMappingDigest compile( MappingDigest digest ) { return new GenericIfaceMappingDigest( digest, CompilerEncoder.DefaultMethodArgumentsCompilerEncoder ); } @Override public List<IfaceMappingDigest> compile( List<MappingDigest> digests ) { List<IfaceMappingDigest> result = new ArrayList<>( digests.size() ); for ( MappingDigest digest : digests ) { result.add( this.compile( digest ) ); } return result; } }
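A sketch of driving DigestIfaceCompiler above end-to-end; the class name is hypothetical, and compile( ) returns null when no iface methods are found:

import javassist.ClassPool;

DigestIfaceCompiler compiler = new DigestIfaceCompiler( ClassPool.getDefault() );
ClassDigest digest = compiler.compile( "com.example.DemoService", true ); // treat every public method as an iface
if ( digest != null ) {
    for ( MethodDigest m : digest.getMethodDigests() ) {
        System.out.println( m.getFullName() ); // logic class name + separator + iface method name
    }
}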
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/DynamicMethodPrototype.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import com.google.protobuf.Descriptors; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.hydra.umct.husky.function.ArgumentRequest; import com.pinecone.hydra.umct.husky.function.GenericArgumentRequest; public class DynamicMethodPrototype extends GenericMethodDigest implements MethodPrototype { protected Descriptors.Descriptor mArgumentsDescriptor; protected Descriptors.Descriptor mReturnDescriptor; public DynamicMethodPrototype( ClassDigest classDigest, String szName, String szRawName, Class[] parameters, String[] parametersGenericLabels, Class returnType, String genericRLabel, CompilerEncoder encoder, List ifaceParamsDigests ) { super( classDigest, szName, szRawName, parameters, parametersGenericLabels, returnType, genericRLabel, ifaceParamsDigests ); if( this.mArgumentTemplate != null ) { this.mArgumentsDescriptor = encoder.transform( this.mArgumentTemplate ); } if( this.mReturnType != null && !this.mReturnType.equals( void.class ) ) { this.mReturnDescriptor = encoder.getEncoder().transform( this.mReturnType, this.getGenericReturnTypeLabel(), null, encoder.getExceptedKeys() ); } } @Override public Descriptors.Descriptor getArgumentsDescriptor() { return this.mArgumentsDescriptor; } @Override public Descriptors.Descriptor getReturnDescriptor() { return this.mReturnDescriptor; } @Override public ArgumentRequest conformRequest() { DataStructureEntity protoEntity = this.getArgumentTemplate(); return new GenericArgumentRequest( protoEntity.getName(), protoEntity ); } @Override public ArgumentRequest conformRequest( Object[] args ) { DataStructureEntity protoEntity = this.getArgumentTemplate(); ArgumentRequest request = new GenericArgumentRequest( protoEntity.getName(), protoEntity ); if( args != null ) { for ( int i = 0; i < args.length; ++i ) { request.setField( i, args[ i ] ); } } return request; } @Override public String toJSONString() { List argFields = null; if( this.getArgumentsDescriptor() != null ) { argFields = this.getArgumentsDescriptor().getFields(); } List retFields = null; if( this.getReturnDescriptor() != null ) { retFields = this.getReturnDescriptor().getFields(); } return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "name" , this.getName() ), new KeyValue<>( "rawName" , this.getRawName() ), new KeyValue<>( "protoArguments" , argFields ), new KeyValue<>( "protoReturn" , retFields ), new KeyValue<>( "__ClassName__" , this.className() ), } ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericClassDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.ArrayList; import java.util.List; public class GenericClassDigest implements ClassDigest { protected String mszClassName; protected String mszPhyClassName; protected List mMethodDigests; public GenericClassDigest( String szClassName, String szPhyClassName ) { this.mszClassName = szClassName; this.mszPhyClassName = szPhyClassName; this.mMethodDigests = new ArrayList<>(); } public GenericClassDigest( String szClassName ) { this( szClassName, 
szClassName ); } @Override public String getClassName() { return this.mszClassName; } @Override public String getPhyClassName() { return this.mszPhyClassName; } @Override public void addMethod( MethodDigest methodDigest ) { this.mMethodDigests.add( methodDigest ); } @Override public List getMethodDigests() { return this.mMethodDigests; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericCompilerEncoder.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.Set; import com.google.protobuf.Descriptors; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.unit.Units; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufEncoder; import com.pinecone.ulf.util.protobuf.Options; public class GenericCompilerEncoder implements CompilerEncoder { protected FieldProtobufEncoder mEncoder; protected Options mOptions; protected Set mExceptedKeys; protected String mszEntityExtend; public GenericCompilerEncoder( FieldProtobufEncoder encoder, Options options, Set exceptedKeys, String szEntityExtend ) { this.mEncoder = encoder; this.mOptions = options; this.mExceptedKeys = exceptedKeys; this.mszEntityExtend = szEntityExtend; } public GenericCompilerEncoder( String szEntityExtend ) { this( new GenericFieldProtobufEncoder(), Options.DefaultOptions, Units.emptySet(), szEntityExtend ); } @Override public FieldProtobufEncoder getEncoder() { return this.mEncoder; } @Override public Options getOptions() { return this.mOptions; } @Override public Set getExceptedKeys() { return this.mExceptedKeys; } @Override public String getEntityExtend() { return this.mszEntityExtend; } @Override public Descriptors.Descriptor transform( DataStructureEntity entity ) { String szEntityName = this.getOptions().normalizeDescriptorName( entity.getName() + this.getEntityExtend() ); return this.getEncoder().transform( entity.getSegments(), szEntityName, this.getExceptedKeys(), this.getOptions() ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericIfaceInspector.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import javassist.ClassPool; public class GenericIfaceInspector extends ArchIfaceInspector implements IfaceInspector { public GenericIfaceInspector( ClassPool classPool, ClassLoader classLoader ) { super( classPool, classLoader ); } public GenericIfaceInspector( ClassPool classPool ) { super( classPool, Thread.currentThread().getContextClassLoader() ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericIfaceMappingDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.lang.reflect.Method; import java.util.List; import com.google.protobuf.Descriptors; import com.pinecone.hydra.umc.msg.UMCMethod; import com.pinecone.hydra.umct.mapping.GenericMappingDigest; import com.pinecone.hydra.umct.mapping.MappingDigest; import com.pinecone.hydra.umct.mapping.ParamsDigest; public class GenericIfaceMappingDigest extends GenericMappingDigest implements IfaceMappingDigest { protected Descriptors.Descriptor mArgumentsDescriptor; protected Descriptors.Descriptor mReturnDescriptor; public 
GenericIfaceMappingDigest( String[] szAddresses, Class[] parameters, String[] parametersGenericLabels, Class returnType, String szReturnGenericTypeLabel, Class classType, Method method, List<ParamsDigest> paramsDigests, UMCMethod[] interceptMethods, CompilerEncoder encoder ) { super( szAddresses, parameters, parametersGenericLabels, returnType, szReturnGenericTypeLabel, classType, method, paramsDigests, interceptMethods ); this.encode( encoder ); } public GenericIfaceMappingDigest( MappingDigest mappingDigest, CompilerEncoder encoder ) { this.mszAddresses = mappingDigest.getAddresses(); this.mInterceptMethods = mappingDigest.getInterceptMethods(); this.mArgumentTemplate = mappingDigest.getArgumentTemplate(); this.mReturnType = mappingDigest.getReturnType(); this.mClassType = mappingDigest.getClassType(); this.mMappedMethod = mappingDigest.getMappedMethod(); this.mParamsDigests = mappingDigest.getParamsDigests(); this.mszReturnGenericTypeLabel = mappingDigest.getReturnGenericTypeLabel(); this.encode( encoder ); } protected void encode( CompilerEncoder encoder ) { if( this.mArgumentTemplate != null ) { this.mArgumentsDescriptor = encoder.transform( this.mArgumentTemplate ); } if( this.mReturnType != null && !this.mReturnType.equals( void.class ) ) { this.mReturnDescriptor = encoder.getEncoder().transform( this.mReturnType, this.getReturnGenericTypeLabel(), null, encoder.getExceptedKeys() ); } } @Override public Descriptors.Descriptor getArgumentsDescriptor() { return this.mArgumentsDescriptor; } @Override public Descriptors.Descriptor getReturnDescriptor() { return this.mReturnDescriptor; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericIfaceParamsDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import com.pinecone.hydra.umct.mapping.GenericParamsDigest; public class GenericIfaceParamsDigest extends GenericParamsDigest implements IfaceParamsDigest { protected MethodDigest mMethodDigest; public GenericIfaceParamsDigest( MethodDigest methodDigest, int parameterIndex, String name, String value, String defaultValue, boolean required ) { super( parameterIndex, name, value, defaultValue, required ); this.mMethodDigest = methodDigest; } @Override public MethodDigest getMethodDigest() { return this.mMethodDigest; } @Override public String toJSONString() { return super.toJSONString(); } @Override public String toString() { return this.toJSONString(); } }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericMethodDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.lang.field.GenericStructure; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.umct.mapping.MethodDigestUtils; import com.pinecone.hydra.umct.husky.function.MethodTemplates; public class GenericMethodDigest implements MethodDigest { protected ClassDigest mClassDigest; protected String mszName; protected String mszRawName; protected DataStructureEntity mArgumentTemplate; protected Class mReturnType; protected String mszGenericReturnTypeLabel; protected List<IfaceParamsDigest> mIfaceParamsDigests; public GenericMethodDigest( ClassDigest classDigest, String szName, String szRawName, Class[] parameters, String[] parametersGenericLabels, Class returnType, String genericRLabel, List<IfaceParamsDigest> ifaceParamsDigests ) { this.mClassDigest = classDigest; this.mszName = szName; this.mszRawName = szRawName; this.mReturnType = returnType; this.mIfaceParamsDigests = ifaceParamsDigests; this.mszGenericReturnTypeLabel = genericRLabel; String szInterceptedPath = classDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + szName; if( parameters == null || parameters.length == 0 ) { this.mArgumentTemplate = new GenericStructure( szInterceptedPath, 0 ); } else { this.mArgumentTemplate = MethodTemplates.from( null, szInterceptedPath, parameters, parametersGenericLabels ); } } public GenericMethodDigest( ClassDigest classDigest, String szName, Class[] parameters, String[] parametersGenericLabels, Class returnType, String genericRLabel, List<IfaceParamsDigest> ifaceParamsDigests ) { this( classDigest, szName, szName, parameters, parametersGenericLabels, returnType, genericRLabel, ifaceParamsDigests ); } @Override public String getGenericReturnTypeLabel() { return this.mszGenericReturnTypeLabel; } @Override public void applyGenericReturnTypeLabel( String genericTypeLabel ) { this.mszGenericReturnTypeLabel = genericTypeLabel; } @Override public void apply( List<IfaceParamsDigest> ifaceParamsDigests ) { this.mIfaceParamsDigests = ifaceParamsDigests; } @Override public List<String> getArgumentsKey() { return MethodDigestUtils.getArgumentsKey( this.getParamsDigests(), this.getArgumentTemplate() ); } @Override public ClassDigest getClassDigest() { return this.mClassDigest; } @Override public String getName() { return this.mszName; } @Override public String getFullName() { return this.mClassDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + this.getName(); } @Override public String getRawName() { return this.mszRawName; } @Override public DataStructureEntity getArgumentTemplate() { return this.mArgumentTemplate; } @Override public Class getReturnType() { return this.mReturnType; } @Override public List<IfaceParamsDigest> getParamsDigests() { return this.mIfaceParamsDigests; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "name", this.getName() ), new KeyValue<>( "rawName", this.getRawName() ), new KeyValue<>( "arguments", this.getArgumentTemplate().getSegments() ), new KeyValue<>( "return", this.getReturnType() ), } ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; public interface IfaceCompiler extends IfaceInspector { ClassDigest compile ( String className, boolean bAsIface ); ClassDigest compile ( Class clazz, boolean bAsIface ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceInspector.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import javassist.CtMethod; import javassist.NotFoundException; public interface IfaceInspector extends Pinenut { List<CtMethod> inspect( Class clazz, boolean bAsIface ) throws NotFoundException; List<CtMethod> inspect( String className, boolean bAsIface ) throws NotFoundException; String getIfaceMethodName( CtMethod method ) throws ClassNotFoundException; }
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceMappingDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import com.google.protobuf.Descriptors; import com.pinecone.hydra.umct.mapping.MappingDigest; public interface IfaceMappingDigest extends MappingDigest { Descriptors.Descriptor getArgumentsDescriptor(); Descriptors.Descriptor getReturnDescriptor(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceParamsDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import com.pinecone.hydra.umct.mapping.ParamsDigest; public interface IfaceParamsDigest extends ParamsDigest { MethodDigest getMethodDigest(); default ClassDigest getClassDigest() { return this.getMethodDigest().getClassDigest(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/InterfacialCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import com.pinecone.hydra.umct.mapping.MappingDigest; public interface InterfacialCompiler extends IfaceCompiler { IfaceMappingDigest compile( MappingDigest digest ); List<IfaceMappingDigest> compile( List<MappingDigest> digests ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/MethodDigest.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import java.util.List; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.ReflectionUtils; public interface MethodDigest extends Pinenut { ClassDigest getClassDigest(); String getName(); String getFullName(); String getRawName(); DataStructureEntity getArgumentTemplate(); Class getReturnType(); String getGenericReturnTypeLabel(); default String[] getGenericReturnTypeNames() { return ReflectionUtils.extractGenericClassNames( this.getGenericReturnTypeLabel() ); } void applyGenericReturnTypeLabel( String genericTypeLabel ); default boolean hasDeclaredGenericReturnType() { return this.getGenericReturnTypeLabel() != null && this.getGenericReturnTypeLabel().contains( "<" ) && this.getGenericReturnTypeLabel().contains( ">" ); } List<IfaceParamsDigest> getParamsDigests(); void apply( List<IfaceParamsDigest> ifaceParamsDigests ); List<String> getArgumentsKey(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/MethodPrototype.java ================================================ package com.pinecone.hydra.umct.husky.compiler; import com.google.protobuf.Descriptors; import com.pinecone.hydra.umct.husky.function.ArgumentRequest; public interface MethodPrototype extends MethodDigest { Descriptors.Descriptor getArgumentsDescriptor(); Descriptors.Descriptor getReturnDescriptor(); ArgumentRequest conformRequest(); ArgumentRequest conformRequest( Object[] args ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ProtoIfaceCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; public interface 
ProtoIfaceCompiler extends IfaceCompiler { CompilerEncoder getCompilerEncoder(); ClassDigest compile ( String className, boolean bAsIface, CompilerEncoder encoder ); ClassDigest compile ( Class clazz, boolean bAsIface, CompilerEncoder encoder ); ClassDigest reinterpret ( String className, boolean bAsIface ); ClassDigest reinterpret ( Class clazz, boolean bAsIface ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ProtoInterfacialCompiler.java ================================================ package com.pinecone.hydra.umct.husky.compiler; public interface ProtoInterfacialCompiler extends InterfacialCompiler, ProtoIfaceCompiler { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/ArgumentRequest.java ================================================ package com.pinecone.hydra.umct.husky.function; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.hydra.umct.husky.RequestPackage; public interface ArgumentRequest extends RequestPackage { void from( Class[] parameters ); void from( Object[] args ); void conform( DataStructureEntity tpl ); DataStructureEntity getDataStructureEntity() ; FieldEntity[] getSegments() ; void setField( int index, String key, Object val ) ; void setField( int index, String key, Object val, String genericLabel ); void setField( int index, String key, Class type ) ; void setField( int index, String key, Class type, String genericLabel ); void setField( int index, Object val ) ; FieldEntity getField( int index ); FieldEntity findField( String key ); ArgumentRequest instancing(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/FunctionMold.java ================================================ package com.pinecone.hydra.umct.husky.function; import com.pinecone.framework.system.prototype.Pinenut; public interface FunctionMold extends Pinenut { ArgumentRequest getArgumentForm(); ReturnResponse getReturnForm(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/GenericArgumentRequest.java ================================================ package com.pinecone.hydra.umct.husky.function; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.framework.lang.field.GenericFieldEntity; import com.pinecone.framework.lang.field.GenericStructure; import com.pinecone.hydra.umct.husky.ArchRequestPackage; public class GenericArgumentRequest extends ArchRequestPackage implements ArgumentRequest { protected DataStructureEntity mDataStructureEntity; protected GenericArgumentRequest( String szInterceptedPath ) { super( szInterceptedPath ); } public GenericArgumentRequest( DataStructureEntity dataStructureEntity, String szInterceptedPath ) { super( szInterceptedPath ); this.mDataStructureEntity = dataStructureEntity; } public GenericArgumentRequest( String szInterceptedPath, Class[] parameters ) { super( szInterceptedPath ); this.from( parameters ); } public GenericArgumentRequest( String szInterceptedPath, Object[] args ) { super( szInterceptedPath ); this.from( args ); } public GenericArgumentRequest( String szInterceptedPath, DataStructureEntity tpl ) { super( szInterceptedPath ); 
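// Derive this request's field layout from the given template: `conform` re-instantiates the template's
// typed slots under this request's intercepted path; concrete argument values are supplied later via setField(...).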
this.conform( tpl ); } public GenericArgumentRequest( FieldEntity[] segments ) { this( new GenericStructure( segments, 0, 1 ) ); } public GenericArgumentRequest( DataStructureEntity entity ) { super( (String) entity.getSegments()[ 0 ].getValue() ); this.mDataStructureEntity = entity; } @Override public void from( Class[] parameters ) { this.mDataStructureEntity = MethodTemplates.from( this.mDataStructureEntity, this.mszInterceptedPath, parameters ); } @Override public void from( Object[] args ) { if( this.mDataStructureEntity == null || args.length != this.mDataStructureEntity.size() ) { this.mDataStructureEntity = new GenericStructure( this.mszInterceptedPath, args.length ); } for ( int i = 0; i < args.length; ++i ) { this.mDataStructureEntity.setDataField( i, args[ i ].getClass().getName().replace( ".", "_" ) + "_" + i, args[ i ] ); } } @Override public void conform( DataStructureEntity tpl ) { this.mDataStructureEntity = MethodTemplates.conform( tpl, this.mszInterceptedPath ); } @Override public DataStructureEntity getDataStructureEntity() { return this.mDataStructureEntity; } @Override public FieldEntity[] getSegments() { return this.mDataStructureEntity.getSegments(); } @Override public void setField( int index, String key, Object val ) { this.mDataStructureEntity.setDataField( index, key, val ); } @Override public void setField( int index, String key, Object val, String genericLabel ) { this.mDataStructureEntity.setDataField( index, key, val, genericLabel ); } @Override public void setField( int index, String key, Class type ) { this.mDataStructureEntity.setDataField( index, key, type ); } @Override public void setField( int index, String key, Class type, String genericLabel ) { this.mDataStructureEntity.setDataField( index, key, type, genericLabel ); } @Override public void setField( int index, Object val ) { FieldEntity field = this.getField( index ); if( field != null ) { field.setValue( val ); } else { this.setField( index, Integer.toString( index ), val ); } } @Override public FieldEntity getField( int index ) { return this.mDataStructureEntity.getDataField( index ); } @Override public FieldEntity findField( String key ) { return this.mDataStructureEntity.findDataField( key ); } @Override public String toJSONString() { return this.mDataStructureEntity.toJSONString(); } @Override public ArgumentRequest instancing() { FieldEntity[] proto = this.getSegments(); FieldEntity[] ins = new FieldEntity[ proto.length ]; for ( int i = 1; i < proto.length; ++i ) { FieldEntity entity = proto[ i ]; ins[ i ] = new GenericFieldEntity( entity.getName(), null, entity.getType(), entity.getGenericTypeLabel() ); } ins[ 0 ] = proto[ 0 ]; return new GenericArgumentRequest( ins ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/GenericFunctionMold.java ================================================ package com.pinecone.hydra.umct.husky.function; public class GenericFunctionMold implements FunctionMold { protected ArgumentRequest mArgumentRequest; protected ReturnResponse mReturnResponse; public GenericFunctionMold( ArgumentRequest request, ReturnResponse returnResponse ) { this.mArgumentRequest = request; this.mReturnResponse = returnResponse; } @Override public ArgumentRequest getArgumentForm() { return this.mArgumentRequest; } @Override public ReturnResponse getReturnForm() { return this.mReturnResponse; } } ================================================ FILE: 
Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/GenericReturnResponse.java ================================================ package com.pinecone.hydra.umct.husky.function; import com.pinecone.hydra.umct.husky.ArchResponsePackage; public class GenericReturnResponse extends ArchResponsePackage implements ReturnResponse { protected T mReturnTarget; public GenericReturnResponse( String szInterceptedPath, T returnVal ) { super( szInterceptedPath ); this.mReturnTarget = returnVal; } @Override public T getReturn() { return this.mReturnTarget; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/MethodTemplates.java ================================================ package com.pinecone.hydra.umct.husky.function; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.framework.lang.field.GenericStructure; import com.pinecone.ulf.util.protobuf.WolfProtobufConstants; public final class MethodTemplates { public static DataStructureEntity from( DataStructureEntity tpl, String szInterceptedPath, Class[] parameters, String[] parametersGenericLabels ) { if( tpl == null || parameters.length != tpl.size() ) { tpl = new GenericStructure( szInterceptedPath, parameters.length ); } int i = 0; if ( parametersGenericLabels == null || parametersGenericLabels.length == 0 ) { for( Class parameter : parameters ) { MethodTemplates.setDataField( i, parameter, tpl ); ++i; } } else { for( Class parameter : parameters ) { MethodTemplates.setDataField( i, parameter, tpl, parametersGenericLabels[ i ] ); ++i; } } return tpl; } public static DataStructureEntity from( DataStructureEntity tpl, String szInterceptedPath, Class[] parameters ) { if( tpl == null || parameters.length != tpl.size() ) { tpl = new GenericStructure( szInterceptedPath, parameters.length ); } int i = 0; for( Class parameter : parameters ) { MethodTemplates.setDataField( i, parameter, tpl ); ++i; } return tpl; } public static DataStructureEntity conform( DataStructureEntity tpl, String szInterceptedPath ) { if( tpl == null ) { return null; } DataStructureEntity neo = new GenericStructure( szInterceptedPath, tpl.size() ); FieldEntity[] segs = tpl.getSegments(); int j = 0; for ( int i = neo.getDataOffset(); i < segs.length; ++i ) { FieldEntity seg = segs[ i ]; Class parameter = seg.getType(); MethodTemplates.setDataField( j, parameter, neo ); ++j; } return neo; } public static DataStructureEntity conform( DataStructureEntity tpl, String szInterceptedPath, String genericLabel ) { if( tpl == null ) { return null; } DataStructureEntity neo = new GenericStructure( szInterceptedPath, tpl.size() ); FieldEntity[] segs = tpl.getSegments(); int j = 0; for ( int i = neo.getDataOffset(); i < segs.length; ++i ) { FieldEntity seg = segs[ i ]; Class parameter = seg.getType(); MethodTemplates.setDataField( j, parameter, neo, genericLabel ); ++j; } return neo; } protected static void setDataField( int i, Class parameter, DataStructureEntity tpl, String genericLabel ) { String szNormalName = parameter.getName(); if( szNormalName.startsWith( "[" ) ) { szNormalName = szNormalName.replace( "[", "" ); szNormalName += WolfProtobufConstants.ArrayTransformedName; } szNormalName = szNormalName.replaceAll( "[^a-zA-Z0-9_]", "_" ) + "_" + i; tpl.setDataField( i, szNormalName, parameter, genericLabel ); } protected static void setDataField( int i, Class parameter, DataStructureEntity tpl ) {
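// Normalizes the JVM type name into a unique field key: array prefixes ( '[' ) are stripped and the
// WolfProtobufConstants.ArrayTransformedName suffix appended, every non-word character becomes '_',
// and the parameter index is attached. E.g. int.class at index 2 yields "int_2"; the exact form for
// array types depends on the value of the suffix constant.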
String szNormalName = parameter.getName(); if( szNormalName.startsWith( "[" ) ) { szNormalName = szNormalName.replace( "[", "" ); szNormalName += WolfProtobufConstants.ArrayTransformedName; } tpl.setDataField( i, szNormalName.replaceAll( "[^a-zA-Z0-9_]", "_" ) + "_" + i, parameter ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/ReturnResponse.java ================================================ package com.pinecone.hydra.umct.husky.function; import com.pinecone.hydra.umct.husky.ResponsePackage; public interface ReturnResponse extends ResponsePackage { T getReturn(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/heartbeat/HeartbeatConstants.java ================================================ package com.pinecone.hydra.umct.husky.heartbeat; import com.pinecone.hydra.umc.msg.InformMessage; import com.pinecone.hydra.umc.wolf.UlfInstructMessage; public final class HeartbeatConstants { public static final int HCTP_HEART_CONTROL_MASK = 0xFFBEB000; // 0x000EB000 | 0xFFB00000 public static final int HCTP_HEART_REQUEST_ALIVE = HCTP_HEART_CONTROL_MASK | 0x00000010; public static final int HCTP_HEART_RESPONSE_ACK = HCTP_HEART_CONTROL_MASK | 0x00000011; public static final InformMessage HCTP_HEART_ALIVE = new UlfInstructMessage( HCTP_HEART_REQUEST_ALIVE ); public static final InformMessage HCTP_HEART_ACK = new UlfInstructMessage( HCTP_HEART_RESPONSE_ACK ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/heartbeat/HuskyHeartbeatControl.java ================================================ package com.pinecone.hydra.umct.husky.heartbeat; import java.io.IOException; import java.util.Collection; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.vita.HeartbeatControl; import io.netty.util.HashedWheelTimer; import io.netty.util.Timeout; import io.netty.util.TimerTask; public class HuskyHeartbeatControl implements HeartbeatControl { protected final HashedWheelTimer mTimer; protected final ConcurrentMap mHeartbeatTasks; protected static int nextPowerOfTwo( int num ) { int n = 1; while (n < num) { n <<= 1; } return n; } public static HashedWheelTimer createTimer( long heartIntervalMillis ) { // tickDuration ∈ [ 100ms, 1s ] // Tick too short (<100ms) results in high CPU polling. // Tick too long (>1s) causes significant heartbeat delay. // The optimal range is between 250ms and 800ms, matching the clamp below. // 40 is an empirical balancing value, ensuring that heartbeats are evenly distributed on the time wheel (helps avoid instantaneous load spikes). long tickDuration = Math.min( Math.max( heartIntervalMillis / 40, 250 ), 800 ); // Ensure the time wheel covers the full heartbeat interval.
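// A worked example of this sizing (values derived from the formula above):
//   heartIntervalMillis = 30000  ->  tickDuration = clamp( 30000 / 40, 250, 800 ) = 750ms,
//                                    ticksPerWheel = ceil( 30000 / 750 ) = 40 -> nextPowerOfTwo -> 64
//   heartIntervalMillis = 5000   ->  tickDuration = clamp( 5000 / 40, 250, 800 ) = 250ms,
//                                    ticksPerWheel = ceil( 5000 / 250 ) = 20 -> nextPowerOfTwo -> 32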
int ticksPerWheel = (int) Math.ceil( (double) heartIntervalMillis / tickDuration ); // Adjust ticksPerWheel => 2^N ticksPerWheel = HuskyHeartbeatControl.nextPowerOfTwo( ticksPerWheel ); return new HashedWheelTimer( tickDuration, TimeUnit.MILLISECONDS, ticksPerWheel ); } public HuskyHeartbeatControl( long heartIntervalMillis ) { this.mTimer = HuskyHeartbeatControl.createTimer( heartIntervalMillis ); //this.mTimer = new HashedWheelTimer( 100, TimeUnit.MILLISECONDS, 512 ); this.mHeartbeatTasks = new ConcurrentHashMap<>(); } @Override public void registerChannels( Collection channels, long intervalMillis ) { for ( ChannelControlBlock ccb : channels ) { this.registerChannel( ccb, intervalMillis ); } } @Override public void registerChannel( ChannelControlBlock ccb, long intervalMillis ) { if ( this.mHeartbeatTasks.containsKey( ccb ) ) { return; } Timeout timeout = this.scheduleHeartbeat( ccb, intervalMillis ); this.mHeartbeatTasks.put( ccb, timeout ); } @Override public void deregisterChannel( ChannelControlBlock ccb ) { Timeout timeout = this.mHeartbeatTasks.remove( ccb ); if ( timeout != null ) { timeout.cancel(); } } protected Timeout scheduleHeartbeat( ChannelControlBlock ccb, long intervalMillis ) { return this.mTimer.newTimeout( new HeartbeatTask( ccb, intervalMillis ), intervalMillis, TimeUnit.MILLISECONDS ); } protected class HeartbeatTask implements TimerTask { private final ChannelControlBlock mChannelControlBlock; private final long mIntervalMillis; HeartbeatTask( ChannelControlBlock ccb, long intervalMillis ) { this.mChannelControlBlock = ccb; this.mIntervalMillis = intervalMillis; } @Override public void run( Timeout timeout ) throws IOException { if ( !HuskyHeartbeatControl.this.mHeartbeatTasks.containsKey( this.mChannelControlBlock ) ) { return; } if ( !this.mChannelControlBlock.isShutdown() ) { HuskyHeartbeatControl.this.sendHeartbeat( this.mChannelControlBlock ); Timeout newTimeout = HuskyHeartbeatControl.this.scheduleHeartbeat( this.mChannelControlBlock, this.mIntervalMillis ); HuskyHeartbeatControl.this.mHeartbeatTasks.put( mChannelControlBlock, newTimeout ); } } } protected void sendHeartbeat( ChannelControlBlock ccb ) throws IOException { if ( ccb.getChannelStatus().isAsynAvailable() && !ccb.isShutdown() ) { ccb.sendMsg( HeartbeatConstants.HCTP_HEART_ALIVE, true ); } } @Override public boolean interceptFeedback( ChannelControlBlock block, UMCMessage msg ) throws IOException { int nControlBits = msg.getHead().getControlBits(); if ( nControlBits == HeartbeatConstants.HCTP_HEART_RESPONSE_ACK ) { //Debug.traceSyn( msg ); // Do nothing. 
[Keep the format] return true; } return false; } @Override public void shutdown() { this.mTimer.stop(); this.mHeartbeatTasks.clear(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/heartbeat/HuskyHeartbeatFeedbackor.java ================================================ package com.pinecone.hydra.umct.husky.heartbeat; import java.io.IOException; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.vita.HeartbeatFeedbackor; public class HuskyHeartbeatFeedbackor implements HeartbeatFeedbackor { public HuskyHeartbeatFeedbackor() { } @Override public boolean interceptHeartbeat( ChannelControlBlock block, UMCMessage msg ) throws IOException { int nControlBits = msg.getHead().getControlBits(); if ( nControlBits == HeartbeatConstants.HCTP_HEART_REQUEST_ALIVE ) { this.feedback( block, msg ); return true; } return false; } @Override public void feedback( ChannelControlBlock block, UMCMessage msg ) throws IOException { if ( block.getChannelStatus().isAsynAvailable() && !block.isShutdown() ) { block.sendMsg( HeartbeatConstants.HCTP_HEART_ACK, true ); } } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/ArchRouteDispatcher.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import java.lang.reflect.Method; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.umct.MessageDeliver; import com.pinecone.hydra.umct.MessageExpress; import com.pinecone.hydra.umct.MessageHandler; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype; import com.pinecone.hydra.umct.husky.compiler.IfaceMappingDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.mapping.InspectException; import com.pinecone.hydra.umct.mapping.MappingDigest; import com.pinecone.hydra.umct.stereotype.IfaceUtils; import javassist.NotFoundException; public abstract class ArchRouteDispatcher implements RouteDispatcher { protected MCTContextMachinery mMCTContextMachinery; protected UMCTExpress mUMCTExpress; protected MessageDeliver mDefaultDeliver; protected ArchRouteDispatcher() { } protected void registerInstance( MessageDeliver deliver, Object instance, Class iface ) { if ( !iface.isInterface() ) { throw new IllegalArgumentException( "The provided class is not an interface: " + iface.getName() ); } List digests = this.compile( iface, false ).getMethodDigests(); Map digestMap = digests.stream() .collect( Collectors.toMap(MethodDigest::getName, digest -> digest) ); Method[] methods = iface.getMethods(); for ( Method method : methods ) { String methodName = IfaceUtils.getIfaceMethodName( method ); DynamicMethodPrototype digest = (DynamicMethodPrototype)digestMap.get( methodName ); String fullPath = digest.getFullName(); MessageHandler handler = new MessageHandler() { @Override public String getAddressMapping() { return digest.getFullName(); } @Override public Object invoke( Object... 
args ) throws Exception { return method.invoke( instance, args ); } @Override public List getArgumentsKey() { return digest.getArgumentsKey(); } @Override public Object getReturnDescriptor() { return digest.getReturnDescriptor(); } @Override public String getReturnGenericLabel() { return digest.getGenericReturnTypeLabel(); } @Override public Object getArgumentsDescriptor() { return digest.getArgumentsDescriptor(); } @Override public FieldEntity[] getArgumentTemplate() { return digest.getArgumentTemplate().getSegments(); } }; deliver.registerHandler( fullPath, handler ); this.mMCTContextMachinery.getMessageHandlerMap().put( fullPath, handler ); } } @Override public void setUMCTExpress( UMCTExpress handler ) { this.mUMCTExpress = handler; } @Override public MCTContextMachinery getContextMachinery() { return this.mMCTContextMachinery; } @Override public UMCTExpress getUMCTExpress() { return this.mUMCTExpress; } @Override public MessageExpress register( Deliver deliver ) { return this.mUMCTExpress.register( deliver ); } @Override public MessageExpress fired ( Deliver deliver ) { return this.mUMCTExpress.fired( deliver ); } @Override public MessageDeliver getDeliver( String name ) { return this.mUMCTExpress.getDeliver( name ); } @Override public MessageDeliver getDefaultDeliver() { return this.mDefaultDeliver; } @Override public InterfacialCompiler getInterfacialCompiler() { return this.mMCTContextMachinery.getInterfacialCompiler(); } @Override public void registerInstance( String deliverName, Object instance, Class iface ) { MessageDeliver deliver = this.getDeliver( deliverName ); if ( deliver == null ) { throw new IllegalArgumentException( "No such deliver: " + deliverName ); } this.registerInstance( deliver, instance, iface ); } @Override public void registerInstance( Object instance, Class iface ) { this.registerInstance( this.mDefaultDeliver, instance, iface ); } protected void registerController( MessageDeliver deliver, Object instance, Class controllerType ) { try { List digests = this.mMCTContextMachinery.getControllerInspector().characterize( controllerType ); List ifs = this.getInterfacialCompiler().compile( digests ); for ( IfaceMappingDigest imd : ifs ) { String[] addresses = imd.getAddresses(); for ( int i = 0; i < addresses.length; ++i ) { String address = addresses[ i ]; MessageHandler handler = new MessageHandler() { @Override public String getAddressMapping() { return address; } @Override public Object invoke( Object... 
args ) throws Exception { return imd.getMappedMethod().invoke( instance, args ); } @Override public List getArgumentsKey() { return imd.getArgumentsKey(); } @Override public Object getReturnDescriptor() { return imd.getReturnDescriptor(); } @Override public String getReturnGenericLabel() { return imd.getReturnGenericTypeLabel(); } @Override public Object getArgumentsDescriptor() { return imd.getArgumentsDescriptor(); } @Override public FieldEntity[] getArgumentTemplate() { return imd.getArgumentTemplate().getSegments(); } }; deliver.registerHandler( address, handler ); this.mMCTContextMachinery.getMessageHandlerMap().put( address, handler ); } } } catch ( NotFoundException e ) { throw new InspectException( e ); } } @Override public void registerController( String deliverName, Object instance, Class controllerType ) { MessageDeliver deliver = this.getDeliver( deliverName ); if ( deliver == null ) { throw new IllegalArgumentException( "No such deliver: " + deliverName ); } this.registerController( deliver, instance, controllerType ); } @Override public void registerController( Object instance, Class controllerType ) { this.registerController( this.mDefaultDeliver, instance, controllerType ); } @Override public ClassDigest queryClassDigest(String name ) { return this.mMCTContextMachinery.queryClassDigest( name ); } @Override public MethodDigest queryMethodDigest( String name ) { return this.mMCTContextMachinery.queryMethodDigest( name ); } @Override public void addClassDigest( ClassDigest that ) { this.mMCTContextMachinery.addClassDigest( that ); } @Override public void addMethodDigest( MethodDigest that ) { this.mMCTContextMachinery.addMethodDigest( that ); } @Override public ClassDigest compile( Class clazz, boolean bAsIface ) { return this.mMCTContextMachinery.compile( clazz, bAsIface ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/DigestContextMachinery.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import com.pinecone.framework.util.lang.ScopedPackage; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.mapping.ControllerInspector; /** * Pinecone Ursus For Java Hydra Ulfar, DigestContextMachinery * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
*/ public class DigestContextMachinery extends DigestTransformer implements MCTContextMachinery { protected DynamicFactory mIfaceFactory; protected MultiMappingLoader mMultiMappingLoader; public DigestContextMachinery( InterfacialCompiler compiler, ControllerInspector controllerInspector ) { super( compiler, controllerInspector ); this.mIfaceFactory = new GenericDynamicFactory( controllerInspector.getClassLoader() ); this.mMultiMappingLoader = new DigestMappingLoader( this.mIfaceFactory, this ); } @Override public MultiMappingLoader getMultiMappingLoader() { return this.mMultiMappingLoader; } @Override public MCTContextMachinery addScope ( String szPackageName ) { this.mIfaceFactory.getClassScope().addScope( szPackageName ); return this; } @Override public MCTContextMachinery addScope ( ScopedPackage scope ) { this.mIfaceFactory.getClassScope().addScope( scope ); return this; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/DigestMappingLoader.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.name.Name; import com.pinecone.ulf.util.lang.ArchMultiScopeLoader; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import com.pinecone.ulf.util.lang.PooledClassCandidateScanner; import javassist.ClassPool; import javassist.bytecode.annotation.Annotation; public class DigestMappingLoader extends ArchMultiScopeLoader implements MultiMappingLoader { protected MCTContextMachinery mMCTContextMachinery; protected DigestMappingLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool, MCTContextMachinery machinery ) { super( classScope, classLoader, classPool, null, null ); this.mMCTContextMachinery = machinery; this.mClassScanner = new PooledClassCandidateScanner( new HuskyMappingScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool ); this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool ); this.mClassScanner.addExcludeFilter( new ExcludeDigestMappingFilters( this.mClassInspector, this.mMCTContextMachinery ) ); } protected DigestMappingLoader( ClassScope classScope, ClassLoader classLoader, MCTContextMachinery marshal ) { this( classScope, classLoader, ClassPool.getDefault(), marshal ); } public DigestMappingLoader( DynamicFactory factory, MCTContextMachinery marshal ) { this( factory.getClassScope(), factory.getClassLoader(), marshal ); } @Override protected boolean isAnnotationQualified( Annotation that, String szName ) { return false; } @Override public Class load( Name simpleName ) throws ClassNotFoundException { return (Class )super.load( simpleName ); } // Directly by its name. @Override public Class loadByName( Name simpleName ) throws ClassNotFoundException { return (Class )super.loadByName( simpleName ); } // Scanning the class's annotations, methods or other traits.
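// Candidate discovery is delegated to the PooledClassCandidateScanner configured in the constructor above;
// its ExcludeDigestMappingFilters keeps only candidates that either compile as an Iface digest or carry
// @Controller mappings, registering the digests it finds into the context machinery as a side effect
// (see ExcludeDigestMappingFilters.match).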
@Override public Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException { return (Class )super.loadInClassTrait( simpleName ); } @Override protected Class loadSingleByFullClassName( String szFullClassName ) { try { Class clazz = this.mClassLoader.loadClass( szFullClassName ); if( this.filter( clazz ) ) { return null; } return clazz; } catch ( ClassNotFoundException e ) { return null; } } @Override public MultiMappingLoader updateScope() { return (MultiMappingLoader)super.updateScope(); } @Override public void clearCache() { this.mLoadedClassesPool.clear(); this.mVisitedClasses.clear(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/DigestTransformer.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import com.pinecone.hydra.umct.MessageHandler; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.mapping.ControllerInspector; import com.pinecone.hydra.umct.mapping.MappingDigest; public class DigestTransformer implements MCTTransformer { protected InterfacialCompiler mInterfacialCompiler; protected Map mClassDigests; protected Map mMethodDigests; protected ControllerInspector mControllerInspector; protected List mMappingDigests; protected Map mMessageHandlerMap; public DigestTransformer( InterfacialCompiler compiler, ControllerInspector controllerInspector ) { this.mInterfacialCompiler = compiler; this.mControllerInspector = controllerInspector; this.mClassDigests = new LinkedHashMap<>(); this.mMethodDigests = new LinkedHashMap<>(); this.mMessageHandlerMap = new HashMap<>(); this.mMappingDigests = new ArrayList<>(); } @Override public InterfacialCompiler getInterfacialCompiler() { return this.mInterfacialCompiler; } @Override public ControllerInspector getControllerInspector() { return this.mControllerInspector; } @Override public List getMappingDigests() { return this.mMappingDigests; } @Override public Map getMessageHandlerMap() { return this.mMessageHandlerMap; } @Override public ClassDigest queryClassDigest( String name ) { return this.mClassDigests.get( name ); } @Override public MethodDigest queryMethodDigest( String name ) { return this.mMethodDigests.get( name ); } @Override public void addClassDigest( ClassDigest that ) { this.mClassDigests.put( that.getClassName(), that ); List digests = that.getMethodDigests(); for ( MethodDigest digest : digests ) { this.addMethodDigest( digest ); } } @Override public void addMethodDigest( MethodDigest that ) { this.mMethodDigests.put( that.getFullName(), that ); } @Override public ClassDigest compile( Class clazz, boolean bAsIface ) { ClassDigest neo = this.mInterfacialCompiler.compile( clazz, bAsIface ); this.addClassDigest( neo ); return neo; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/ExcludeDigestMappingFilters.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import java.io.IOException; import java.util.List; import com.pinecone.framework.util.lang.TypeFilter; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import
com.pinecone.hydra.umct.mapping.MappingDigest; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import javassist.NotFoundException; public class ExcludeDigestMappingFilters implements TypeFilter { protected HierarchyClassInspector mClassInspector; protected MCTContextMachinery mMCTContextMachinery; public ExcludeDigestMappingFilters(HierarchyClassInspector inspector, MCTContextMachinery marshal ) { this.mClassInspector = inspector; this.mMCTContextMachinery = marshal; } @Override public boolean match( String szClassName, Object pool ) throws IOException { boolean isIface = this.scanIface( szClassName, pool ); if ( isIface ) { return false; } boolean isController = this.scanController( szClassName, pool ); return !isController; } protected boolean scanIface( String szClassName, Object pool ) throws IOException { ClassDigest classDigest = this.mMCTContextMachinery.getInterfacialCompiler().compile( szClassName, false ); if ( classDigest != null ) { this.mMCTContextMachinery.addClassDigest( classDigest ); return true; } return false; } protected boolean scanController( String szClassName, Object pool ) throws IOException { try{ List mappingDigests = this.mMCTContextMachinery.getControllerInspector().characterize( szClassName ); if ( mappingDigests != null && !mappingDigests.isEmpty() ) { this.mMCTContextMachinery.addAll( mappingDigests ); return true; } } catch ( NotFoundException e ) { return false; } return false; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyContextMachinery.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import com.pinecone.framework.util.lang.ScopedPackage; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.mapping.ControllerInspector; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; /** * Pinecone Ursus For Java Hydra Ulfar, Husky Machinery * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
* ***************************************************************************************** * Husky Transformer | Husky Machinery * ***************************************************************************************** */ public class HuskyContextMachinery extends HuskyTransformer implements PMCTContextMachinery { protected DynamicFactory mIfaceFactory; protected MultiMappingLoader mMultiMappingLoader; public HuskyContextMachinery( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, FieldProtobufDecoder decoder ) { super( compiler, controllerInspector, decoder ); this.mIfaceFactory = new GenericDynamicFactory( controllerInspector.getClassLoader() ); this.mMultiMappingLoader = new HuskyMappingLoader( this.mIfaceFactory, this ); } @Override public MultiMappingLoader getMultiMappingLoader() { return this.mMultiMappingLoader; } @Override public PMCTContextMachinery addScope ( String szPackageName ) { this.mIfaceFactory.getClassScope().addScope( szPackageName ); return this; } @Override public PMCTContextMachinery addScope ( ScopedPackage scope ) { this.mIfaceFactory.getClassScope().addScope( scope ); return this; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyMappingLoader.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.name.Name; import com.pinecone.ulf.util.lang.ArchMultiScopeLoader; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import com.pinecone.ulf.util.lang.PooledClassCandidateScanner; import javassist.ClassPool; import javassist.bytecode.annotation.Annotation; public class HuskyMappingLoader extends ArchMultiScopeLoader implements MultiMappingLoader { protected PMCTContextMachinery mPMCTContextMachinery; protected HuskyMappingLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool, PMCTContextMachinery machinery ) { super( classScope, classLoader, classPool, null, null ); this.mPMCTContextMachinery = machinery; this.mClassScanner = new PooledClassCandidateScanner( new HuskyMappingScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool ); this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool ); this.mClassScanner.addExcludeFilter( new ExcludeDigestMappingFilters( this.mClassInspector, this.mPMCTContextMachinery ) ); } protected HuskyMappingLoader( ClassScope classScope, ClassLoader classLoader, PMCTContextMachinery marshal ) { this( classScope, classLoader, ClassPool.getDefault(), marshal ); } public HuskyMappingLoader( DynamicFactory factory, PMCTContextMachinery marshal ) { this( factory.getClassScope(), factory.getClassLoader(), marshal ); } @Override protected boolean isAnnotationQualified( Annotation that, String szName ) { return false; } @Override public Class load( Name simpleName ) throws ClassNotFoundException { return (Class )super.load( simpleName ); } // Directly by its name. @Override public Class loadByName( Name simpleName ) throws ClassNotFoundException { return (Class )super.loadByName( simpleName ); } // Scanning the class's annotations, methods or other traits.
@Override public Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException { return (Class )super.loadInClassTrait( simpleName ); } @Override protected Class loadSingleByFullClassName( String szFullClassName ) { try { Class clazz = this.mClassLoader.loadClass( szFullClassName ); if( this.filter( clazz ) ) { return null; } return clazz; } catch ( ClassNotFoundException e ) { return null; } } @Override public MultiMappingLoader updateScope() { return (MultiMappingLoader)super.updateScope(); } @Override public void clearCache() { this.mLoadedClassesPool.clear(); this.mVisitedClasses.clear(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyMappingScopeSet.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import java.util.Set; import com.pinecone.framework.unit.LinkedTreeSet; import com.pinecone.framework.util.lang.ArchClassScopeSet; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.ScopedPackage; public class HuskyMappingScopeSet extends ArchClassScopeSet { public HuskyMappingScopeSet( Set scope, ClassLoader classLoader ) { super( scope, classLoader ); } public HuskyMappingScopeSet( ClassLoader classLoader ) { super( new LinkedTreeSet<>(), classLoader ); } public HuskyMappingScopeSet( DynamicFactory factory ) { super( new LinkedTreeSet<>(), factory.getClassLoader() ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyRouteDispatcher.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.umct.MessageDeliver; import com.pinecone.hydra.umct.MessageExpress; import com.pinecone.hydra.umct.MessageHandler; import com.pinecone.hydra.umct.MessageJunction; import com.pinecone.hydra.umct.ProtoletMsgDeliver; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.WolfMCExpress; import com.pinecone.hydra.uma.AppointServer; import com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.CompilerEncoder; import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype; import com.pinecone.hydra.umct.husky.compiler.IfaceMappingDigest; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.mapping.BytecodeControllerInspector; import com.pinecone.hydra.umct.mapping.ControllerInspector; import com.pinecone.hydra.umct.mapping.InspectException; import com.pinecone.hydra.umct.mapping.MappingDigest; import com.pinecone.hydra.umct.stereotype.IfaceUtils; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder; import javassist.ClassPool; import javassist.NotFoundException; public class HuskyRouteDispatcher extends ArchRouteDispatcher implements ProtoRouteDispatcher { protected void
applyExpress( ProtoInterfacialCompiler compiler, UMCTExpress express ) { this.mUMCTExpress = express; this.mDefaultDeliver = new ProtoletMsgDeliver( AppointServer.DefaultEntityName, this.mUMCTExpress, this.getContextMachinery(), compiler.getCompilerEncoder() ); this.mUMCTExpress.register( this.mDefaultDeliver ); } protected HuskyRouteDispatcher( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector ) { super(); this.mMCTContextMachinery = new HuskyContextMachinery( compiler, controllerInspector, new GenericFieldProtobufDecoder() ); } public HuskyRouteDispatcher( PMCTContextMachinery machinery, UMCTExpress express ) { super(); this.mMCTContextMachinery = machinery; this.applyExpress( machinery.getInterfacialCompiler(), express ); } public HuskyRouteDispatcher( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ) { this( compiler, controllerInspector ); this.applyExpress( compiler, express ); } public HuskyRouteDispatcher( CompilerEncoder encoder, UMCTExpress express, ClassLoader classLoader ) { this( new BytecodeIfaceCompiler( ClassPool.getDefault(), classLoader, encoder ), new BytecodeControllerInspector( ClassPool.getDefault(), classLoader ), express ); } public HuskyRouteDispatcher( UMCTExpress express, ClassLoader classLoader ) { this( new BytecodeIfaceCompiler( ClassPool.getDefault(), classLoader ), new BytecodeControllerInspector( ClassPool.getDefault(), classLoader ), express ); } public HuskyRouteDispatcher( Class expressType, MessageJunction junction, ClassLoader classLoader ) { this( new BytecodeIfaceCompiler( ClassPool.getDefault(), classLoader ), new BytecodeControllerInspector( ClassPool.getDefault(), classLoader ) ); try { Constructor constructor = expressType.getConstructor( String.class, MessageJunction.class, Logger.class ); Logger logger ; if ( junction instanceof Slf4jTraceable ) { logger = ((Slf4jTraceable) junction).getLogger(); } else { logger = LoggerFactory.getLogger( this.getClass().getName() ); } UMCTExpress express = (UMCTExpress) constructor.newInstance( AppointServer.DefaultEntityName, junction, logger ); this.applyExpress( this.getInterfacialCompiler(), express ); } catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { throw new IllegalArgumentException( "`" + expressType.getSimpleName() + "` is not UMCTExpress calibre qualified." 
); } } public HuskyRouteDispatcher( ClassLoader classLoader, boolean delayExpress ) { this( new BytecodeIfaceCompiler( ClassPool.getDefault(), classLoader ), new BytecodeControllerInspector( ClassPool.getDefault(), classLoader ) ); } public HuskyRouteDispatcher( MessageJunction junction, ClassLoader classLoader ) { this( WolfMCExpress.class, junction, classLoader ); } @Override public PMCTContextMachinery getContextMachinery() { return (PMCTContextMachinery) super.getContextMachinery(); } @Override public ProtoInterfacialCompiler getInterfacialCompiler() { return (ProtoInterfacialCompiler) super.getInterfacialCompiler(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyRouteDispatcherFabricator.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.UMCTExpress; public class HuskyRouteDispatcherFabricator implements Pinenut { public static void afterConstructed( HuskyRouteDispatcher dispatcher, UMCTExpress express ) { dispatcher.applyExpress( dispatcher.getInterfacialCompiler(), express ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyTransformer.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.hydra.umct.mapping.ControllerInspector; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; public class HuskyTransformer extends DigestTransformer implements PMCTTransformer { protected FieldProtobufEncoder mFieldProtobufEncoder; protected FieldProtobufDecoder mFieldProtobufDecoder; public HuskyTransformer( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, FieldProtobufDecoder decoder ) { super( compiler, controllerInspector ); this.mFieldProtobufEncoder = compiler.getCompilerEncoder().getEncoder(); this.mFieldProtobufDecoder = decoder; } @Override public ProtoInterfacialCompiler getInterfacialCompiler() { return (ProtoInterfacialCompiler) super.getInterfacialCompiler(); } @Override public FieldProtobufEncoder getFieldProtobufEncoder() { return this.mFieldProtobufEncoder; } @Override public FieldProtobufDecoder getFieldProtobufDecoder() { return this.mFieldProtobufDecoder; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/MCTContextMachinery.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.util.lang.ScopedPackage; public interface MCTContextMachinery extends MCTTransformer { MCTContextMachinery addScope ( String szPackageName ); MCTContextMachinery addScope ( ScopedPackage scope ); MultiMappingLoader getMultiMappingLoader(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/MCTTransformer.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import java.util.List; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.MessageHandler; import 
com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.mapping.ControllerInspector; import com.pinecone.hydra.umct.mapping.MappingDigest; public interface MCTTransformer extends Pinenut { InterfacialCompiler getInterfacialCompiler(); ControllerInspector getControllerInspector(); Map getMessageHandlerMap(); List getMappingDigests(); default void addAll( List digests ) { this.getMappingDigests().addAll( digests ); } ClassDigest queryClassDigest( String name ); MethodDigest queryMethodDigest( String name ); void addClassDigest( ClassDigest that ); void addMethodDigest( MethodDigest that ); ClassDigest compile( Class clazz, boolean bAsIface ); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/MultiMappingLoader.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import java.util.List; import com.pinecone.framework.util.lang.MultiClassScopeLoader; import com.pinecone.framework.util.name.Name; import com.pinecone.ulf.util.lang.MultiTraitClassLoader; public interface MultiMappingLoader extends MultiClassScopeLoader, MultiTraitClassLoader { @Override Class load( Name simpleName ) throws ClassNotFoundException ; // Directly by its name. @Override Class loadByName( Name simpleName ) throws ClassNotFoundException ; // Scanning the class's annotations, methods or other traits. @Override Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException ; @Override MultiMappingLoader updateScope(); @Override List loads( Name name ) ; @Override List loadsByName( Name simpleName ); @Override List loadsInClassTrait( Name simpleName ) ; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/PMCTContextMachinery.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.util.lang.ScopedPackage; public interface PMCTContextMachinery extends PMCTTransformer, MCTContextMachinery { @Override PMCTContextMachinery addScope ( String szPackageName ); @Override PMCTContextMachinery addScope ( ScopedPackage scope ); @Override MultiMappingLoader getMultiMappingLoader(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/PMCTTransformer.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; import com.pinecone.ulf.util.protobuf.FieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.FieldProtobufEncoder; public interface PMCTTransformer extends MCTTransformer { @Override ProtoInterfacialCompiler getInterfacialCompiler(); default FieldProtobufEncoder getFieldProtobufEncoder() { return this.getInterfacialCompiler().getCompilerEncoder().getEncoder(); } FieldProtobufDecoder getFieldProtobufDecoder(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/ProtoRouteDispatcher.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler; public interface ProtoRouteDispatcher
extends RouteDispatcher { @Override PMCTContextMachinery getContextMachinery(); @Override ProtoInterfacialCompiler getInterfacialCompiler(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/RouteDispatcher.java ================================================ package com.pinecone.hydra.umct.husky.machinery; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.express.Deliver; import com.pinecone.hydra.umct.MessageDeliver; import com.pinecone.hydra.umct.MessageExpress; import com.pinecone.hydra.umct.UMCTExpress; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; public interface RouteDispatcher extends Pinenut { void setUMCTExpress( UMCTExpress handler ); MCTContextMachinery getContextMachinery(); UMCTExpress getUMCTExpress(); MessageExpress register( Deliver deliver ); MessageExpress fired ( Deliver deliver ); MessageDeliver getDeliver( String name ); MessageDeliver getDefaultDeliver(); InterfacialCompiler getInterfacialCompiler(); void registerInstance( String deliverName, Object instance, Class iface ) ; void registerInstance( Object instance, Class iface ); void registerController( String deliverName, Object instance, Class controllerType ) ; void registerController( Object instance, Class controllerType ) ; default void registerController( Object instance ) { this.registerController( instance, instance.getClass() ); } ClassDigest queryClassDigest( String name ); MethodDigest queryMethodDigest( String name ) ; void addClassDigest( ClassDigest that ); void addMethodDigest( MethodDigest that ); ClassDigest compile( Class clazz, boolean bAsIface ) ; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/lets/MessageServiceScanner.java ================================================ package com.pinecone.hydra.umct.lets; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.ClassScopeNSProtocolIteratorsFactory; import com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import com.pinecone.ulf.util.lang.PooledClassCandidateScanner; import com.pinecone.ulf.util.lang.SimpleAnnotationExcludeFilter; import javassist.ClassPool; public class MessageServiceScanner extends PooledClassCandidateScanner implements MessageletScanner { protected HierarchyClassInspector mClassInspector ; public MessageServiceScanner ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) { super( searchScope, classLoader, iteratorsFactory, classPool ); this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool ); this.addExcludeFilter( new SimpleAnnotationExcludeFilter( this.mClassInspector, MsgService.class ) ); } public MessageServiceScanner ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) { this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool ); } public MessageServiceScanner ( ClassScope searchScope, ClassLoader classLoader ) { this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() ); } } 
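// A minimal sketch of a scan target for this scanner (hypothetical class, for illustration only;
// the name and body below are assumptions, not part of this repository):
//
//     @MsgService( "demoService" )
//     public class DemoMessagelet {
//         // public handler methods exposed through the messagelet runtime ...
//     }
//
// The SimpleAnnotationExcludeFilter registered in the constructor presumably screens candidates by the
// @MsgService annotation, so only classes declared like the sketch above survive the scan.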
================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/lets/MessageletScanner.java ================================================ package com.pinecone.hydra.umct.lets; import com.pinecone.framework.util.lang.ClassScanner; public interface MessageletScanner extends ClassScanner { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/lets/MsgService.java ================================================ package com.pinecone.hydra.umct.lets; import java.lang.annotation.*; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface MsgService { String value() default ""; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/ArchMappingInspector.java ================================================ package com.pinecone.hydra.umct.mapping; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.List; import com.pinecone.hydra.umct.bind.ArgParam; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import javassist.ClassPool; import javassist.CtClass; import javassist.CtMethod; import javassist.NotFoundException; import javassist.bytecode.ParameterAnnotationsAttribute; import javassist.bytecode.annotation.Annotation; public abstract class ArchMappingInspector extends GenericPreloadClassInspector implements MappingInspector { protected ClassLoader mClassLoader; public ArchMappingInspector( ClassPool classPool, ClassLoader classLoader ) { super( classPool ); this.mClassLoader = classLoader; } @Override public List inspectArgParams( Object methodDigest, CtMethod method ) { List argParams = null; ParameterAnnotationsAttribute paramAnnotationsAttr = ( ParameterAnnotationsAttribute) method.getMethodInfo(). getAttribute(ParameterAnnotationsAttribute.visibleTag ); if ( paramAnnotationsAttr != null ) { Annotation[][] parameterAnnotations = paramAnnotationsAttr.getAnnotations(); if ( parameterAnnotations.length > 0 ) { argParams = new ArrayList<>(); } for ( int i = 0; i < parameterAnnotations.length; ++i ) { for ( Annotation annotation : parameterAnnotations[ i ] ) { if ( ArgParam.class.getName().equals(annotation.getTypeName()) ) { String name = annotation.getMemberValue("name") != null ? annotation.getMemberValue("name").toString() : null; String value = annotation.getMemberValue("value") != null ? annotation.getMemberValue("value").toString() : null; String defVal = annotation.getMemberValue("defaultValue") != null ? 
annotation.getMemberValue("defaultValue").toString() : null; boolean required = annotation.getMemberValue("required") == null || Boolean.parseBoolean(annotation.getMemberValue("required").toString()); argParams.add( this.newParamsDigest( methodDigest, i, this.annotationKeyNormalize(name), this.annotationKeyNormalize(value), this.annotationKeyNormalize(defVal), required ) ); } } } } return argParams; } protected ParamsDigest newParamsDigest( Object methodDigest, int parameterIndex, String name, String value, String defaultValue, boolean required ) { return new GenericParamsDigest( parameterIndex, this.annotationKeyNormalize(name), this.annotationKeyNormalize(value), this.annotationKeyNormalize(defaultValue), required ); } protected String annotationKeyNormalize( String bad ) { if ( bad != null ) { bad = bad.trim(); if ( bad.startsWith( "\"" ) ) { return bad.replace( "\"", "" ); } } return bad; } protected Class reinterpretClass( String className ) throws ClassNotFoundException { switch (className) { case "boolean": { return boolean.class; } case "byte": { return byte.class; } case "char": { return char.class; } case "short": { return short.class; } case "int": { return int.class; } case "long": { return long.class; } case "float": { return float.class; } case "double": { return double.class; } case "void": { return void.class; } default: if ( className.endsWith( "[]" ) ) { String elementTypeName = className.substring( 0, className.length() - 2 ); Class elementType = this.reinterpretClass( elementTypeName ); return Array.newInstance( elementType, 0 ).getClass(); } return this.mClassLoader.loadClass(className); } } protected T getAnnotation( CtClass ctClass, Class annotationClass ) { try { Object rawAnnotation = ctClass.getAnnotation( annotationClass ); return annotationClass.cast( rawAnnotation ); } catch ( ClassNotFoundException e ) { return null; } } protected T getAnnotation( CtMethod ctMethod, Class annotationClass ) { try { Object rawAnnotation = ctMethod.getAnnotation( annotationClass ); return annotationClass.cast( rawAnnotation ); } catch ( ClassNotFoundException e ) { return null; } } protected Class[] getParameters ( CtMethod method ) throws ClassNotFoundException { CtClass[] pars; try{ pars = method.getParameterTypes(); } catch ( NotFoundException e ) { pars = null; } Class[] parameters; if( pars != null ) { parameters = new Class[ pars.length ]; for ( int i = 0; i < pars.length; ++i ) { CtClass par = pars[ i ]; String parName = par.getName(); Class pc = this.reinterpretClass( parName ); parameters[ i ] = pc; } } else { parameters = null; } return parameters; } @Override public ClassLoader getClassLoader() { return this.mClassLoader; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/BytecodeControllerInspector.java ================================================ package com.pinecone.hydra.umct.mapping; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; import com.pinecone.hydra.umc.msg.UMCMethod; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import javassist.ClassPool; import javassist.CtClass; import javassist.CtMethod; import javassist.NotFoundException; public class BytecodeControllerInspector extends ArchMappingInspector implements ControllerInspector { public BytecodeControllerInspector( ClassPool classPool, ClassLoader classLoader ) { super( classPool, classLoader ); } 
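// Convenience constructor: falls back to the thread-context ClassLoader when none is supplied.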
public BytecodeControllerInspector( ClassPool classPool ) { this( classPool, Thread.currentThread().getContextClassLoader() ); } @Override public List inspect( String className ) throws NotFoundException { List mappingMethods = new ArrayList<>(); CtClass ctClass = this.mClassPool.get( className ); boolean classHasControllerAnnotation = this.hasOwnAnnotation( ctClass, Controller.class ); if ( classHasControllerAnnotation ) { for ( CtMethod method : ctClass.getDeclaredMethods() ) { if ( Modifier.isPublic( method.getModifiers() ) ) { if ( this.methodHasAnnotation( method, AddressMapping.class ) ) { mappingMethods.add( method ); } } } } return mappingMethods; } @Override public List inspect( Class clazz ) throws NotFoundException { return this.inspect( clazz.getName() ); } @Override public List characterize( String className ) throws NotFoundException { try{ List mappingDigests = new ArrayList<>(); CtClass ctClass = this.mClassPool.get(className); if ( !this.hasOwnAnnotation( ctClass, Controller.class ) ) { return mappingDigests; } AddressMapping classMapping = this.getAnnotation( ctClass, AddressMapping.class ); String[] classLevelMappings; if ( classMapping != null ) { classLevelMappings = classMapping.value(); } else { classLevelMappings = new String[]{}; } for ( CtMethod method : ctClass.getDeclaredMethods() ) { if ( !Modifier.isPublic( method.getModifiers() ) || !this.methodHasAnnotation( method, AddressMapping.class ) ) { continue; } AddressMapping methodMapping = this.getAnnotation( method, AddressMapping.class ); if ( methodMapping == null ) { continue; // Method must have an explicit `AddressMapping`. } String[] methodLevelMappings = methodMapping.value(); boolean isRelative = methodMapping.relative(); if ( methodLevelMappings.length == 0 && methodMapping.selfMappable() ) { methodLevelMappings = new String[]{ method.getName() }; } List fullAddresses = new ArrayList<>(); for ( String classMappingValue : classLevelMappings ) { for ( String methodMappingValue : methodLevelMappings ) { if ( isRelative ) { fullAddresses.add( classMappingValue + methodMappingValue ); } else { fullAddresses.add( methodMappingValue ); } } } List paramsDigests = this.inspectArgParams( null, method ); Class[] parameters = this.getParameters( method ); String[] parameterTypes = BytecodeControllerInspector.evalGenericParameterTypes( method ); String returnGType = BytecodeControllerInspector.evalGenericReturnType( method ); Class auth = this.reinterpretClass( className ); Method mappedMethod = auth.getMethod( method.getName(), parameters ); UMCMethod[] intMethods = methodMapping.method(); MappingDigest digest = new GenericMappingDigest( fullAddresses.isEmpty() ? 
methodLevelMappings : fullAddresses.toArray( new String[ 0 ] ), parameters, parameterTypes, this.reinterpretClass( method.getReturnType().getName() ), returnGType, auth, mappedMethod, paramsDigests, intMethods ); digest.apply( paramsDigests ); mappingDigests.add( digest ); } return mappingDigests; } catch ( ClassNotFoundException | NoSuchMethodException e ) { throw new InspectException( e ); } } @Override public List characterize( Class clazz ) throws NotFoundException { return this.characterize( clazz.getName() ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/ControllerInspector.java ================================================ package com.pinecone.hydra.umct.mapping; import java.util.List; import javassist.CtMethod; import javassist.NotFoundException; public interface ControllerInspector extends MappingInspector { List inspect( String className ) throws NotFoundException; List inspect( Class clazz ) throws NotFoundException; List characterize( String className ) throws NotFoundException; List characterize( Class clazz ) throws NotFoundException; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/GenericMappingDigest.java ================================================ package com.pinecone.hydra.umct.mapping; import java.lang.reflect.Method; import java.util.List; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.lang.field.GenericStructure; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.umc.msg.UMCMethod; import com.pinecone.hydra.umct.husky.function.MethodTemplates; public class GenericMappingDigest implements MappingDigest { protected String[] mszAddresses; protected UMCMethod[] mInterceptMethods; protected DataStructureEntity mArgumentTemplate; protected Class mReturnType; protected Class mClassType; protected String mszReturnGenericTypeLabel; protected Method mMappedMethod; protected List mParamsDigests; protected GenericMappingDigest() { } public GenericMappingDigest( String[] szAddresses, Class[] parameters, String[] parametersGenericLabels, Class returnType, String szReturnGenericTypeLabel, Class classType, Method method, List paramsDigests, UMCMethod[] interceptMethods ) { this.mszAddresses = szAddresses; this.mReturnType = returnType; this.mszReturnGenericTypeLabel = szReturnGenericTypeLabel; this.mParamsDigests = paramsDigests; this.mClassType = classType; this.mMappedMethod = method; this.mInterceptMethods = interceptMethods; String szDominatedAddress; if ( szAddresses.length > 0 ) { szDominatedAddress = szAddresses[0]; } else { szDominatedAddress = ""; // Anonymous address: the value is pointless for this argument template, since an address is only meaningful for Iface.
} if( parameters == null || parameters.length == 0 ) { this.mArgumentTemplate = new GenericStructure( szDominatedAddress, 0 ); } else { this.mArgumentTemplate = MethodTemplates.from( null, szDominatedAddress, parameters, parametersGenericLabels ); } } @Override public void apply( List ifaceParamsDigests ) { this.mParamsDigests = ifaceParamsDigests; } @Override public List getArgumentsKey() { return MethodDigestUtils.getArgumentsKey( this.getParamsDigests(), this.getArgumentTemplate() ); } @Override public String[] getAddresses() { return this.mszAddresses; } @Override public UMCMethod[] getInterceptMethods() { return this.mInterceptMethods; } @Override public Method getMappedMethod() { return this.mMappedMethod; } @Override public Class getClassType() { return this.mClassType; } @Override public DataStructureEntity getArgumentTemplate() { return this.mArgumentTemplate; } @Override public Class getReturnType() { return this.mReturnType; } @Override public String getReturnGenericTypeLabel() { return this.mszReturnGenericTypeLabel; } @Override public void applyReturnGenericTypeLabel( String genericTypeLabel ) { this.mszReturnGenericTypeLabel = genericTypeLabel; } @Override public List getParamsDigests() { return this.mParamsDigests; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "addresses" , this.getAddresses() ), new KeyValue<>( "return" , this.getReturnType().getName() ), new KeyValue<>( "mappedClass" , this.getClassType().getName() ), new KeyValue<>( "mappedMethod" , this.getMappedMethod().getName() ), } ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/GenericParamsDigest.java ================================================ package com.pinecone.hydra.umct.mapping; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; public class GenericParamsDigest implements ParamsDigest { protected int mParameterIndex; protected String mszName; protected String mszValue; protected String mszDefaultValue; protected boolean mRequired; public GenericParamsDigest( int parameterIndex, String name, String value, String defaultValue, boolean required ) { this.mParameterIndex = parameterIndex; this.mszName = name; this.mszValue = value; this.mRequired = required; this.mszDefaultValue = defaultValue; } @Override public int getParameterIndex() { return this.mParameterIndex; } @Override public String getName() { return this.mszName; } @Override public String getValue() { return this.mszValue; } @Override public boolean isRequired() { return this.mRequired; } @Override public String getDefaultValue() { return this.mszDefaultValue; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "index" , this.getParameterIndex() ), new KeyValue<>( "name" , this.getName() ), new KeyValue<>( "value" , this.getValue() ), new KeyValue<>( "defaultValue" , this.getDefaultValue() ), new KeyValue<>( "required" , this.isRequired() ), } ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/InspectException.java ================================================ package com.pinecone.hydra.umct.mapping; import com.pinecone.framework.system.PineRuntimeException; public 
class InspectException extends PineRuntimeException { public InspectException () { super(); } public InspectException ( String message ) { super(message); } public InspectException ( String message, Throwable cause ) { super(message, cause); } public InspectException ( Throwable cause ) { super(cause); } protected InspectException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/MappingDigest.java ================================================ package com.pinecone.hydra.umct.mapping; import java.lang.reflect.Method; import java.util.List; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.hydra.umc.msg.UMCMethod; public interface MappingDigest extends Pinenut { String[] getAddresses(); default boolean isAnonymousAddress() { return this.getAddresses().length == 0; } UMCMethod[] getInterceptMethods(); DataStructureEntity getArgumentTemplate(); Class getClassType(); Method getMappedMethod(); Class getReturnType(); String getReturnGenericTypeLabel(); default String[] getReturnGenericTypeNames() { return ReflectionUtils.extractGenericClassNames( this.getReturnGenericTypeLabel() ); } void applyReturnGenericTypeLabel( String genericTypeLabel ); List getParamsDigests(); void apply( List ifaceParamsDigests); List getArgumentsKey(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/MappingInspector.java ================================================ package com.pinecone.hydra.umct.mapping; import java.util.List; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import javassist.CtMethod; public interface MappingInspector extends HierarchyClassInspector { List inspectArgParams( Object methodDigest, CtMethod method ); ClassLoader getClassLoader(); } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/MethodDigestUtils.java ================================================ package com.pinecone.hydra.umct.mapping; import java.util.ArrayList; import java.util.List; import com.pinecone.framework.lang.field.DataStructureEntity; import com.pinecone.framework.util.StringUtils; public final class MethodDigestUtils { public static List getArgumentsKey( List paramsDigests, DataStructureEntity argumentTemplate ) { if ( paramsDigests == null || paramsDigests.isEmpty() || paramsDigests.size() != argumentTemplate.size() ) { return null; } List keys = new ArrayList<>( paramsDigests.size() ); for ( ParamsDigest digest : paramsDigests ) { String n = digest.getName(); if ( StringUtils.isEmpty( n ) ) { return null; } keys.add( n ); } return keys; } } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/ParamsDigest.java ================================================ package com.pinecone.hydra.umct.mapping; import com.pinecone.framework.system.prototype.Pinenut; public interface ParamsDigest extends Pinenut { int getParameterIndex() ; String getName(); String getValue(); boolean isRequired(); String getDefaultValue(); } ================================================ FILE: 
Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/proxy/UMCTHub.java ================================================ package com.pinecone.hydra.umct.proxy; import com.pinecone.framework.system.prototype.Pinenut; public class UMCTHub implements Pinenut { } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/stereotype/Controller.java ================================================ package com.pinecone.hydra.umct.stereotype; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Controller { String value() default ""; } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/stereotype/Iface.java ================================================ package com.pinecone.hydra.umct.stereotype; import org.springframework.core.annotation.AliasFor; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.TYPE, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Iface { @AliasFor("name") String value() default ""; @AliasFor("value") String name() default ""; String objectAddress() default ""; // Class only. } ================================================ FILE: Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/stereotype/IfaceUtils.java ================================================ package com.pinecone.hydra.umct.stereotype; import com.pinecone.framework.util.StringUtils; import java.lang.reflect.Method; public final class IfaceUtils { public static String getIfaceNameFieldVal( Iface annotation ) { String name = annotation.name(); if ( StringUtils.isEmpty( name ) ) { name = annotation.value(); } return name; } public static String queryIfaceLogicClassName ( Iface cIface ) { String szLogicClassName = null; if ( cIface != null ) { String objectAddress = cIface.objectAddress(); if ( StringUtils.isNoneEmpty( objectAddress ) ) { szLogicClassName = objectAddress; } else { objectAddress = IfaceUtils.getIfaceNameFieldVal( cIface ); } if ( StringUtils.isNoneEmpty( objectAddress ) ) { szLogicClassName = objectAddress; } } return szLogicClassName; } public static String queryIfaceLogicClassName ( Class clazz ) { Iface cIface = clazz.getAnnotation( Iface.class ); return IfaceUtils.queryIfaceLogicClassName( cIface ); } public static String queryIfaceClassNameAddress ( Class clazz ) { String szLogicCN = IfaceUtils.queryIfaceLogicClassName( clazz ); if ( szLogicCN != null ) { return szLogicCN; } return clazz.getName(); } public static String getIfaceMethodName( Method method ){ String ifaceName = method.getName(); Iface annotation = method.getAnnotation(Iface.class); if ( annotation != null ) { String name = IfaceUtils.getIfaceNameFieldVal( annotation ); if ( StringUtils.isNoneEmpty( name ) ) { ifaceName = name; } } return ifaceName; } } ================================================ FILE: Hydra/hydra-message-control/src/test/java/com/umc/TestUMCC.java ================================================ package com.umc; import java.nio.ByteBuffer; import com.pinecone.Pinecone; import 
com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.umc.msg.UMCCHead; import com.pinecone.hydra.umc.msg.UMCCHeadV1; import com.pinecone.hydra.umc.msg.UMCMethod; import com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder; import com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder; public class TestUMCC { public static void testUMCC() throws Exception { UMCCHeadV1 head = new UMCCHeadV1(); ExtraHeadCoder coder = new GenericExtraHeadCoder(); head.setBodyLength( 136 ); head.setControlBits( 512 ); head.applyExtraHeadCoder( coder ); head.setExtraHead( new JSONMaptron( "{k:123, k1: abcdefg}" ) ); UMCCHeadV1.EncodePair pair = UMCCHeadV1.encode( head, coder ); ByteBuffer buffer = pair.byteBuffer; //new UMCCHeadV1("", UMCMethod.INFORM ); Debug.redf( head, pair.bufLength ); UMCCHead dec = UMCCHeadV1.decode( buffer.array(), head.getSignature(), coder ); byte[] headBuf = new byte[ head.getExtraHeadLength() ]; int headSize = head.sizeof(); System.arraycopy( buffer.array(), headSize, headBuf, 0, head.getExtraHeadLength() ); Object object = coder.getDecoder().decode( dec, headBuf ); Debug.bluef( dec, object ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ TestUMCC.testUMCC(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-service-control/pom.xml ================================================ <?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>hydra</artifactId>
        <groupId>com.pinecone.hydra</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra.kernel</groupId>
    <artifactId>hydra-service-control</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.ulf</groupId>
            <artifactId>ulfhedinn</artifactId>
            <version>1.2.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-architecture</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-framework-service</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project> ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ClientServiceRegisterException.java ================================================ package com.pinecone.hydra.service.registry; public class ClientServiceRegisterException extends ServiceControlException { public ClientServiceRegisterException() { super(); } public ClientServiceRegisterException( String message ) { super(message); } public ClientServiceRegisterException( String message, Throwable cause ) { super(message, cause); } public ClientServiceRegisterException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceControlException.java ================================================ package com.pinecone.hydra.service.registry; import com.pinecone.framework.system.prototype.Pinenut; public class ServiceControlException extends Exception implements Pinenut { public ServiceControlException() { super(); } public ServiceControlException( String message ) { super(message); } public ServiceControlException( String message, Throwable cause ) { super(message, cause); } public ServiceControlException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceControlRPCException.java ================================================ package com.pinecone.hydra.service.registry; public
ServiceControlRPCException() { super(); } public ServiceControlRPCException( String message ) { super(message); } public ServiceControlRPCException( String message, Throwable cause ) { super(message, cause); } public ServiceControlRPCException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceInstanceCreationException.java ================================================ package com.pinecone.hydra.service.registry; public class ServiceInstanceCreationException extends ServiceControlException { public ServiceInstanceCreationException() { super(); } public ServiceInstanceCreationException( String message ) { super(message); } public ServiceInstanceCreationException( String message, Throwable cause ) { super(message, cause); } public ServiceInstanceCreationException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceValidationException.java ================================================ package com.pinecone.hydra.service.registry; public class ServiceValidationException extends ServiceControlException { public ServiceValidationException() { super(); } public ServiceValidationException( String message ) { super(message); } public ServiceValidationException( String message, Throwable cause ) { super(message, cause); } public ServiceValidationException( Throwable cause ) { super(cause); } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/UniformService.java ================================================ package com.pinecone.hydra.service.registry; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.service.ArchService; import com.pinecone.hydra.service.Serviciom; import com.pinecone.hydra.service.kom.entity.ServiceElement; public class UniformService extends ArchService implements Serviciom { public UniformService( Identification serviceId, ServiceElement serviceElement ) { super( serviceId, serviceElement ); } @Override public Namespace getGroupNamespace() { return null; } @Override public String getGroupName() { return null; } @Override public Object getProcessImageObject() { return null; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/WolfServiceInstance.java ================================================ package com.pinecone.hydra.service.registry; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.service.Service; import com.pinecone.hydra.service.Servicium; import com.pinecone.hydra.service.entity.BindUSII; import com.pinecone.hydra.service.entity.USII; public class WolfServiceInstance implements Servicium { protected USII mUSII; protected Service mService; public WolfServiceInstance( long clientId, Service service, GUID guid ) { this.mUSII = BindUSII.wrap( clientId, service.getId(), guid ); this.mService = service; } @Override public Identification getId() { return this.mUSII.getInstanceId(); } @Override public Identification getServiceId() { return this.mUSII.getServiceId(); } @Override public USII getUSII() { return this.mUSII; } @Override public 
Processum getProcessObject() { return null; } @Override public Service getService() { return this.mService; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/appoint/ServiceAppointServer.java ================================================ package com.pinecone.hydra.service.registry.appoint; import com.pinecone.hydra.appoints.AppointNodus; import com.pinecone.hydra.service.registry.server.ServiceManager; public interface ServiceAppointServer extends AppointNodus { ServiceAppointServer hookServiceManager( ServiceManager serviceManager ); ServiceManager serviceManager(); boolean isTerminated(); boolean isStarted(); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/appoint/ServiceClientile.java ================================================ package com.pinecone.hydra.service.registry.appoint; import java.net.SocketAddress; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; public interface ServiceClientile extends Pinenut { /** * One Client ID corresponds to one instance and can have only one address. * For scenarios where a single client opens multiple multiplexed connections, there can be only one main address. */ SocketAddress getRemoteAddress(); void afterNewConnectionInbound( Long clientId, Object connectId, Object connection, Object context ); void afterConnectionDetach( Long clientId, Object channelId, Object connection ); ServiceAppointServer serviceAppointServer(); long getClientId(); int connectionCount(); boolean isDefunct(); /** * Some servers may not be able to obtain connection-id. */ Object queryNativeConnection( Object connectionIdentity ); Collection connections(); void shutdown(); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/client/ArchServiceClient.java ================================================ package com.pinecone.hydra.service.registry.client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.service.registry.ServiceControlRPCException; public abstract class ArchServiceClient implements ServiceClient { protected Logger mLogger; protected GuidAllocator mGuidAllocator; protected GUID mServiceId; protected GUID mInstanceId; public ArchServiceClient( @Nullable GUID serviceId, GuidAllocator guidAllocator ) { this.mLogger = LoggerFactory.getLogger( this.getClass() ); this.mGuidAllocator = guidAllocator; this.mServiceId = serviceId; } public ArchServiceClient( GuidAllocator guidAllocator ) { this( null, guidAllocator ); } @Override public void startService() throws ServiceControlRPCException { this.initRPCSubsystem(); } @Override public GuidAllocator getGuidAllocator() { return this.mGuidAllocator; } protected abstract void initRPCSubsystem() throws ServiceControlRPCException ; } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/client/HuskyServiceClient.java ================================================ package com.pinecone.hydra.service.registry.client; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import
com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface; import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface; import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.ServiceControlRPCException; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.uma.wolf.WolvesAppointClient; import com.pinecone.hydra.umc.wolf.client.UlfClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class HuskyServiceClient extends ArchServiceClient implements ServiceClient { protected DuplexAppointClient mDuplexAppointClient; protected UlfClient mRPCClient; protected ServiceLifecycleIface mServiceLifecycleIface; protected ServiceMetaManipulationIface mServiceMetaManipulationIface; public HuskyServiceClient( @Nullable GUID serviceId, UlfClient ulfClient, GuidAllocator guidAllocator ) { super( serviceId, guidAllocator ); this.mRPCClient = ulfClient; } public HuskyServiceClient( UlfClient ulfClient, GuidAllocator guidAllocator ) { this( null, ulfClient, guidAllocator ); } @Override public void startService() throws ServiceControlRPCException { this.initRPCSubsystem(); } @Override public void terminateService() { if( this.mDuplexAppointClient == null ) { throw new IllegalStateException( "RPCClient has not been started yet." ); } this.deregister(); this.mDuplexAppointClient.terminate(); this.mDuplexAppointClient = null; } @Override public DuplexAppointClient getAppointNodus() { return this.mDuplexAppointClient; } @Override public GuidAllocator getGuidAllocator() { return this.mGuidAllocator; } protected void initRPCSubsystem() throws ServiceControlRPCException { if ( this.mDuplexAppointClient != null && !this.mDuplexAppointClient.getMessageNode().isTerminated() ) { throw new IllegalStateException( "DuplexAppointClient has already started."
); } this.mDuplexAppointClient = new WolvesAppointClient( this.mRPCClient ); try { this.mDuplexAppointClient.execute(); this.mDuplexAppointClient.compile( ServiceLifecycleIface.class, false ); this.mDuplexAppointClient.compile( ServiceMetaManipulationIface.class, false ); this.mServiceLifecycleIface = this.mDuplexAppointClient.getIface( ServiceLifecycleIface.class ); this.mServiceMetaManipulationIface = this.mDuplexAppointClient.getIface( ServiceMetaManipulationIface.class ); this.mLogger.info( "RPC initialization successful" ); } catch ( Exception e ) { this.mServiceLifecycleIface = null; throw new ServiceControlRPCException( e ); } } @Override public GUID registerService( GUID serviceId, GUID deployGuid ) throws ClientServiceRegisterException { RegisterServiceDTO serviceDTO = new RegisterServiceDTO(); serviceDTO.setServiceId( serviceId.toString() ); serviceDTO.setClientId( this.mRPCClient.getMessageNodeId() ); if ( deployGuid != null ) { serviceDTO.setDeployId( deployGuid.toString() ); } this.mServiceId = serviceId; try { String insId = this.mServiceLifecycleIface.registerService( serviceDTO ); if ( insId != null ) { this.mInstanceId = this.mGuidAllocator.parse( insId ); this.mLogger.info( "Successfully registered service : {}, instanceId: {}", serviceDTO.getServiceId(), insId ); } } catch ( Exception e ) { this.mLogger.error( "Failed to register service {}", serviceDTO.getServiceId() ); throw new ClientServiceRegisterException( e ); } return this.mInstanceId; } @Override public void deregister() { if ( this.mInstanceId != null ) { this.mServiceLifecycleIface.deregisterServiceByInstanceId( this.mInstanceId.toString() ); } } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/client/ServiceClient.java ================================================ package com.pinecone.hydra.service.registry.client; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.appoints.AppointNodus; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.ServiceControlRPCException; public interface ServiceClient extends Pinenut { void startService () throws ServiceControlRPCException; void terminateService (); AppointNodus getAppointNodus (); GuidAllocator getGuidAllocator (); GUID registerService( GUID serviceId, GUID deployGuid ) throws ClientServiceRegisterException; void deregister(); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/constant/ServiceStatus.java ================================================ package com.pinecone.hydra.service.registry.constant; import com.pinecone.framework.system.prototype.Pinenut; public enum ServiceStatus implements Pinenut { SERVICE_NEW( 0x00, "New" ), SERVICE_RUNNING( 0x01, "Running" ), // Service is running SERVICE_SUSPENDED( 0x02, "Suspended" ), // Service is suspended SERVICE_EXISTED( 0x03, "Existed" ), // Service is alive SERVICE_TERMINATED( 0x04, "Terminated" ), // Service terminated (normal exit) SERVICE_ERROR( 0x05, "Error" ); // Service terminated (exited on error) private final int code; private final String name; ServiceStatus( int code, String name ) { this.code = code; this.name = name; } public int getCode() { return this.code; } public String getName() { return this.name; } public static ServiceStatus getByCode(int code ) { for ( ServiceStatus type : ServiceStatus.values() ) { if ( type.code ==
code ) { return type; } } return null; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/constant/ServiceVitalizationStatus.java ================================================ package com.pinecone.hydra.service.registry.constant; public enum ServiceVitalizationStatus { New ( 0x00 ), Vitalized ( 0x01 ), Error ( 0x02 ), Success ( 0x03 ); private final int code; ServiceVitalizationStatus( int code ) { this.code = code; } public int getCode() { return this.code; } public static ServiceVitalizationStatus getByCode( int code ) { for ( ServiceVitalizationStatus type : ServiceVitalizationStatus.values() ) { if ( type.code == code ) { return type; } } return null; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/dto/ApplicationMetaDTO.java ================================================ package com.pinecone.hydra.service.registry.dto; public class ApplicationMetaDTO { private String guid; private String name; private String type; private String displayName; private String alias; private String fullName; private String deploymentMethod; private String resourceType; private String level; public String getGuid() { return this.guid; } public void setGuid(String guid) { this.guid = guid; } public String getLevel() { return this.level; } public void setLevel(String level) { this.level = level; } public String getResourceType() { return this.resourceType; } public void setResourceType(String resourceType) { this.resourceType = resourceType; } public String getDeploymentMethod() { return this.deploymentMethod; } public void setDeploymentMethod(String deploymentMethod) { this.deploymentMethod = deploymentMethod; } public String getFullName() { return this.fullName; } public void setFullName(String fullName) { this.fullName = fullName; } public String getAlias() { return this.alias; } public void setAlias(String alias) { this.alias = alias; } public String getDisplayName() { return this.displayName; } public void setDisplayName(String displayName) { this.displayName = displayName; } public String getType() { return this.type; } public void setType(String type) { this.type = type; } public String getName() { return this.name; } public void setName(String name) { this.name = name; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/dto/RegisterServiceDTO.java ================================================ package com.pinecone.hydra.service.registry.dto; import com.pinecone.framework.system.prototype.Pinenut; public class RegisterServiceDTO implements Pinenut { protected Long clientId; protected String serviceId; protected String deployId; public RegisterServiceDTO() { } public RegisterServiceDTO( Long clientId, String serviceId, String deployId ) { this.clientId = clientId; this.serviceId = serviceId; this.deployId = deployId; } public Long getClientId() { return this.clientId; } public void setClientId( Long clientId ) { this.clientId = clientId; } public String getServiceId() { return this.serviceId; } public void setServiceId( String serviceId ) { this.serviceId = serviceId; } public String getDeployId() { return this.deployId; } public void setDeployId(String deployId) { this.deployId = deployId; } } ================================================ FILE: 
Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/dto/ServiceMetaDTO.java ================================================ package com.pinecone.hydra.service.registry.dto; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.service.Service; import com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.service.kom.entity.Namespace; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.ulf.util.guid.GUIDs; import java.time.LocalDateTime; public class ServiceMetaDTO implements Pinenut { private String guid; private String name; private String type; private String displayName; private String description; private String fullName; private String groupNamespace; private String groupName; private String scenario; private String primaryImplLang; private String extraInformation; private String level; public String getType() { return this.type; } public void setType( String type ) { this.type = type; } public String getGuid() { return this.guid; } public void setGuid(String guid) { this.guid = guid; } public String getName() { return this.name; } public void setName(String name) { this.name = name; } public String getDisplayName() { return this.displayName; } public void setDisplayName(String displayName) { this.displayName = displayName; } public String getDescription() { return this.description; } public void setDescription(String description) { this.description = description; } public String getFullName() { return this.fullName; } public void setFullName(String fullName) { this.fullName = fullName; } public String getGroupNamespace() { return this.groupNamespace; } public void setGroupNamespace(String groupNamespace) { this.groupNamespace = groupNamespace; } public String getGroupName() { return this.groupName; } public void setGroupName(String groupName) { this.groupName = groupName; } public String getScenario() { return this.scenario; } public void setScenario(String scenario) { this.scenario = scenario; } public String getPrimaryImplLang() { return this.primaryImplLang; } public void setPrimaryImplLang(String primaryImplLang) { this.primaryImplLang = primaryImplLang; } public String getExtraInformation() { return this.extraInformation; } public void setExtraInformation(String extraInformation) { this.extraInformation = extraInformation; } public String getLevel() { return this.level; } public void setLevel(String level) { this.level = level; } public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } public static ServiceMetaDTO from( Service service ){ ServiceMetaDTO serviceMetaDTO = new ServiceMetaDTO(); serviceMetaDTO.setGuid( service.getId().toString() ); serviceMetaDTO.setName(service.getName()); serviceMetaDTO.setDescription( service.getDescription() ); serviceMetaDTO.setDisplayName( service.getDisplayName() ); serviceMetaDTO.setFullName( service.getFullName() ); serviceMetaDTO.setExtraInformation( service.getExtraInformation() ); serviceMetaDTO.setLevel( service.getLevel() ); serviceMetaDTO.setScenario( service.getScenario() ); serviceMetaDTO.setPrimaryImplLang( service.getPrimaryImplLang() ); serviceMetaDTO.setGroupName( service.getGroupName() ); serviceMetaDTO.setType( service.getType() ); return serviceMetaDTO; } 
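// Usage sketch (hypothetical caller, not part of this repository): given a Service obtained from the registry,
//     ServiceMetaDTO dto = ServiceMetaDTO.from( service );
//     String json = dto.toJSONString();
// The ServiceElement overload below has no group context available, so it leaves groupName null and reuses the element path as the full name.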
public static ServiceMetaDTO from( ServiceElement service ){ ServiceMetaDTO serviceMetaDTO = new ServiceMetaDTO(); serviceMetaDTO.setGuid( service.getId().toString() ); serviceMetaDTO.setName(service.getName()); serviceMetaDTO.setDescription( service.getDescription() ); serviceMetaDTO.setDisplayName( service.getName() ); serviceMetaDTO.setFullName( service.getPath() ); serviceMetaDTO.setExtraInformation( service.getExtraInformation() ); serviceMetaDTO.setLevel( service.getLevel() ); serviceMetaDTO.setScenario( service.getScenario() ); serviceMetaDTO.setPrimaryImplLang( service.getPrimaryImplLang() ); serviceMetaDTO.setGroupName( null ); serviceMetaDTO.setType( service.getType() ); return serviceMetaDTO; } public static ServiceElement toServiceElement( ServiceMetaDTO meta, GuidAllocator guidAllocator ) { ServiceElement element = new GenericServiceElement(); if ( meta.getGuid() != null ) { element.setGuid( guidAllocator.parse(meta.getGuid()) ); } element.setName( meta.getName()); element.setDescription( meta.getDescription() ); element.setExtraInformation( meta.getExtraInformation() ); element.setLevel( meta.getLevel() ); element.setScenario( meta.getScenario() ); element.setPrimaryImplLang( meta.getPrimaryImplLang() ); element.setType( meta.getType() ); return element; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/event/ServiceRegisterEvent.java ================================================ package com.pinecone.hydra.service.registry.event; public enum ServiceRegisterEvent { Created ( 0x00, "Created" ), Registered ( 0x01, "Registered" ), Deregistered ( 0x02, "Deregistered" ), Detached ( 0x03, "Detached" ), ; private final int code; private final String name; ServiceRegisterEvent(int code, String name ) { this.code = code; this.name = name; } public int getCode() { return this.code; } public String getName() { return this.name; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/event/ServiceRegisterEventHandler.java ================================================ package com.pinecone.hydra.service.registry.event; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface ServiceRegisterEventHandler extends Pinenut { void fired( long clientId, GUID insId, GUID serviceId, ServiceRegisterEvent event, Object caused ); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceEventHooker.java ================================================ package com.pinecone.hydra.service.registry.server; import java.util.function.Supplier; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.service.registry.appoint.ServiceClientile; public interface ServiceEventHooker extends Pinenut { void afterNewConnectionInbound( Long clientId, Object connectId, Object connection, Object context, Supplier<ServiceClientile> constructor ); void afterConnectionDetach( Long clientId, Object channelId, Object connection ); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceLifecycleIface.java ================================================ package com.pinecone.hydra.service.registry.server; import com.pinecone.framework.system.prototype.Pinenut; import
com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry; import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO; import com.pinecone.hydra.umct.stereotype.Iface; @Iface public interface ServiceLifecycleIface extends Pinenut { /** * @return the GUID of the created service instance */ String registerService( RegisterServiceDTO serviceDTO ); boolean createInstanceMeta( ServiceInstanceEntry serviceInstanceEntry ); void deregisterServiceByClientId( Long clientId ); void deregisterServiceByInstanceId( String instanceId ); boolean hasOwnedServiceByServiceId( String serviceId ); boolean hasOwnedServiceInstance( Long clientId ); boolean hasOwnedServiceInstance( String instanceId ); boolean hasOwnedServiceClient( Long clientId ); Integer countRegisteredService(); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceLifecycleService.java ================================================ package com.pinecone.hydra.service.registry.server; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.ServiceInstanceCreationException; import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO; public class ServiceLifecycleService implements Pinenut { protected ServiceManager mServiceManager; protected ServiceInstrument mServiceInstrument; protected GuidAllocator mGuidAllocator; protected Logger mLogger; public ServiceLifecycleService( ServiceManager serviceManager ) { this.mServiceManager = serviceManager; this.mServiceInstrument = serviceManager.getServicesInstrument(); this.mGuidAllocator = this.mServiceInstrument.getGuidAllocator(); this.mLogger = LoggerFactory.getLogger( this.getClass() ); } public String registerService( RegisterServiceDTO serviceDTO ) throws ClientServiceRegisterException { Long clientId = serviceDTO.getClientId(); String szServId = serviceDTO.getServiceId(); GUID serviceId = this.mGuidAllocator.parse( szServId ); GUID deployId = null; if ( StringUtils.isNotBlank(serviceDTO.getDeployId()) ) { deployId = this.mGuidAllocator.parse( serviceDTO.getDeployId() ); } GUID insId = this.mServiceManager.registerService( clientId, serviceId, deployId ); if ( insId != null ) { return insId.toString(); } return null; } public boolean createInstanceMeta( GenericServiceInstanceEntity instanceEntity ) throws ServiceInstanceCreationException { try { this.mServiceInstrument.createServiceInstance( instanceEntity ); } catch (Exception e) { throw new ServiceInstanceCreationException( e ); } return true; } public void deregisterServiceByClientId( Long clientId ) { this.mServiceManager.deregisterServiceInstance( clientId ); } public void deregisterServiceByInstanceId( String instanceId ) { this.mServiceManager.deregisterServiceInstance( this.mGuidAllocator.parse( instanceId ) ); } public boolean hasOwnedServiceByServiceId( String serviceId ) { return this.mServiceManager.hasOwnedService( this.mGuidAllocator.parse( serviceId ) ); } public boolean hasOwnedServiceInstance( Long clientId ) { return this.mServiceManager.hasOwnedServiceInstance( clientId ); } public
boolean hasOwnedServiceClient( Long clientId ) { return this.mServiceManager.hasOwnedServiceClient( clientId ); } public boolean hasOwnedServiceInstance( String instanceId ) { return this.mServiceManager.hasOwnedInstance( this.mGuidAllocator.parse( instanceId ) ); } public Integer countRegisteredService() { return this.mServiceManager.countRegisteredService(); } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceManager.java ================================================ package com.pinecone.hydra.service.registry.server; import java.util.Collection; import java.util.function.Supplier; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.service.ServiceInstance; import com.pinecone.hydra.service.entity.USII; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.ServiceControlRPCException; import com.pinecone.hydra.service.registry.appoint.ServiceAppointServer; import com.pinecone.hydra.service.registry.event.ServiceRegisterEventHandler; import com.pinecone.hydra.system.component.Slf4jTraceable; public interface ServiceManager extends Manager, Slf4jTraceable { Collection getServers(); /** * Add server only. */ ServiceManager addAppointServer( ServiceAppointServer appointServer ); /** * Add, and hook. */ ServiceManager hookAppointServer( ServiceAppointServer appointServer ); /** * Add, hook, and start. */ ServiceManager vitalizeAppointServer( ServiceAppointServer appointServer ) throws ServiceControlRPCException; ServiceAppointServer getAppointServerById( Long appointNodeId ); ServiceAppointServer evictAppointServerById( Long appointNodeId ); int serverSize(); ServiceEventHooker serviceEventHooker(); void startService () throws ServiceControlRPCException; void registerServiceInstance( ServiceInstance instance ); GUID registerService( Long clientId, GUID serviceId, GUID deployGuid ) throws ClientServiceRegisterException; void destroyServiceInstance( GUID serviceId, GUID instanceGuid ); Collection fetchServiceInstance( Long clientId ); Collection fetchServiceInstance( Identification serviceId ); Collection fetchServiceInstanceByIId( Identification instanceId ); Collection fetchServiceInstance( USII usii ); ServiceInstance queryServiceInstance( Long clientId ); ServiceInstance queryServiceInstance( USII usii ); boolean hasOwnedService( Identification serviceId ); boolean hasOwnedInstance( Identification instanceId ); boolean hasOwnedService( USII usii ); boolean hasOwnedServiceInstance( Long clientId ); boolean hasOwnedServiceClient( Long clientId ); default ServiceInstance queryFirstInstance( Long clientId ) { Collection instances = this.fetchServiceInstance( clientId ); if ( !instances.isEmpty() ) { return instances.iterator().next(); } return null; } default ServiceInstance queryFirstInstance( Identification serviceId ) { Collection instances = this.fetchServiceInstance( serviceId ); if ( !instances.isEmpty() ) { return instances.iterator().next(); } return null; } default ServiceInstance queryFirstInstance( USII usii ) { Collection instances = this.fetchServiceInstance( usii ); if ( !instances.isEmpty() ) { return instances.iterator().next(); } return null; } ServiceInstance getInstance( Identification instanceId ) ; Collection 
deregisterServiceInstance ( Long clientId ); Collection deregisterServiceInstance( Identification instanceId ); Collection deregisterService( Identification serviceId ); ServiceInstrument getServicesInstrument(); int countRegisteredService(); void addRegisterEventHandler( ServiceRegisterEventHandler handler ) ; void removeRegisterEventHandler( ServiceRegisterEventHandler handler ) ; int registerEventHandlerSize( ) ; ServiceLifecycleService serviceLifecycleService(); ServiceMetaService getServiceMetaService(); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceMetaManipulationIface.java ================================================ package com.pinecone.hydra.service.registry.server; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.service.registry.dto.ServiceMetaDTO; import com.pinecone.hydra.umct.stereotype.Iface; import java.util.List; @Iface public interface ServiceMetaManipulationIface extends Pinenut { List<ServiceMetaDTO> fetchServiceInsMetaByClientId( long clientId ); List<ServiceMetaDTO> fetchServiceInsMetaByServiceId( String serviceId ); ServiceMetaDTO queryServiceMetaByPath( String path ); ServiceMetaDTO queryServiceMetaByGuid( String guid ); String evalCreationStatement( String jsonStatement ); String createNewService( String parentAppPath, ServiceMetaDTO meta ); } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceMetaService.java ================================================ package com.pinecone.hydra.service.registry.server; import java.util.ArrayList; import java.util.Collection; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.service.Service; import com.pinecone.hydra.service.ServiceInstance; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ApplicationElement; import com.pinecone.hydra.service.kom.entity.ElementNode; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.marshaling.ServiceJSONDecoder; import com.pinecone.hydra.service.registry.dto.ServiceMetaDTO; import com.pinecone.hydra.unit.imperium.entity.TreeNode; public class ServiceMetaService implements Pinenut { protected ServiceManager mServiceManager; protected ServiceInstrument mServiceInstrument; protected ServiceJSONDecoder mServiceJSONDecoder; public ServiceMetaService( ServiceManager serviceManager ){ this.mServiceManager = serviceManager; this.mServiceInstrument = serviceManager.getServicesInstrument(); this.mServiceJSONDecoder = new ServiceJSONDecoder( this.mServiceInstrument ); } public List<ServiceMetaDTO> fetchServiceInsMetaByClientId(long clientId ){ List<ServiceMetaDTO> serviceMetaDTOS = new ArrayList<>(); Collection<ServiceInstance> serviceInstances = this.mServiceManager.fetchServiceInstance( clientId ); for( ServiceInstance serviceInstance : serviceInstances ){ Service service = serviceInstance.getService(); serviceMetaDTOS.add( ServiceMetaDTO.from( service ) ); } return serviceMetaDTOS; } public List<ServiceMetaDTO> fetchServiceInsMetaByServiceId( String serviceId ) { List<ServiceMetaDTO> serviceMetaDTOS = new ArrayList<>(); Collection<ServiceInstance> serviceInstances = this.mServiceManager.fetchServiceInstance( this.mServiceInstrument.getGuidAllocator().parse( serviceId ) ); for( ServiceInstance serviceInstance : serviceInstances ){ Service service = serviceInstance.getService();
serviceMetaDTOS.add( ServiceMetaDTO.from( service ) ); } return serviceMetaDTOS; } public ServiceMetaDTO queryServiceMetaByPath( String path ) { ElementNode node = this.mServiceManager.getServicesInstrument().queryElement( path ); ServiceElement serviceElement = node.evinceServiceElement(); if ( serviceElement == null ) { return null; } return ServiceMetaDTO.from( serviceElement ); } public ServiceMetaDTO queryServiceMetaByGuid( String guid ) { TreeNode node = this.mServiceManager.getServicesInstrument().get( this.mServiceInstrument.getGuidAllocator().parse( guid ) ); if ( node instanceof ServiceElement ) { ServiceElement serviceElement = (ServiceElement) node; return ServiceMetaDTO.from( serviceElement ); } return null; } public String evalCreationStatement( String jsonStatement ) { ElementNode node = this.mServiceJSONDecoder.decode( new JSONMaptron( jsonStatement ) ); if ( node == null ) { return null; } return node.getGuid().toString(); } public String createNewService( String parentAppPath, ServiceMetaDTO meta ) { ElementNode node = this.mServiceInstrument.queryElement( parentAppPath ); if ( node instanceof ApplicationElement) { ApplicationElement applicationElement = (ApplicationElement) node; ServiceElement serviceElement = ServiceMetaDTO.toServiceElement( meta, this.mServiceInstrument.getGuidAllocator() ); if ( serviceElement.getGuid() == null ) { serviceElement.setGuid( this.mServiceInstrument.getGuidAllocator().nextGUID() ); } this.mServiceInstrument.put( serviceElement ); this.mServiceInstrument.affirmOwnedNode( applicationElement.getGuid(), serviceElement.getGuid() ); return serviceElement.getGuid().toString(); } return null; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/UniformServiceEventHooker.java ================================================ package com.pinecone.hydra.service.registry.server; import java.util.function.Supplier; import com.pinecone.hydra.service.registry.appoint.ServiceClientile; public class UniformServiceEventHooker implements ServiceEventHooker { protected UniformServiceManager mUniformServiceManager; public UniformServiceEventHooker( UniformServiceManager manager ) { this.mUniformServiceManager = manager; } @Override public void afterNewConnectionInbound( Long clientId, Object connectId, Object connection, Object context, Supplier<ServiceClientile> constructor ) { this.mUniformServiceManager.mClientRegistry.compute( clientId, (key, ins ) -> { if ( ins == null ) { ins = constructor.get(); } ins.afterNewConnectionInbound( clientId, connectId, connection, context ); return ins; } ); } @Override public void afterConnectionDetach( Long clientId, Object channelId, Object connection ) { synchronized ( this.mUniformServiceManager.mClientRegistry ) { ServiceClientile client = this.mUniformServiceManager.mClientRegistry.get( clientId ); // It’s not thread-safe beyond this critical zone, as the size may be mutated by other threads after this point.
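// Once the detach above drops the last connection ( connectionCount() < 1 ), the clientile is evicted from the registry and all of this client's service instances are deregistered.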
if ( client != null ) { client.afterConnectionDetach( clientId, channelId, connection ); if ( client.connectionCount() < 1 ) { this.mUniformServiceManager.mClientRegistry.remove( clientId ); this.mUniformServiceManager.deregisterServiceInstance( clientId ); } } } } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/UniformServiceManager.java ================================================ package com.pinecone.hydra.service.registry.server; import com.mysql.cj.exceptions.AssertionFailedException; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.service.ServiceInstance; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.entity.USII; import com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.ServiceControlRPCException; import com.pinecone.hydra.service.registry.UniformService; import com.pinecone.hydra.service.registry.WolfServiceInstance; import com.pinecone.hydra.service.registry.appoint.ServiceClientile; import com.pinecone.hydra.service.registry.appoint.ServiceAppointServer; import com.pinecone.hydra.service.registry.constant.ServiceStatus; import com.pinecone.hydra.service.registry.event.ServiceRegisterEvent; import com.pinecone.hydra.service.registry.event.ServiceRegisterEventHandler; import com.pinecone.hydra.system.component.LogStatuses; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; public class UniformServiceManager implements ServiceManager { protected final ServiceInstrument mServiceInstrument; protected final ConcurrentMap<Long, ServiceAppointServer> mServerPoolMap; // ServerId => Node protected final ConcurrentMap<Long, ServiceInstance> mCIdInstanceRegistry; // ClientId => Instance protected final ConcurrentMap<Identification, ConcurrentMap<Long, ServiceInstance>> mServiceRegistry; // ServiceId => ( ClientId => Instance ) protected final ConcurrentMap<Identification, ClientInstance> mInstanceRegistry; // InstanceId => Instance protected final ConcurrentMap<Long, ServiceClientile> mClientRegistry; // ClientId => Client protected final List<ServiceRegisterEventHandler> mRegisterEventHandlers; protected final GuidAllocator mGuidAllocator; protected final ServiceEventHooker mServiceEventHooker; protected final ServiceLifecycleService mServiceLifecycleService; protected final ServiceMetaService mServiceMetaService; private final Logger mLogger; private final ReadWriteLock mEventHandlerLock = new ReentrantReadWriteLock(); @Override public Logger getLogger() { return this.mLogger; } protected void vitalizeRPCSubsystem() throws ServiceControlRPCException { try { for ( Map.Entry<Long, ServiceAppointServer> entry : this.mServerPoolMap.entrySet() ) { if ( !entry.getValue().isStarted() ) { entry.getValue().execute(); } } this.infoLifecycle( "RPC Subsystem Service Vitalization", LogStatuses.StatusDone ); } catch ( Exception e ) { throw new
ServiceControlRPCException( e ); } } public UniformServiceManager( ServiceInstrument serviceInstrument ) { this.mServiceInstrument = serviceInstrument; this.mServerPoolMap = new ConcurrentHashMap<>(); this.mServiceRegistry = new ConcurrentHashMap<>(); this.mCIdInstanceRegistry = new ConcurrentHashMap<>(); this.mInstanceRegistry = new ConcurrentHashMap<>(); this.mClientRegistry = new ConcurrentHashMap<>(); this.mLogger = LoggerFactory.getLogger( this.getClass() ); this.mRegisterEventHandlers = new ArrayList<>(); this.mGuidAllocator = serviceInstrument.getGuidAllocator(); this.mServiceEventHooker = new UniformServiceEventHooker( this ); this.mServiceLifecycleService = new ServiceLifecycleService( this ); this.mServiceMetaService = new ServiceMetaService( this ); } @Override public Collection getServers() { return this.mServerPoolMap.values(); } @Override public ServiceManager addAppointServer( ServiceAppointServer appointServer ) { this.mServerPoolMap.put( appointServer.getMessageNodeId(), appointServer ); return this; } @Override public ServiceManager hookAppointServer( ServiceAppointServer appointServer ) { this.addAppointServer( appointServer ); appointServer.hookServiceManager( this ); return this; } @Override public ServiceManager vitalizeAppointServer( ServiceAppointServer appointServer ) throws ServiceControlRPCException { try { this.hookAppointServer( appointServer ); appointServer.execute(); return this; } catch ( Exception e ) { throw new ServiceControlRPCException( e ); } } @Override public ServiceAppointServer getAppointServerById( Long appointNodeId ) { return this.mServerPoolMap.get( appointNodeId ); } @Override public ServiceAppointServer evictAppointServerById( Long appointNodeId ) { ServiceAppointServer legacy = this.mServerPoolMap.remove( appointNodeId ); if ( legacy != null ) { legacy.close(); // In principle, all connections will be closed cascadingly. 
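// Cascade path: closing an appoint server closes its channels; each channel-inactive event
// is routed to ServiceEventHooker.afterConnectionDetach (see the handler wiring in
// HuskyServiceAppointServer.hookServiceManager further below), which detaches the connection
// from its clientile and deregisters the client's service instances once the connection
// count drops to zero.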
return legacy; } return null; } @Override public int serverSize() { return this.mServerPoolMap.size(); } @Override public ServiceEventHooker serviceEventHooker() { return this.mServiceEventHooker; } @Override public ServiceLifecycleService serviceLifecycleService() { return this.mServiceLifecycleService; } @Override public ServiceMetaService getServiceMetaService() { return this.mServiceMetaService; } @Override public void startService() throws ServiceControlRPCException { this.vitalizeRPCSubsystem(); } @Override public void addRegisterEventHandler( ServiceRegisterEventHandler handler ) { try { this.mEventHandlerLock.writeLock().lock(); this.mRegisterEventHandlers.add( handler ); } finally { this.mEventHandlerLock.writeLock().unlock(); } } @Override public void removeRegisterEventHandler( ServiceRegisterEventHandler handler ) { try { this.mEventHandlerLock.writeLock().lock(); // Removal mutates the handler list, so it must take the write lock, not the read lock.
this.mRegisterEventHandlers.remove( handler ); } finally { this.mEventHandlerLock.writeLock().unlock(); } } @Override public int registerEventHandlerSize() { try { this.mEventHandlerLock.readLock().lock(); return this.mRegisterEventHandlers.size(); } finally { this.mEventHandlerLock.readLock().unlock(); } } protected void triggerServiceEvent( long clientId, Identification insId, ServiceRegisterEvent event, Object caused ) { ServiceInstance instance = this.mCIdInstanceRegistry.get( clientId ); if ( instance == null && caused instanceof ServiceInstance ) { instance = (ServiceInstance) caused; // Deregistration events fire after the client-id mapping has already been removed.
} if ( instance == null ) { return; } GUID serviceId = (GUID) instance.getUSII().getServiceId(); try { this.mEventHandlerLock.readLock().lock(); for ( ServiceRegisterEventHandler handler : this.mRegisterEventHandlers ) { handler.fired( clientId, (GUID) insId, serviceId, event, caused ); } } finally { this.mEventHandlerLock.readLock().unlock(); } }
// @Override
// public void registerService( ServiceInstance instance ) {
//     USII primaryKey = instance.getUSII();
//     Long clientId = primaryKey.getClientId();
//
//     this.mServiceRegistry.compute( primaryKey, ( key, ins ) -> {
//         if ( ins == null ) {
//             ins = new ConcurrentHashMap<>();
//         }
//         ins.put( clientId, instance );
//         return ins;
//     } );
// }
@Override public void registerServiceInstance( ServiceInstance instance ) { Identification primaryKey = instance.getUSII().getServiceId(); Long clientId = instance.getUSII().getClientId(); this.mCIdInstanceRegistry.put( clientId, instance ); this.mInstanceRegistry.put( instance.getId(), new ClientInstance( clientId, instance ) ); this.mServiceRegistry.compute( primaryKey, ( key, ins ) -> { if ( ins == null ) { ins = new ConcurrentHashMap<>(); } ins.put( clientId, instance ); return ins; } ); this.triggerServiceEvent( clientId, instance.getId(), ServiceRegisterEvent.Registered, instance ); } @Override public GUID registerService( Long clientId, GUID serviceId, GUID deployGuid ) throws ClientServiceRegisterException { synchronized ( this.mServiceRegistry ) { ServiceClientile client = this.mClientRegistry.get( clientId ); if ( client == null ) { throw new ClientServiceRegisterException( "Client " + clientId + " does not exist."
); } SocketAddress remote = client.getRemoteAddress(); String ip = ""; if ( remote instanceof InetSocketAddress ) { InetSocketAddress inet = (InetSocketAddress) remote; ip = inet.getAddress().getHostAddress(); } ServiceInstanceEntry neo = this.createServiceInstanceMeta( serviceId, deployGuid, ip ); // new
ServiceInstanceEntry element = this.updateServiceInstanceStatus( neo.getGuid(), ServiceStatus.SERVICE_RUNNING ); TreeNode node = this.mServiceInstrument.get( serviceId ); ServiceElement serviceElement = (ServiceElement) node; ServiceInstance serviceInstance = new WolfServiceInstance( clientId, new UniformService( serviceId, serviceElement ), element.getGuid() ); this.registerServiceInstance( serviceInstance ); this.mLogger.info( "Remote serviceInstance {} registered successfully, ip: {}. ", element.getGuid(), ip ); return element.getGuid(); } } protected ServiceInstanceEntry updateServiceInstanceStatus( GUID id, ServiceStatus status ) { ServiceInstanceEntry element = this.mServiceInstrument.queryServiceInstance( id ); if ( element != null ) { element.setStatus( status.getName() ); element.setRunCount( element.getRunCount() + 1 ); this.mServiceInstrument.updateServiceInstance( element ); } return element; } @Override public void destroyServiceInstance( GUID serviceId, GUID instanceGuid ) { } @Override public Collection fetchServiceInstance( Long clientId ) { ServiceInstance instance = this.mCIdInstanceRegistry.get( clientId ); if ( instance == null ) { return List.of(); } // Guard against absent clients; List.of( null ) would throw a NullPointerException.
return List.of( instance ); } @Override public Collection fetchServiceInstance( Identification serviceId ) { ConcurrentMap map = this.mServiceRegistry.get( serviceId ); if ( map != null ) { return map.values(); } return List.of(); } @Override public Collection fetchServiceInstanceByIId( Identification instanceId ) { ClientInstance i = this.mInstanceRegistry.get( instanceId ); if ( i == null ) { return List.of(); } return List.of( i.getInstance() ); } @Override public Collection fetchServiceInstance( USII usii ) { return this.fetchServiceInstance( usii.getServiceId() ); } @Override public ServiceInstance queryServiceInstance( USII usii ) { return this.queryServiceInstance( usii.getClientId() ); } @Override public ServiceInstance queryServiceInstance( Long clientId ) { return this.mCIdInstanceRegistry.get( clientId ); } @Override public boolean hasOwnedService( USII usii ) { return this.hasOwnedService( usii.getServiceId() ); } @Override public boolean hasOwnedService( Identification serviceId ) { return this.mServiceRegistry.containsKey( serviceId ); } @Override public boolean hasOwnedInstance( Identification instanceId ) { return this.mInstanceRegistry.containsKey( instanceId ); } @Override public boolean hasOwnedServiceInstance( Long clientId ) { return this.mClientRegistry.containsKey( clientId ); } @Override public boolean hasOwnedServiceClient( Long clientId ) { return this.mClientRegistry.containsKey( clientId ); } @Override public ServiceInstance getInstance( Identification instanceId ) { ClientInstance i = this.mInstanceRegistry.get( instanceId ); if ( i == null ) { return null; } return i.getInstance(); } /** * Final elimination entry point. * 终末清除入口点 */ @Override public Collection deregisterServiceInstance( Long clientId ) { synchronized ( this.mServiceRegistry ) { ServiceInstance eliminated = this.mCIdInstanceRegistry.remove( clientId ); // It’s not thread-safe beyond this critical zone, as the size may be mutated by other threads after this point. // 该临界区后面线程并不安全, size 可能在该临界区后被其他线程破坏.
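// Both registerService( clientId, ... ) above and this elimination path synchronize on
// mServiceRegistry, so a client cannot be re-registered while its instances are being torn
// down; the individual ConcurrentMap operations alone would not give that guarantee.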
if ( eliminated != null ) { ConcurrentMap instances = this.mServiceRegistry.get( eliminated.getServiceId() ); if ( instances != null ) { this.mInstanceRegistry.remove( eliminated.getId() ); this.updateServiceInstanceStatus( (GUID) eliminated.getId(), ServiceStatus.SERVICE_TERMINATED ); this.getLogger().info( "Detached service instance, { clientId: {}, instanceId: {}, serviceId: {} }. ", clientId, eliminated.getId(), eliminated.getServiceId() ); this.triggerServiceEvent( clientId, eliminated.getId(), ServiceRegisterEvent.Deregistered, eliminated ); // Fired before the returns below; triggerServiceEvent falls back to `caused`, as the client-id mapping is already removed.
if ( instances.size() <= 1 ) { instances = this.mServiceRegistry.remove( eliminated.getServiceId() ); return instances.values(); } else { // Replica instance, no extra status change required. 副本实例, 不用额外变更状态.
ServiceInstance instance = instances.remove( clientId ); if ( instance != null ) { return List.of( instance ); } } } else { throw new AssertionFailedException( "Illegal internal state, mismatched elimination-service size." ); } } return null; } } @Override public Collection deregisterServiceInstance( Identification instanceId ) { ClientInstance clientInstance = this.mInstanceRegistry.get( instanceId ); if ( clientInstance == null ) { return null; } return this.deregisterServiceInstance( clientInstance.getClientId() ); } @Override public Collection deregisterService( Identification serviceId ) { ConcurrentMap instances = this.mServiceRegistry.get( serviceId ); if ( instances != null ) { Collection eliminated = new ArrayList( instances.values() ); // Deregister each client while the serviceId mapping is still present; deregisterServiceInstance removes the mapping itself once the last replica is gone.
for ( Object clientId : new ArrayList( instances.keySet() ) ) { this.deregisterServiceInstance( (Long) clientId ); } return eliminated; } return null; } @Override public ServiceInstrument getServicesInstrument() { return this.mServiceInstrument; } @Override public int countRegisteredService() { return this.mServiceRegistry.size(); } protected ServiceInstanceEntry createServiceInstanceMeta( GUID serviceId, GUID deployGuid, String ip ) { GUID guid = this.mGuidAllocator.nextGUID(); ServiceInstanceEntry instanceEntity = new GenericServiceInstanceEntity(); instanceEntity.setDeployGuid( deployGuid ); instanceEntity.setStatus( ServiceStatus.SERVICE_NEW.getName() ); instanceEntity.setLatestStartTime( LocalDateTime.now() ); instanceEntity.setIp( ip ); instanceEntity.setGuid( guid ); instanceEntity.setServiceGuid( serviceId ); this.mServiceInstrument.createServiceInstance( instanceEntity ); return instanceEntity; } protected static class ClientInstance { protected Long clientId; protected ServiceInstance instance; public ClientInstance( Long clientId, ServiceInstance instance ) { this.clientId = clientId; this.instance = instance; } public Long getClientId() { return this.clientId; } public ServiceInstance getInstance() { return this.instance; } } }
================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/HuskyServiceAppointServer.java ================================================
package com.pinecone.hydra.service.registry.ulf; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.hydra.service.registry.appoint.ServiceAppointServer; import com.pinecone.hydra.service.registry.server.ServiceManager; import com.pinecone.hydra.uma.DuplexAppointServer; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.ChannelHandleException; import com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umc.msg.event.ChannelEventHandler; import com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler; import com.pinecone.hydra.umc.wolf.server.UlfServer; public class HuskyServiceAppointServer
implements ServiceAppointServer { protected DuplexAppointServer mAppointServer; protected ServiceManager mServiceManager; public HuskyServiceAppointServer( DuplexAppointServer duplexAppointServer ) { this.mAppointServer = duplexAppointServer; } public HuskyServiceAppointServer( DuplexAppointServer duplexAppointServer, ServiceManager serviceManager ) { this( duplexAppointServer ); this.mServiceManager = serviceManager; } @Override public ServiceManager serviceManager() { return this.mServiceManager; } @Override public ServiceAppointServer hookServiceManager( ServiceManager serviceManager ) { if( this.mServiceManager != null ) { throw new IllegalStateException( "Manager has already been hooked." ); } this.mServiceManager = serviceManager; this.mAppointServer.registerController( new ServiceLifecycleController( this.mServiceManager ) ); this.mAppointServer.registerController( new ServiceMetaController( this.mServiceManager ) ); MessageNode messageNode = this.mAppointServer.getMessageNode(); UlfServer ulfServer = (UlfServer) messageNode; ulfServer.registerDataArrivedEventHandlers( new ChannelEventHandler() { @Override public void afterEventTriggered( ChannelControlBlock block, Object context ) { long clientId = block.getChannel().getIdentityID(); Object channelId = block.getChannel().getChannelID(); mServiceManager.serviceEventHooker().afterNewConnectionInbound( clientId, channelId, block.getChannel(), context, () -> new HuskyServiceClientile( HuskyServiceAppointServer.this ) ); } } ); ulfServer.registerChannelInactiveHandler( new ChannelInactiveHandler() { @Override public boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException { Long clientId = ccb.getChannel().getIdentityID(); Object channelId = ccb.getChannel().getChannelID(); mServiceManager.serviceEventHooker().afterConnectionDetach( clientId, channelId, ccb.getChannel() ); return false; } } ); this.mServiceManager.getLogger().info( "AppointServer[{}] has been hooked to service manager.", this.mAppointServer.getName() ); return this; } @Override public String getName() { return this.mAppointServer.getName(); } @Override public PatriarchalConfig getConfig() { return this.mAppointServer.getConfig(); } @Override public void close() { this.mAppointServer.close(); } @Override public void execute() throws Exception { this.mAppointServer.execute(); } @Override public long getMessageNodeId() { return this.mAppointServer.getMessageNodeId(); } @Override public boolean isTerminated() { return this.mAppointServer.getMessageNode().isTerminated(); } @Override public boolean isStarted() { return !this.isTerminated(); } }
================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/HuskyServiceClientile.java ================================================
package com.pinecone.hydra.service.registry.ulf; import java.net.SocketAddress; import java.util.Collection; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import com.pinecone.hydra.service.registry.appoint.ServiceClientile; import com.pinecone.hydra.service.registry.appoint.ServiceAppointServer; import com.pinecone.hydra.umc.msg.UMCChannel; public class HuskyServiceClientile implements ServiceClientile { protected long mClientId = -1; // connectionId => channel
protected final ConcurrentMap mServiceChannels; protected final ServiceAppointServer mServiceAppointServer; protected SocketAddress mMainRemoteAddress; public HuskyServiceClientile(
ServiceAppointServer serviceAppointServer ) { this.mServiceChannels = new ConcurrentHashMap<>(); this.mServiceAppointServer = serviceAppointServer; } @Override public long getClientId() { return this.mClientId; } @Override public int connectionCount() { return this.mServiceChannels.size(); } @Override public boolean isDefunct() { return this.mServiceChannels.isEmpty(); } @Override public Object queryNativeConnection( Object connectionIdentity ) { return this.mServiceChannels.get( connectionIdentity ); } @Override public Collection connections() { return this.mServiceChannels.values(); } @Override public void shutdown() { for ( UMCChannel umcChannel : this.mServiceChannels.values() ) { umcChannel.close(); } this.mServiceChannels.clear(); } @Override public ServiceAppointServer serviceAppointServer() { return this.mServiceAppointServer; } @Override public void afterNewConnectionInbound( Long clientId, Object connectId, Object connection, Object context ) { UMCChannel channel = (UMCChannel) connection; this.mServiceChannels.put( connectId, channel ); this.mClientId = clientId; this.mMainRemoteAddress = channel.remoteAddress(); } @Override public void afterConnectionDetach( Long clientId, Object channelId, Object connection ) { this.mServiceChannels.remove( channelId ); } @Override public SocketAddress getRemoteAddress() { return this.mMainRemoteAddress; } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/ServiceLifecycleController.java ================================================ package com.pinecone.hydra.service.registry.ulf; import com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity; import com.pinecone.hydra.service.registry.ClientServiceRegisterException; import com.pinecone.hydra.service.registry.ServiceInstanceCreationException; import com.pinecone.hydra.service.registry.server.ServiceLifecycleService; import com.pinecone.hydra.service.registry.server.ServiceManager; import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; @Controller @AddressMapping("com.pinecone.hydra.service.registry.server.ServiceLifecycleIface.") public class ServiceLifecycleController { protected ServiceLifecycleService serviceLifecycleService; public ServiceLifecycleController( ServiceManager serviceManager ) { this.serviceLifecycleService = serviceManager.serviceLifecycleService(); } @AddressMapping( "registerService" ) public String registerService( RegisterServiceDTO serviceDTO ) throws ClientServiceRegisterException { return this.serviceLifecycleService.registerService( serviceDTO ); } @AddressMapping("createInstanceMeta") boolean createInstanceMeta( GenericServiceInstanceEntity instanceEntity ) throws ServiceInstanceCreationException { return this.serviceLifecycleService.createInstanceMeta( instanceEntity ); } @AddressMapping("deregisterServiceByClientId") public void deregisterServiceByClientId( Long clientId ) { this.serviceLifecycleService.deregisterServiceByClientId( clientId ); } @AddressMapping("deregisterServiceByInstanceId") public void deregisterServiceByInstanceId( String instanceId ) { this.serviceLifecycleService.deregisterServiceByInstanceId( instanceId ); } @AddressMapping("hasOwnedServiceByServiceId") public boolean hasOwnedServiceByServiceId( String serviceId ) { return this.serviceLifecycleService.hasOwnedServiceByServiceId( serviceId ); } 
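// Address composition: the class-level @AddressMapping prefix (ending with a dot) is
// presumably concatenated with each method-level mapping, so registerService above is
// addressable as
// "com.pinecone.hydra.service.registry.server.ServiceLifecycleIface.registerService".
// A minimal client-side sketch, assuming a hypothetical RpcInvoker#call( address, args )
// helper that is not part of this repository:
//
//     RegisterServiceDTO dto = new RegisterServiceDTO();
//     String instanceGuid = (String) invoker.call(
//         "com.pinecone.hydra.service.registry.server.ServiceLifecycleIface.registerService",
//         new Object[]{ dto }
//     );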
@AddressMapping("hasOwnedServiceInstance") public boolean hasOwnedServiceInstance( Long clientId ) { return this.serviceLifecycleService.hasOwnedServiceInstance( clientId ); } @AddressMapping("hasOwnedServiceClient") public boolean hasOwnedServiceClient( Long clientId ) { return this.serviceLifecycleService.hasOwnedServiceClient( clientId ); } @AddressMapping("hasOwnedServiceClient") public boolean hasOwnedServiceInstance( String instanceId ) { return this.serviceLifecycleService.hasOwnedServiceInstance( instanceId ); } @AddressMapping("countRegisteredService") public Integer countRegisteredService() { return this.serviceLifecycleService.countRegisteredService(); } } ================================================ FILE: Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/ServiceMetaController.java ================================================ package com.pinecone.hydra.service.registry.ulf; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.service.registry.server.ServiceManager; import com.pinecone.hydra.service.registry.dto.ServiceMetaDTO; import com.pinecone.hydra.service.registry.server.ServiceMetaService; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; @Controller @AddressMapping("com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface.") public class ServiceMetaController implements Pinenut { protected ServiceMetaService serviceMetaService; public ServiceMetaController( ServiceManager serviceManager ) { this.serviceMetaService = serviceManager.getServiceMetaService(); } @AddressMapping( "fetchServiceInsMetaByClientId" ) public List fetchServiceInsMetaByClientId( long clientId ){ return this.serviceMetaService.fetchServiceInsMetaByClientId( clientId ); } @AddressMapping( "fetchServiceInsMetaByServiceId" ) public List fetchServiceInsMetaByServiceId( String serviceId ) { return this.serviceMetaService.fetchServiceInsMetaByServiceId( serviceId ); } @AddressMapping( "queryServiceMetaByPath" ) public ServiceMetaDTO queryServiceMetaByPath( String path ) { return this.serviceMetaService.queryServiceMetaByPath( path ); } @AddressMapping( "queryServiceMetaByGuid" ) public ServiceMetaDTO queryServiceMetaByGuid( String guid ) { return this.serviceMetaService.queryServiceMetaByGuid( guid ); } @AddressMapping( "evalCreationStatement" ) public String evalCreationStatement( String jonsStatement ) { return this.serviceMetaService.evalCreationStatement( jonsStatement ); } @AddressMapping( "createNewService" ) public String createNewService( String parentAppPath, ServiceMetaDTO meta ) { return this.serviceMetaService.createNewService( parentAppPath, meta ); } } ================================================ FILE: Hydra/hydra-system-reign/pom.xml ================================================ hydra com.pinecone.hydra 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.pinecone.hydra.kernel hydra-system-reign 2.1.0 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 com.pinecone.hydra.kernel hydra-framework-config 2.1.0 com.pinecone.hydra.kernel hydra-framework-device 2.1.0 com.pinecone.hydra.kernel hydra-message-control 2.1.0 com.pinecone.hydra.kernel hydra-message-broadcast 2.1.0 com.pinecone.tritium hydra-system-tritium 2.1.0 com.pinecone.slime slime 2.1.0 com.pinecone.slime.jelly jelly 2.1.0 com.pinecone.ulf ulfhedinn 1.2.1 org.javassist javassist 3.29.0-GA io.netty netty-all 
4.1.80.Final org.springframework.boot spring-boot-starter-web org.jsoup jsoup 1.15.4 org.mybatis mybatis 3.5.9 org.mybatis mybatis-spring 2.0.6 mysql mysql-connector-java 8.0.26 org.slf4j slf4j-api 1.7.30 net.spy spymemcached 2.12.3 org.apache.httpcomponents.client5 httpclient5 5.1 org.apache.commons commons-vfs2 2.9.0 org.apache.commons commons-vfs2-jackrabbit1 2.9.0 org.apache.commons commons-lang3 3.12.0 ================================================ FILE: Hydra/hydra-system-reign/src/main/java/com/pinecone/hydra/reign/UnixInstitutionalizedMetaImperiumPrivy.java ================================================ package com.pinecone.hydra.reign; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.ArchSystemCascadeComponent; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.system.imperium.ImperiumPrivy; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.kom.ExpressInstrument; import com.pinecone.hydra.system.ko.runtime.GenericRuntimeInstrumentConfig; import com.pinecone.hydra.system.ko.runtime.KernelExpressInstrument; public class UnixInstitutionalizedMetaImperiumPrivy extends ArchSystemCascadeComponent implements ImperiumPrivy { protected ExpressInstrument expressInstrument; public UnixInstitutionalizedMetaImperiumPrivy( Namespace name, Hydrogen system, HyComponent parent, KernelObjectConfig config ) { super( name, system, system.getComponentManager(), parent ); this.expressInstrument = new KernelExpressInstrument( system, "", config ); } public UnixInstitutionalizedMetaImperiumPrivy( Namespace name, Hydrogen system, HyComponent parent, @Nullable JSONConfig config ) { this( name, system, parent, new GenericRuntimeInstrumentConfig( config ) ); } public UnixInstitutionalizedMetaImperiumPrivy( Hydrogen system, HyComponent parent, @Nullable JSONConfig config ) { this( (Namespace) null, system, parent, config ); } public UnixInstitutionalizedMetaImperiumPrivy( Hydrogen system, @Nullable JSONConfig config ) { this( system, null,config ); } public UnixInstitutionalizedMetaImperiumPrivy( String name, Hydrogen system, HyComponent parent, @Nullable JSONConfig config ) { this( system, parent, config ); this.setTargetingName( name ); } public UnixInstitutionalizedMetaImperiumPrivy( String name, Hydrogen system, @Nullable JSONConfig config ) { this( name, system, null, config ); } @Override public ExpressInstrument getExpressInstrument() { return this.expressInstrument; } } ================================================ FILE: Hydra/hydra-system-tritium/pom.xml ================================================ hydra com.pinecone.hydra 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.pinecone.tritium hydra-system-tritium 2.1.0 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 com.pinecone.hydra.kernel hydra-framework-config 2.1.0 com.pinecone.hydra.kernel hydra-framework-device 2.1.0 com.pinecone.hydra.kernel hydra-message-control 2.1.0 com.pinecone.hydra.kernel hydra-message-broadcast 2.1.0 com.pinecone.slime slime 2.1.0 com.pinecone.slime.jelly jelly 2.1.0 com.pinecone.ulf ulfhedinn 1.2.1 com.pinecone.summer.springram springram 2.1.0 org.javassist javassist 3.29.0-GA io.netty netty-all 4.1.80.Final org.springframework.boot spring-boot-starter-web org.jsoup jsoup 1.15.4 us.codecraft 
webmagic-core 0.8.0 us.codecraft webmagic-extension 0.8.0 org.mybatis mybatis 3.5.9 org.mybatis mybatis-spring 2.0.6 com.baomidou mybatis-plus-core 3.4.3.4 com.baomidou mybatis-plus-annotation 3.4.3.4 mysql mysql-connector-java 8.0.26 org.slf4j slf4j-api 1.7.30 net.spy spymemcached 2.12.3 org.apache.httpcomponents.client5 httpclient5 5.1 org.apache.commons commons-vfs2 2.9.0 org.apache.commons commons-vfs2-jackrabbit1 2.9.0 org.apache.commons commons-lang3 3.12.0 com.alibaba druid 1.2.8 ================================================ FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ConfigConstants.java ================================================ package com.pinecone.tritium; public final class ConfigConstants { public static final String KeyMasterOrchestrator = "MasterOrchestrator"; } ================================================ FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/MasterServgramOrchestrator.java ================================================ package com.pinecone.tritium; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.Pinecore; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.hydra.servgram.*; public class MasterServgramOrchestrator extends LocalServgramOrchestrator { public MasterServgramOrchestrator( Pinecore system, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) { super( system, sectionConfig, factory, transaction ); } public MasterServgramOrchestrator( Pinecore system, String szSectionName, @Nullable GramFactory factory, GramTransaction transaction ) { super( system, system.getGlobalConfig().getChild( szSectionName ), factory, transaction ); } public MasterServgramOrchestrator( Pinecore system, String szSectionName ) { super( system, system.getGlobalConfig().getChild( szSectionName ) ); } public MasterServgramOrchestrator( Pinecore system ) { this( system, ConfigConstants.KeyMasterOrchestrator ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/Tritium.java ================================================ package com.pinecone.tritium; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import com.pinecone.hydra.Hydra; import com.pinecone.hydra.Hydradom; import com.pinecone.hydra.servgram.ServgramOrchestrator; import com.pinecone.hydra.system.component.GenericResourceDispenserCenter; import com.pinecone.hydra.system.component.ResourceDispenserCenter; import com.pinecone.hydra.system.component.GenericTracerScope; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.system.component.Slf4jTracerScope; import com.pinecone.hydra.system.component.LogStatuses; import com.pinecone.framework.unit.MultiScopeMap; import com.pinecone.framework.util.config.JSONSystemConfig; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.system.component.TracerConfigurator; import com.pinecone.hydra.umb.rabbit.RabbitMQClient; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.io.Tracerson; import com.pinecone.framework.util.json.homotype.DirectObjectInjector; import com.pinecone.tritium.system.ConfigScope; import com.pinecone.tritium.system.Hierarchy; import com.pinecone.tritium.system.InterWareDirector; import com.pinecone.tritium.system.KnittedMiddlewareDirector; import 
import com.pinecone.tritium.system.TritiumConfigScope; import com.pinecone.tritium.system.TritiumSystem; import com.pinecone.tritium.system.ServersScope; import com.pinecone.tritium.system.StorageSystem; import com.pinecone.tritium.system.SystemDaemon; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.nio.file.Path; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.Map;
/**
 * Bean Nuts Pinecone Hydra Tritium
 * Author: Harald.E / JH.W (DragonKing)
 * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.
 * *****************************************************************************************
 * Hydra - Tritium Kingdom - Pyramid Hierarchy - Centralized Architecture
 * Hydra - 九头龙第三帝国 - 金字塔阶级控制架构 - 中央集权架构典型实现
 * *****************************************************************************************
 * Hydrogen  | 氕 | Prime Frame  | 框架层原型
 * Deuterium | 氘 | Federal Arch | 九头龙第二共和国
 * Tritium   | 氚 | Hierarchy    | 九头龙第三帝国
 * *****************************************************************************************
 * Tritium Hydra | 氚
 * DragonKing.cn of Harald
 */
public class Tritium extends Hydradom implements TritiumSystem, Slf4jTraceable { public static final String NUTLET_NAME = "Bean Nuts Hazelnut Sauron Hydra"; public static final long VER_PINE = 202506L; public static final String VERSION = "2.1.0"; public static final String RELEASE_DATE = "2026/06/06"; public static final String ROOT_SERVER = "https://www.dragonking.cn/"; public static final String CONTACT_INFO = "E-Mail:info#dragonking.cn"; // Contact information, in case this program terminates abnormally.
public static final String SYSTEM_PATH = "./system/"; public static final String SETUP_PATH = Tritium.SYSTEM_PATH + "setup/"; public static final String MAIN_CONFIG_FILE = Tritium.SETUP_PATH + "config.json5"; protected String mMinionName; protected Hierarchy mServiceHierarchy = Hierarchy.H_SLAVE; protected boolean mMasterQuery; protected Logger mLogger; protected Path mPrimaryConfigPath ; protected DirectObjectInjector mObjectInjector ; protected ServersScope mServersScope ; protected StorageSystem mStorageSystem ; protected Slf4jTracerScope mTracerScope ; protected TracerConfigurator mTracerConfigurator ; protected SystemDaemon mSystemPrimaryDaemon ; protected ConfigScope mPrimaryConfigScope ; // Config scope for retrieving program-runtime global variables.
protected InterWareDirector mMiddlewareDirector ; protected ResourceDispenserCenter mDispenserCenter ; protected DynamicFactory mShardDynamicFactory ; protected void prepare_system_log4j_logger() { this.mLogger = LoggerFactory.getLogger( this.className() + "" ); this.pout().print( "[System] [TracerReassignment] Slf4j>\n" ); } private void load_this_class_config() { this.mjoGlobalConfig.addParentPath( this.getWorkingPath() ); this.mjoSystemConfig = this.mjoGlobalConfig.getChild( "System" ); this.mObjectInjector = DirectObjectInjector.instance( true, Tritium.class ); this.mObjectInjector.inject( this.mjoSystemConfig, Tritium.class, this ); this.mObjectInjector.inject( this.mjoSystemConfig, Hydra.class, this ); this.mServiceHierarchy = Hierarchy.queryHierarchy( this.mjoSystemConfig.optString( "ServiceArch" ) ); this.mTracerConfigurator = new LoggingConfigurator( this ); this.mTracerConfigurator.apply(); } protected void prepare_system_skeleton_before() { } protected void prepare_system_skeleton() { this.infoLifecycle( " Skeleton Initialization", LogStatuses.StatusStart ); this.prepare_system_skeleton_before(); this.mTracerScope = new GenericTracerScope( this ); this.mPrimaryConfigScope = new TritiumConfigScope( ConfigScope.KeyGlobal, this, this.getGlobalConfig() ); this.mMiddlewareDirector = new KnittedMiddlewareDirector( this ); this.mServersScope = new ServersScope( this ); this.mStorageSystem = new StorageSystem( this ); this.mSystemPrimaryDaemon = new SystemDaemon( this ); this.mDispenserCenter = new GenericResourceDispenserCenter( this ); this.getComponentManager().addComponent( this.mMiddlewareDirector ); this.getComponentManager().addComponent( this.mPrimaryConfigScope ); this.getComponentManager().addComponent( this.mServersScope ); this.getComponentManager().addComponent( this.mStorageSystem ); this.getComponentManager().addComponent( this.mTracerScope ); this.getComponentManager().addComponent( this.mSystemPrimaryDaemon ); this.getComponentManager().addComponent( this.mDispenserCenter ); //Debug.trace( this.getComponentManager().getComponents() ); //Debug.echo( ( (JSONObject)this.getGlobalConfigScope().thisScope() ).toJSONStringI(4) ); super.prepare_system_skeleton(); this.infoLifecycle( " Skeleton Initialization", LogStatuses.StatusReady ); } @Override protected void loadConfig() { try { Map map = this.getStartupCommandMap(); String[] args = map.get( "workingPath" ); if( args != null && args.length > 0 ) { this.mWorkingPath = Path.of( args[ 0 ] ); } else { this.mWorkingPath = Path.of( this.getRuntimeContextPath() ); } args = map.get( "config" ); if( args != null && args.length > 0 ) { this.mPrimaryConfigPath = Path.of( args[ 0 ] ); } else { this.mPrimaryConfigPath = this.getWorkingPath().resolve( Tritium.MAIN_CONFIG_FILE ); } this.mjoGlobalConfig = (JSONSystemConfig) ( new JSONSystemConfig( this ) ).apply( this.mPrimaryConfigPath.toFile() ); } catch ( IOException e ) { this.handleKillException( e ); } } protected void traceSystemBootingInfo() { this.pout().print( "\u001B[34m>>> System Booting...\u001B[0m\n\n" ); } protected void loadTracer() { this.mConsole = new Tracerson(); } @Override protected void onlyLoadTaskManager() { this.mTaskManager = new MasterServgramOrchestrator( this ); this.mShardDynamicFactory = new GenericDynamicFactory( this.mTaskManager.getClassLoader() ); } protected void traceSubsystemWelcomeInfo() { this.pout().print( "---------------------------------------------------------------\n" ); } @Override protected void traceWelcomeInfo() { this.pout().print( 
"---------------------------------------------------------------\n" ); this.pout().print( "\u001B[31mBean Nuts Pinecone Ursus for Java\u001B[0m\n" ); this.pout().print( "\u001B[31mHydra Kingdom Framework (Tritium, Hydra Empire) \u001B[0m\n" ); this.pout().print( "\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\u001B[0m\n" ); this.pout().print( "---------------------------------------------------------------\n" ); this.pout().print( "\u001B[31mDragon King\u001B[0m\n" ); this.pout().print( "\u001B[32mWebsit: https://www.dragonking.cn/ \u001B[0m\n" ); this.traceSubsystemWelcomeInfo(); this.traceSystemBootingInfo(); this.prepare_system_log4j_logger(); this.infoLifecycle( "Initialization", LogStatuses.StatusStart ); } protected void traceSystemInfo() { LocalDateTime now = LocalDateTime.now(); this.console().echo( "----------------------System Information-----------------------\n" ); this.console().echo( "MinionName : " + this.mMinionName, "\n" ); this.console().echo( "NutletName : " + Tritium.NUTLET_NAME , "\n" ); this.console().echo( "Version : " + Tritium.VERSION, "\n" ); this.console().echo( "ReleaseDate: " + Tritium.RELEASE_DATE, "\n" ); this.console().echo( "ServiceID : " + this.mServiceID, "\n" ); this.console().echo( "ServiceArch: " + this.mServiceHierarchy.getName(), "\n" ); this.console().echo( "RuntimePath: " + this.getRuntimePath(), "\n" ); this.console().echo( "ContextPath: " + this.getRuntimeContextPath(), "\n" ); this.console().echo( "PrimaryConf: " + this.mPrimaryConfigPath.toString(), "\n" ); this.console().echo( "StartTime : " + now.format( DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss:SSS") ), "\n" ); this.console().echo( "---------------------------------------------------------------\n" ); } public Tritium( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Tritium( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); this.load_this_class_config(); this.prepare_system_skeleton(); this.loadTracer(); this.dispatchStartupCommand(); this.traceSystemInfo(); this.infoLifecycle( "Initialization", LogStatuses.StatusReady ); this.console().echo( "\n\n" ); this.console().getOut().flush(); } @Override public Logger getLogger() { return this.mLogger; } @Override public Tritium infoLifecycle(String szWhat, String szStateOrExtra ) { this.getLogger().info( "[SystemLifecycle] [{}] <{}>", szWhat, szStateOrExtra ); return this; } @Override public Hierarchy getServiceArch() { return this.mServiceHierarchy; } @Override public boolean isTopmostArchy() { return this.getServiceArch() == this.getTopmostArchy(); } @Override public Hierarchy getTopmostArchy() { return Hierarchy.H_MASTER; } @Override public Hierarchy getBottommostArchy() { return Hierarchy.H_SLAVE; } @Override public boolean isBottommostArchy() { return this.getServiceArch() == this.getBottommostArchy(); } @Override public SystemDaemon getSystemDaemon() { return this.mSystemPrimaryDaemon; } @Override public ServersScope getServersScope() { return this.mServersScope; } @Override public StorageSystem getStorageSystem() { return this.mStorageSystem; } @Override public Slf4jTracerScope getTracerScope() { return this.mTracerScope; } @Override public ResourceDispenserCenter getDispenserCenter() { return this.mDispenserCenter; } @Override public MultiScopeMap getGlobalConfigScope() { return this.getPrimaryConfigScope().getScopeMap(); } @Override public ConfigScope getPrimaryConfigScope() { return this.mPrimaryConfigScope; } public boolean 
getMasterQuery() { return this.mMasterQuery; } public boolean isKingMasterQuery() { return this.getMasterQuery() && (this.isTopmostArchy() || this.getServiceArch() == Hierarchy.H_PALADIN); } @Override public InterWareDirector getMiddlewareDirector() { return this.mMiddlewareDirector; } public ServgramOrchestrator getServgramOrchestrator() { return (ServgramOrchestrator) this.mTaskManager; } @Override public DynamicFactory getShardDynamicFactory() { return this.mShardDynamicFactory; } public Path getPrimaryConfigsPath() { return this.getWorkingPath().resolve( Tritium.SETUP_PATH ); } public void vitalize () throws Exception { this.getServgramOrchestrator().tracer().info( "[Lifecycle] " ); this.getServgramOrchestrator().orchestrate(); //( new Heistron( "Heist", this )).execute(); // LocalHeistium heistium = new LocalHeistium( "Test", this, 5, null ); // heistium.joinStartMultiTasks(); // RangedPage64 page64 = new RangedPage64( 0, 1000,0 ); // DirectPagePool pagePool = new DirectPagePool( LocalTaskPage.class ); // // LocalMultiActiveTaskPageProducer producer = new LocalMultiActiveTaskPageProducer( new FixedPageDivider64( page64, pagePool, 100 ), page64.getId() + 1 ); // LocalSingleTaskPageConsumer consumer = new LocalSingleTaskPageConsumer( producer ); // // consumer.consume(); } void testBunny() throws Exception { RabbitMQClient bunny = new RabbitMQClient( this, this.getMiddlewareDirector().getMiddlewareConfig().optJSONObject( "Messengers" ).optJSONObject( "RabbitMQKingpin" ) ); bunny.toListen(); Debug.echo( bunny ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/IndexableManager.java ================================================ package com.pinecone.tritium.ally; public class IndexableManager { } ================================================ FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/messengers/MessagersManager.java ================================================ package com.pinecone.tritium.ally.messengers; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.JSONGet; import com.pinecone.framework.util.name.Namespace; import com.pinecone.hydra.system.centrum.UniformCentralSystem; import com.pinecone.hydra.umct.MessageExpress; import com.pinecone.hydra.umct.Messagram; import com.pinecone.hydra.servgram.Servgram; import com.pinecone.hydra.system.ArchSystemAutoAssembleComponent; import com.pinecone.hydra.system.HyComponent; import com.pinecone.hydra.system.HyHierarchy; import com.pinecone.hydra.system.component.LogStatuses; import com.pinecone.hydra.umc.msg.MessageNode; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.WolfMCNode; import com.pinecone.hydra.umc.wolf.server.WolfMCServer; import com.pinecone.hydra.umct.UMCTExpressHandler; import com.pinecone.tritium.system.InterWareDirector; import com.pinecone.tritium.system.TritiumSystem; import java.io.IOException; import java.nio.file.Path; import java.util.Collection; import java.util.Map; public class MessagersManager extends 
ArchSystemAutoAssembleComponent implements Pinenut, HyComponent { @JSONGet( "Messagers" ) protected JSONConfig mjoMessagersConf ; @JSONGet( "Messagers.Configs" ) protected JSONConfig mjoComponentConf ; @JSONGet( "Messagers.Messagers" ) protected JSONConfig mjoMessagers ; @JSONGet( "Messagers.Configs.Enable" ) protected boolean mbEnable ; protected Map mMessagerComponent ; public MessagersManager( Namespace name, HyComponent parent ) { super( name, parent.getSystem(), parent.getSystem().getComponentManager(), parent ); InterWareDirector parentManager = (InterWareDirector) parent; this.getSystem().getPrimaryConfigScope().autoInject( MessagersManager.class, parentManager.getMiddlewareConfig() , this ); this.mMessagerComponent = new LinkedTreeMap<>(); this.prepareInstanceMessagers(); this.infoLifecycleInitializationDone(); } public MessagersManager( HyComponent parent ) { this( null, parent ); } protected void prepareInstanceProcessum( Object node ) { if( node instanceof Processum ) { this.getSystem().getTaskManager().add( (Processum)node ); } } protected void executeInnerServgram( Object node, JSONObject conf ) { HyHierarchy hierarchy = this.getSystem().getServiceArch(); if( node instanceof Servgram ) { boolean bIsRecipient = conf.optBoolean( "IsRecipient" ); if( bIsRecipient ) { String[] as = this.getSystem().getStartupCommandMap().get( "TestWolfMCClient" ); if( as != null && as.length > 0 && as[0].equals( "true" ) && node instanceof WolfMCServer ){ return; } } if( !bIsRecipient ) { if( node instanceof MessageNode ) { boolean bAutoStartInMasterMode = conf.optBoolean( "AutoStartInMasterMode" ); if( hierarchy.isDominantClass() && !bAutoStartInMasterMode ) { return; } } } try{ ((Servgram) node).execute(); } catch ( Exception e ) { throw new ProxyProvokeHandleException( e ); } } } protected void prepareInstanceMessagers() { for ( Object o : this.mjoMessagers.entrySet() ) { Map.Entry kv = (Map.Entry) o; Object ov = kv.getValue(); if ( ov instanceof String ) { try { ov = this.mjoMessagers.fromPath( Path.of( (String) ov ) ); } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } } JSONObject val = (JSONObject) ov; this.mObjectOverrider.override( val, this.mjoComponentConf, false ); try { String szEngine = val.optString( "Engine" ); String szInsNam = (String) kv.getKey(); boolean bEnable = val.optBoolean( "Enable" ); boolean bCentralManage = val.optBoolean( "CentralManage" ); if ( bEnable ) { TritiumSystem system = this.getSystem(); Object node = null; if ( system instanceof UniformCentralSystem ) { UniformCentralSystem uSystem = (UniformCentralSystem) system; long nodeId = uSystem.getSystemGuidAllocator72().nextGUIDi64(); node = this.mUniformFactory.loadInstance( szEngine, null, new Object[] { nodeId, szInsNam, this.getSystem(), val } ); } if ( node == null ) { node = this.mUniformFactory.loadInstance( szEngine, null, new Object[] { szInsNam, this.getSystem(), val } ); } if ( node instanceof MessageNode ) { this.mMessagerComponent.put( szInsNam, (MessageNode)node ); this.prepareMessagersMsgHandler( szInsNam, (MessageNode)node, val ); } else if ( node instanceof Messagram ) { this.mMessagerComponent.put( szInsNam, (Messagram)node ); } else { throw new IllegalArgumentException( "Illegal message node engine, should be `MessageNode/Messagram`: " + szEngine ); } this.prepareInstanceProcessum( node ); if ( bCentralManage ) { this.executeInnerServgram( node, val ); } } } catch ( Exception e ) { throw new ProvokeHandleException( e ); } } //Debug.fmt( 2, this.mjoMessagersConf ); } 
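// Configuration shape consumed by prepareInstanceMessagers above, as a hypothetical JSON5
// sketch; the instance name and engine are illustrative, while the keys (Engine, Enable,
// CentralManage, IsRecipient, AutoStartInMasterMode, MessageHandler) are the ones actually
// read by this class:
//
//     "Messagers": {
//         "Configs": { "Enable": true },
//         "Messagers": {
//             "PrimaryWolfServer": {
//                 "Engine": "com.pinecone.hydra.umc.wolf.server.WolfMCServer",
//                 "Enable": true,
//                 "IsRecipient": true,
//                 "CentralManage": true,
//                 "MessageHandler": "DemoExpress"
//             }
//         }
//     }
//
// A string value under the inner "Messagers" map may also point to an external JSON file,
// which is loaded via fromPath( ... ) before the same keys are applied. A handler name
// without a dot is resolved against registered Messagrams by getMessageHandlerByName below.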
protected MessageExpress getMessageHandlerByName( String name ) { for( Map.Entry kv: this.mMessagerComponent.entrySet() ) { Pinenut p = kv.getValue(); if( p instanceof Messagram ) { Messagram messagram = (Messagram) p; MessageExpress me = messagram.getExpressByName( name ); if( me != null ) { return me; } } } return null; } protected void prepareMessagersMsgHandler( String szInsNam, MessageNode node, JSONObject conf ) { String szMessageHandler = conf.optString( "MessageHandler" ); if( !StringUtils.isEmpty( szMessageHandler ) ) { MessageExpress me; if( szMessageHandler.contains( "." ) ) { try { Object o = this.mUniformFactory.loadInstance( szMessageHandler, null, null ); if( o instanceof MessageExpress ){ me = (MessageExpress) o; } else { throw new IllegalArgumentException( "Illegal message handler, should be `MessageExpress`: " + szMessageHandler ); } } catch ( Exception e ) { throw new ProvokeHandleException( e ); } } else { me = this.getMessageHandlerByName( szMessageHandler ); } if( me == null ) { throw new IllegalArgumentException( "Illegal message handler, cannot be found: " + szMessageHandler ); } if( node instanceof WolfMCNode ) { if( me instanceof UlfAsyncMsgHandleAdapter ) { ((WolfMCNode) node).apply( (UlfAsyncMsgHandleAdapter)me ); } else { ((WolfMCNode) node).apply( UlfAsyncMsgHandleAdapter.wrap( (UMCTExpressHandler) me ) ); } this.infoCriticalOperation( "SetMessageExpress(`" + szMessageHandler + "`) ==> (`" + szInsNam + "`)", LogStatuses.StatusDone ); } } } @Override public TritiumSystem getSystem() { return (TritiumSystem) super.getSystem(); } public boolean isEnable() { return this.mbEnable; } public JSONObject getMessagers() { return this.mjoMessagers; } @SuppressWarnings( "unchecked" ) public Collection messagersNames() { return (Collection)this.getMessagers().values(); } public Pinenut getComponentByName( String szName ) { return this.mMessagerComponent.get( szName ); } public MessageNode getMessageNodeByName( String szName ) { Pinenut p = this.getComponentByName( szName ); if( p instanceof MessageNode ) { return (MessageNode) p; } return null; } public Messagram getMessagramByName( String szName ) { Pinenut p = this.getComponentByName( szName ); if( p instanceof Messagram ) { return (Messagram) p; } return null; } public Pinenut terminate( String szName ) { Pinenut node = this.getComponentByName( szName ); if( node != null ) { if( node instanceof Servgram ) { ((Servgram) node).terminate(); } else if( node instanceof Processum ) { ((Processum) node).apoptosis(); } this.mMessagerComponent.remove( szName ); } return node; } public int nodesSize() { return this.mMessagerComponent.size(); } }
================================================ FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/DruidDataSourceFactory.java ================================================
package com.pinecone.tritium.ally.rdb; import java.sql.SQLException; import java.util.Properties; import javax.sql.DataSource; import org.apache.ibatis.datasource.DataSourceFactory; import com.alibaba.druid.pool.DruidDataSource; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.homotype.MapStructure; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.slime.source.rdb.RelationalDatabase; public class DruidDataSourceFactory implements DataSourceFactory, Pinenut { protected UniformRDBClient rdbClient; @MapStructure("Ibatis.DruidConfig.initial-size") protected int
initialSize = 1; @MapStructure("Ibatis.DruidConfig.min-idle") protected int minIdle = 1; @MapStructure("Ibatis.DruidConfig.max-active") protected int maxActive = 20; @MapStructure("Ibatis.DruidConfig.max-wait") protected long maxWait = 60000; @MapStructure("Ibatis.DruidConfig.time-between-eviction-runs-millis") protected long timeBetweenEvictionRunsMillis = 60000; @MapStructure("Ibatis.DruidConfig.min-evictable-idle-time-millis") protected long minEvictableIdleTimeMillis = 300000; @MapStructure("Ibatis.DruidConfig.validation-query") protected String validationQuery = "SELECT 1"; @MapStructure("Ibatis.DruidConfig.test-while-idle") protected boolean testWhileIdle = true; @MapStructure("Ibatis.DruidConfig.test-on-borrow") protected boolean testOnBorrow = false; @MapStructure("Ibatis.DruidConfig.test-on-return") protected boolean testOnReturn = false; @MapStructure("Ibatis.DruidConfig.pool-prepared-statements") protected boolean poolPreparedStatements = true; @MapStructure("Ibatis.DruidConfig.max-pool-prepared-statement-per-connection-size") protected int maxPoolPreparedStatementPerConnectionSize = 20; @MapStructure("Ibatis.DruidConfig.keep-alive") protected boolean keepAlive = true; @MapStructure("Ibatis.DruidConfig.connection-error-retry-attempts") protected int connectionErrorRetryAttempts = 3; @MapStructure("Ibatis.DruidConfig.break-after-acquire-failure") protected boolean breakAfterAcquireFailure = false; @MapStructure("Ibatis.DruidConfig.filters") protected String filters; public DruidDataSourceFactory( UniformRDBClient rdbClient ) { this.rdbClient = rdbClient; IbatisClient ibatisClient = (IbatisClient) rdbClient; rdbClient.getRDBManager().getSystem().getPrimaryConfigScope().autoInject( DruidDataSourceFactory.class, ibatisClient.getClientConf(), this ); } @Override public void setProperties(Properties properties) { } @Override public DataSource getDataSource() { IbatisClient ibatisClient = (IbatisClient) this.rdbClient; RelationalDatabase rdb = (RelationalDatabase) this.rdbClient; String driver = ibatisClient.getJDBCDriverName(); String url = ibatisClient.getJDBCURL(); String username = rdb.getUsername(); String password = rdb.getPassword(); DruidDataSource ds = new DruidDataSource(); /* Basic JDBC settings. 基础 JDBC */ ds.setDriverClassName(driver); ds.setUrl(url); ds.setUsername(username); ds.setPassword(password); /* Connection-pool sizing. 连接池参数 */ ds.setInitialSize(this.initialSize); ds.setMinIdle(this.minIdle); ds.setMaxActive(this.maxActive); ds.setMaxWait(this.maxWait); /* Idle-connection eviction. 连接回收 */ ds.setTimeBetweenEvictionRunsMillis(this.timeBetweenEvictionRunsMillis); ds.setMinEvictableIdleTimeMillis(this.minEvictableIdleTimeMillis); /* Connection validation. 连接检测 */ ds.setValidationQuery(this.validationQuery); ds.setTestWhileIdle(this.testWhileIdle); ds.setTestOnBorrow(this.testOnBorrow); ds.setTestOnReturn(this.testOnReturn); /* PS cache */ ds.setPoolPreparedStatements(this.poolPreparedStatements); ds.setMaxPoolPreparedStatementPerConnectionSize( this.maxPoolPreparedStatementPerConnectionSize ); ds.setKeepAlive(this.keepAlive); ds.setConnectionErrorRetryAttempts(this.connectionErrorRetryAttempts); ds.setBreakAfterAcquireFailure(this.breakAfterAcquireFailure); if ( this.filters != null ) { try { ds.setFilters( this.filters ); } catch ( SQLException e ) { throw new ProvokeHandleException( e ); } } this.rdbClient.getRDBManager().getLogger().info( "[Lifecycle] New druid-data-source created (`{}`). 
", this.rdbClient.getInstanceName() ); return ds; } } ================================================ FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/GenericIbatisClient.java ================================================ package com.pinecone.tritium.ally.rdb; import javax.sql.DataSource; import java.sql.SQLException; import java.util.Set; import java.util.Collection; import java.util.List; import java.util.ArrayList; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.Annotation; import java.sql.Connection; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.unit.LinkedTreeSet; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.MapStructure; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.GenericClassScopeSet; import com.pinecone.framework.util.lang.NamespaceCollector; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.pinecone.slime.source.DAOScanner; import com.pinecone.slime.source.DataAccessObject; import com.pinecone.slime.source.XMLResourceScanner; import com.pinecone.slime.source.rdb.ArchRelationalDatabase; import org.apache.ibatis.binding.BindingException; import org.apache.ibatis.builder.xml.XMLMapperBuilder; import org.apache.ibatis.datasource.DataSourceFactory; import org.apache.ibatis.datasource.pooled.PooledDataSource; import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.session.*; import org.apache.ibatis.transaction.TransactionFactory; import org.slf4j.Logger; public class GenericIbatisClient extends ArchRelationalDatabase implements IbatisClient, UniformRDBClient { protected String mszInstanceName ; protected SqlSessionFactory mSqlSessionFactory ; protected Configuration mConfiguration ; protected DataSource mDataSource ; protected Environment mEnvironment ; @MapStructure( "Ibatis" ) protected JSONObject mjoIbatisConf ; @MapStructure( "JDBC.Driver" ) protected String mszJDBCDriverName ; @MapStructure( "JDBC.ExURL" ) protected String mszJDBCExURL ; protected JSONObject mjoClientConf ; protected RDBManager mRDBManager ; @MapStructure( "Ibatis.Environment" ) protected String mszEnvironment ; @MapStructure( "Ibatis.DataSource" ) protected String mszDataSource ; @MapStructure( "Ibatis.TransactionFactory" ) protected String mszTransactionFactory ; protected Logger mLogger ; @MapStructure( "Ibatis.PooledConfig.InitialSize" ) protected int mnInitialSize = 0 ; @MapStructure( "Ibatis.PooledConfig.MaxActive" ) protected int mnMaxActive = 20 ; @MapStructure( "Ibatis.PooledConfig.MaxIdle" ) protected int mnMaxIdle = 20 ; @MapStructure( "Ibatis.PooledConfig.MinIdle" ) protected int mnMinIdle = 1 ; @MapStructure( "Ibatis.PooledConfig.MaxWait" ) protected int mnMaxWait = 60000 ; @MapStructure( "Ibatis.DataAccessObject.Scanner" ) protected String mszDAOScanner ; @MapStructure( "Ibatis.DataAccessObject.XMLScanner" ) protected String mszXMLScanner ; @MapStructure( "Ibatis.DataAccessObject.ScanScopes" ) protected Collection mScannerScopes ; protected DAOScanner mDAOScanner ; protected XMLResourceScanner mXMLScanner ; protected DataSourceFactory mDataSourceFactory ; protected final ReadWriteLock mAddScopeLock = new ReentrantReadWriteLock(); public GenericIbatisClient( RDBManager 
manager, String szInstanceName ) { this.mRDBManager = manager; this.mszInstanceName = szInstanceName; this.mjoClientConf = this.mRDBManager.getDatabases().optJSONObject( szInstanceName ); this.mRDBManager.getSystem().getPrimaryConfigScope().autoInject( ArchRelationalDatabase.class, this.mjoClientConf, this ); this.mRDBManager.getSystem().getPrimaryConfigScope().autoInject( GenericIbatisClient.class, this.mjoClientConf, this ); this.mLogger = this.getRDBManager().getSystem().getTracerScope().newLogger( this.className() ); this.prepareIbatisSubsystem(); } protected void prepareKernelXMLList() { ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader(); ClassScope classScope = new GenericClassScopeSet( classLoader ); if ( this.mszXMLScanner == null ) { this.mszXMLScanner = "com.pinecone.slime.jelly.source.ibatis.IbatisXMLResourceScanner"; } Object ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszXMLScanner, new Object[]{ classScope, classLoader } ); if ( ds instanceof XMLResourceScanner ) { this.mXMLScanner = (XMLResourceScanner)ds; } else { throw new IllegalArgumentException( "Illegal XML resource scanner, should be `XMLResourceScanner`: " + this.mszXMLScanner ); } } protected void prepareDAOMapperList() { ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader(); ClassScope classScope = new GenericClassScopeSet( classLoader ); Object ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszDAOScanner, new Object[]{ classScope, classLoader } ); if ( ds instanceof DAOScanner ) { this.mDAOScanner = (DAOScanner)ds; } else { throw new IllegalArgumentException( "Illegal DAO scanner, should be `DAOScanner`: " + this.mszDAOScanner ); } Set scopes = new LinkedTreeSet<>(); if ( this.mScannerScopes != null && !this.mScannerScopes.isEmpty() ) { try { List candidates = new ArrayList<>(); for ( String sz : this.mScannerScopes ) { scopes.add( sz ); this.mDAOScanner.scan( sz, true, candidates ); } for ( String sz : candidates ) { this.addMapper( classLoader.loadClass( sz ) ); } } catch ( IOException | ClassNotFoundException e ) { throw new ProxyProvokeHandleException( e ); } } this.mScannerScopes = scopes; } protected void prepareIbatisSubsystem() { this.mLogger.info( "[Lifecycle] [RDBClient::PrepareIbatisSubsystem::" + this.mszInstanceName + "] " ); String szJDBCUrl = this.getJDBCURL(); Object ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszDataSource, new Object[] { this } ); if ( ds == null ) { ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszDataSource, new Object[] { this.mszJDBCDriverName, szJDBCUrl, this.getUsername(), this.getPassword() } ); if ( ds instanceof DataSource ) { this.mDataSource = (DataSource) ds; if ( ds instanceof PooledDataSource ) { PooledDataSource pds = (PooledDataSource) ds; pds.setPoolMaximumActiveConnections( this.mnMaxActive ); pds.setPoolMaximumIdleConnections( this.mnMaxIdle ); pds.setPoolTimeToWait( this.mnMaxWait ); } } else { ds = null; } } else { this.mDataSourceFactory = (DataSourceFactory) ds; this.mDataSource = this.mDataSourceFactory.getDataSource(); } if ( ds == null ) { throw new IllegalArgumentException( "Illegal data source, should be `DataSource` / `DataSourceFactory`: " + this.mszDataSource ); } TransactionFactory transactionFactory; Object tf = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszTransactionFactory, null ); if ( tf instanceof TransactionFactory ) {
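// A typical value for mszTransactionFactory here would be MyBatis's built-in
// org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory (an assumption about the
// deployment config; any TransactionFactory implementation passes this check).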
transactionFactory = (TransactionFactory) tf; } else { throw new IllegalArgumentException( "Illegal transaction factory, should be `TransactionFactory`: " + this.mszTransactionFactory ); } this.mEnvironment = new Environment( this.mszEnvironment, transactionFactory, this.mDataSource ); this.mConfiguration = new Configuration( this.mEnvironment ); this.mSqlSessionFactory = new SqlSessionFactoryBuilder().build( this.mConfiguration ); this.prepareDAOMapperList(); this.prepareKernelXMLList(); this.mLogger.info( "[Lifecycle] [RDBClient::PrepareIbatisSubsystem::" + this.mszInstanceName + "] " ); } @Override public String getJDBCURL() { if ( this.mszJDBCExURL == null ) { this.mszJDBCExURL = ""; } if ( !this.mszJDBCExURL.startsWith( "&" ) ) { this.mszJDBCExURL = "&" + this.mszJDBCExURL; } return super.getJDBCURL() + this.mszJDBCExURL; } @Override public String getInstanceName() { return this.mszInstanceName; } @Override public Configuration getConfiguration() { return this.mConfiguration; } @Override public DataSource getDataSource() { return this.mDataSource; } @Override public Environment getEnvironment() { return this.mEnvironment; } @Override public JSONObject getIbatisConf() { return this.mjoIbatisConf; } @Override public String getJDBCDriverName() { return this.mszJDBCDriverName; } @Override public JSONObject getClientConf() { return this.mjoClientConf; } @Override public DAOScanner getDAOScanner() { return this.mDAOScanner; } @Override public void addMapper( Class type ) { try { this.mConfiguration.addMapper( type ); } catch ( BindingException ignore ) { // Do nothing. } } @Override public SqlSessionFactory getSqlSessionFactory() { return this.mSqlSessionFactory; } @Override public SqlSession openSession() { SqlSession sqlSession = this.mSqlSessionFactory.openSession(); return sqlSession; } @Override public SqlSession openSession( boolean autoCommit ) { SqlSession sqlSession = this.mSqlSessionFactory.openSession(autoCommit); return sqlSession; } @Override public SqlSession openSession( Connection connection ) { SqlSession sqlSession = this.mSqlSessionFactory.openSession(connection); return sqlSession; } @Override public SqlSession openSession( TransactionIsolationLevel level ) { SqlSession sqlSession = this.mSqlSessionFactory.openSession(level); return sqlSession; } @Override public SqlSession openSession( ExecutorType execType ) { SqlSession sqlSession = this.mSqlSessionFactory.openSession(execType); return sqlSession; } @Override public SqlSession openSession( ExecutorType execType, boolean autoCommit ) { SqlSession sqlSession = this.mSqlSessionFactory.openSession(execType, autoCommit); return sqlSession; } @Override public SqlSession openSession( ExecutorType execType, TransactionIsolationLevel level ) { SqlSession sqlSession = this.mSqlSessionFactory.openSession(execType, level); return sqlSession; } @Override public SqlSession openSession( ExecutorType execType, Connection connection ) { SqlSession sqlSession = this.mSqlSessionFactory.openSession( execType, connection ); return sqlSession; } protected void free0( SqlSession sqlSession ) { sqlSession.commit(); sqlSession.close(); } @Override public RDBManager getRDBManager() { return this.mRDBManager; } @Override public DAOScanner getDataAccessObjectScanner() { return this.mDAOScanner; } @Override public boolean hasOwnDataAccessObject( Class clazz ) { Annotation[] annotations = clazz.getAnnotations(); for ( Annotation annotation : annotations ) { if ( annotation instanceof DataAccessObject ) { String s = ((DataAccessObject) 
annotation).scope(); if ( s.isEmpty() || s.equals( this.getInstanceName() ) ) { return true; } } else if ( annotation instanceof IbatisDataAccessObject ) { String s = ((IbatisDataAccessObject) annotation).scope(); if ( s.isEmpty() || s.equals( this.getInstanceName() ) ) { return true; } } } return false; } private List > addDataAccessObjectScope0( String szPacketName, boolean bIgnoreOwnedChecked, List candidates, ClassLoader classLoader ) throws IOException, ClassNotFoundException { this.mScannerScopes.add( szPacketName ); this.mDAOScanner.scan( szPacketName, true, candidates ); List > candidateClasses = new ArrayList<>(); for ( String sz : candidates ) { Class clazz = classLoader.loadClass( sz ); if ( bIgnoreOwnedChecked || this.hasOwnDataAccessObject( clazz ) ) { candidateClasses.add( clazz ); this.addMapper( clazz ); } } return candidateClasses; } @Override public List > addDataAccessObjectScope( String szPacketName, boolean bIgnoreOwnedChecked ) { ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader(); try { List candidates = new ArrayList<>(); this.mAddScopeLock.writeLock().lock(); try { return this.addDataAccessObjectScope0( szPacketName, bIgnoreOwnedChecked, candidates, classLoader ); } finally { this.mAddScopeLock.writeLock().unlock(); } } catch ( IOException | ClassNotFoundException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public List > addDataAccessObjectScopeNoneSync( String szPacketName, boolean bIgnoreOwnedChecked ) { ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader(); try { List candidates = new ArrayList<>(); return this.addDataAccessObjectScope0( szPacketName, bIgnoreOwnedChecked, candidates, classLoader ); } catch ( IOException | ClassNotFoundException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public List > addDataAccessObjectScope( String szPacketName ) { return this.addDataAccessObjectScope( szPacketName, false ); } private void addXMLObjectScope0( String szPacketName, List candidates, ClassLoader classLoader ) throws IOException { this.mScannerScopes.add( szPacketName ); this.mXMLScanner.scan( szPacketName, true, candidates ); for ( String szResource : candidates ) { String szResourcePath = szResource.replace( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, NamespaceCollector.RESOURCE_NAME_SEPARATOR ) + ".xml"; if ( !this.mConfiguration.isResourceLoaded( szResourcePath ) ) { InputStream inputStream = classLoader.getResourceAsStream( szResourcePath ); if ( inputStream == null ) { continue; } XMLMapperBuilder xmlMapperBuilder = new XMLMapperBuilder( inputStream, this.mConfiguration, szResourcePath, this.mConfiguration.getSqlFragments() ); xmlMapperBuilder.parse(); this.mConfiguration.addLoadedResource( szResourcePath ); } } } @Override public void addXMLObjectScope( String szPacketName ) { ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader(); try { List candidates = new ArrayList<>(); this.mAddScopeLock.writeLock().lock(); try { this.addXMLObjectScope0( szPacketName, candidates, classLoader ); } finally { this.mAddScopeLock.writeLock().unlock(); } } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public void addXMLObjectScopeNoneSync( String szPacketName ) { ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader(); try { List candidates = new ArrayList<>(); this.addXMLObjectScope0( szPacketName, candidates, classLoader ); } catch ( 
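
// ------------------------------------------------------------------------------------
// Illustrative sketch (not a repository file): one way a configured GenericIbatisClient
// might be used. The `manager` variable and the "Primary" instance name are
// hypothetical; only methods defined on the class above are called.
// ------------------------------------------------------------------------------------
// GenericIbatisClient client = (GenericIbatisClient) manager.getRDBClientByName( "Primary" );
// client.addDataAccessObjectScope( "com.example.dao" );   // register @DataAccessObject mappers, under the write lock
// client.addXMLObjectScope( "com.example.dao" );          // parse companion mapper XML resources
// try ( SqlSession session = client.openSession( true ) ) {
//     // mapper calls execute against the pooled DataSource prepared above
// }
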
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/RDBManager.java
================================================
package com.pinecone.tritium.ally.rdb;

import com.pinecone.framework.system.ProvokeHandleException;
import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.unit.LinkedTreeMap;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.json.homotype.JSONGet;
import com.pinecone.framework.util.lang.DynamicFactory;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.system.ArchSystemAutoAssembleComponent;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.slime.source.rdb.RDBClient;
import com.pinecone.tritium.system.InterWareDirector;
import com.pinecone.tritium.system.TritiumSystem;

import java.util.Collection;
import java.util.Map;

public class RDBManager extends ArchSystemAutoAssembleComponent implements Pinenut, HyComponent {
    @JSONGet( "RDBs" )                       protected JSONObject mjoRDBsConf;
    @JSONGet( "RDBs.Configs" )               protected JSONObject mjoComponentConf;
    @JSONGet( "RDBs.Databases" )             protected JSONObject mjoDatabases;
    @JSONGet( "RDBs.Configs.JDBC.Driver" )   protected String     mszJDBCDriverName;
    @JSONGet( "RDBs.Configs.Ibatis.Client" ) protected String     mszIBatisClient;
    @JSONGet( "RDBs.Configs.Enable" )        protected boolean    mbEnable;

    protected Map<String, RDBClient> mRDBClientComponent;

    public RDBManager( Namespace name, HyComponent parent ) {
        super( name, parent.getSystem(), parent.getSystem().getComponentManager(), parent );
        Hydrogen system = parent.getSystem();
        InterWareDirector parentManager = (InterWareDirector) parent;
        this.getSystem().getPrimaryConfigScope().autoInject( RDBManager.class, parentManager.getMiddlewareConfig(), this );
        this.mRDBClientComponent = new LinkedTreeMap<>();
        this.prepareInstanceClient();
        this.infoLifecycleInitializationDone();
    }

    public RDBManager( HyComponent parent ) {
        this( null, parent );
    }

    protected void prepareInstanceClient() {
        for ( Object o : this.mjoDatabases.entrySet() ) {
            Map.Entry kv = (Map.Entry) o;
            JSONObject val = (JSONObject) kv.getValue();
            this.mObjectOverrider.override( val, this.mjoComponentConf, false );
            try {
                String szEngine = val.optString( "Engine" );
                String szInsNam = (String) kv.getKey();
                boolean bEnable = val.optBoolean( "Enable" );
                if ( bEnable ) {
                    Object client = this.mUniformFactory.loadInstance( szEngine, null, new Object[] { this, szInsNam } );
                    if ( client instanceof RDBClient ) {
                        this.mRDBClientComponent.put( szInsNam, (RDBClient) client );
                    } else {
                        throw new IllegalArgumentException( "Illegal client engine, should be `RDBClient`: " + szEngine );
                    }
                }
            } catch ( Exception e ) {
                throw new ProvokeHandleException( e );
            }
        }
        //Debug.fmt( 2, this.mjoDatabases );
    }

    @Override
    public TritiumSystem getSystem() { return (TritiumSystem) super.getSystem(); }

    public String getJDBCDriverName() { return this.mszJDBCDriverName; }

    public JSONObject getComponentConf() { return this.mjoComponentConf; }

    @Override
    public DynamicFactory getSharedUniformFactory() { return this.mUniformFactory; }

    public boolean isEnable() { return this.mbEnable; }

    public JSONObject getDatabases() { return this.mjoDatabases; }

    @SuppressWarnings( "unchecked" )
    public Collection<String> databasesNames() {
        return (Collection<String>) this.getDatabases().values();
    }

    public RDBClient getRDBClientByName( String szName ) {
        return this.mRDBClientComponent.get( szName );
    }

    public RDBClient terminate( String szName ) {
        RDBClient client = this.getRDBClientByName( szName );
        if ( client != null ) {
            client.close();
            if ( client.isTerminated() ) {
                this.mRDBClientComponent.remove( szName );
            } else {
                return null;
            }
        }
        return client;
    }

    public int clientSize() { return this.mRDBClientComponent.size(); }
}
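
// ------------------------------------------------------------------------------------
// Illustrative sketch (not a repository file): the @JSONGet paths above imply a
// middleware config shaped roughly as follows. The "Primary" instance and the MySQL
// driver are hypothetical examples; the keys are the ones the code actually reads.
// ------------------------------------------------------------------------------------
// "RDBs" : {
//     "Configs" : {
//         "Enable" : true,
//         "JDBC"   : { "Driver" : "com.mysql.cj.jdbc.Driver" },
//         "Ibatis" : { "Client" : "com.pinecone.tritium.ally.rdb.GenericIbatisClient" }
//     },
//     "Databases" : {
//         "Primary" : { "Engine" : "com.pinecone.tritium.ally.rdb.GenericIbatisClient", "Enable" : true }
//     }
// }
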
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/UniformRDBClient.java
================================================
package com.pinecone.tritium.ally.rdb;

import com.pinecone.slime.source.rdb.RDBClient;

public interface UniformRDBClient extends RDBClient {
    RDBManager getRDBManager();
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/messagron/HeistMessage.java
================================================
package com.pinecone.tritium.messagron;

import com.pinecone.framework.util.Debug;
import com.pinecone.hydra.umct.ArchMessagram;
import com.pinecone.hydra.umct.UMCConnection;

public class HeistMessage extends Messageletson {
    public HeistMessage( UMCConnection msgPackage, ArchMessagram servtron ) {
        super( msgPackage, servtron );
    }

    @Override
    public void dispatch() {
        Debug.trace( this.$_MSG() );
    }

    @Override
    public void terminate() {
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/messagron/Messageletson.java
================================================
package com.pinecone.tritium.messagron;

import com.pinecone.hydra.umct.ArchMessagram;
import com.pinecone.hydra.umct.UMCConnection;
import com.pinecone.hydra.umct.JSONLetMsgDeliver;
import com.pinecone.hydra.umct.ArchMessagelet;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.hydra.umc.msg.UMCMessage;

import java.util.Map;

public abstract class Messageletson extends ArchMessagelet {
    protected JSONObject mLetLocal = new JSONMaptron();

    public Messageletson( UMCConnection msgPackage, ArchMessagram servtron ) {
        super( msgPackage, servtron );
        this.mUMCReceiver = this.getMessagePackage().getReceiver();
        this.mUMCTransmit = this.getMessagePackage().getTransmit();
    }

    // PHP Style
    @Override
    protected Map $_MSG() {
        return this.getReceivedMessage().getHead().evalMapExtraHead();
    }

    @Override
    public UMCMessage getReceivedMessage() { return this.getMessagePackage().getMessage(); }

    @Override
    public JSONLetMsgDeliver getMessageDeliver() { return (JSONLetMsgDeliver) super.getMessageDeliver(); }

    @Override
    public JSONObject getLetLocal() { return this.mLetLocal; }

    @Override
    public String toJSONString() { return this.getLetLocal().toJSONString(); }
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/messagron/Messagron.java
================================================
package com.pinecone.tritium.messagron;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.umct.IntegratedMessagram;

import java.util.Map;

public class Messagron extends IntegratedMessagram {
    public Messagron( String szName, Processum parent, Map config ) {
        super( szName, parent, config );
    }

    @Override
    public String getLetsNamespace() {
        return this.getClass().getPackageName() + ".";
    }
}
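
// ------------------------------------------------------------------------------------
// Illustrative sketch (not a repository file): getLetsNamespace() above suggests that
// messagelets are resolved by simple class name inside this package, so an extra head
// such as {Messagelet:'HeistMessage'} would be served by the HeistMessage class shown
// earlier. A minimal additional messagelet would follow the same pattern; the name
// EchoMessage is hypothetical.
// ------------------------------------------------------------------------------------
// public class EchoMessage extends Messageletson {
//     public EchoMessage( UMCConnection msgPackage, ArchMessagram servtron ) {
//         super( msgPackage, servtron );
//     }
//
//     @Override
//     public void dispatch() { Debug.trace( this.$_MSG() ); }  // echo the extra head
//
//     @Override
//     public void terminate() { }
// }
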
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/BasicServer.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.hydra.server.ArchServer;

public class BasicServer extends ArchServer {
    protected ServersScope serversScope;

    BasicServer( ServersScope scope, JSONObject prototype ) {
        this.serversScope = scope;
        this.extras = prototype;
        this.serversScope.getServerInjector().typeInject( this.getExtras(), this );
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/ConfigScope.java
================================================
package com.pinecone.tritium.system;

import java.util.Map;

import com.pinecone.framework.system.homotype.StereotypicInjector;
import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.unit.MultiScopeMap;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.hydra.config.MapConfigReinterpreter;
import com.pinecone.hydra.system.HyComponent;

public interface ConfigScope extends Pinenut, HyComponent {
    String KeyGlobal = "Global" + TritiumConfigScope.class.getSimpleName();

    JSONConfig getProtoConfig();

    MultiScopeMap getScopeMap();

    MapConfigReinterpreter getMapConfigReinterpreter();

    MapConfigReinterpreter newMapConfigReinterpreter();

    StereotypicInjector autoInject( Class<?> stereotype, Object config, Object instance );

    StereotypicInjector autoInject( Class<?> stereotype, Map config, Object instance );

    StereotypicInjector autoConstruct( Class<?> stereotype, Object config, Object instance );

    StereotypicInjector autoConstruct( Class<?> stereotype, Map config, Object instance );
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/Hierarchy.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.hydra.system.HyHierarchy;

public enum Hierarchy implements HyHierarchy {
    H_MASTER  ( "Master"  ),
    H_PALADIN ( "Paladin" ),
    H_MINION  ( "Minion"  ),
    H_SLAVE   ( "Slave"   );

    private final String value;

    Hierarchy( String value ) {
        this.value = value;
    }

    @Override
    public String getName() { return this.value; }

    public static String queryName( Hierarchy hierarchy ) {
        return hierarchy.getName();
    }

    public static Hierarchy queryHierarchy( String sz ) {
        return Hierarchy.valueOf( "H_" + sz.toUpperCase() );
    }

    @Override
    public boolean isDominantClass() {
        return this == Hierarchy.H_MASTER || this == Hierarchy.H_PALADIN;
    }

    @Override
    public boolean isWorkerClass() {
        return this == Hierarchy.H_MINION || this == Hierarchy.H_SLAVE;
    }
}
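
// ------------------------------------------------------------------------------------
// Illustrative sketch (not a repository file): the enum's helper methods round-trip
// between wire names and constants.
// ------------------------------------------------------------------------------------
// Hierarchy h = Hierarchy.queryHierarchy( "master" );   // valueOf( "H_MASTER" )
// String name = Hierarchy.queryName( h );               // "Master"
// boolean dominant = h.isDominantClass();               // true; H_MINION and H_SLAVE are the worker class
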
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/InterWareDirector.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.ware.MiddlewareDirector;
import com.pinecone.tritium.ally.rdb.RDBManager;
import com.pinecone.tritium.ally.messengers.MessagersManager;

public interface InterWareDirector extends MiddlewareDirector, HyComponent {
    @Override
    TritiumSystem getSystem();

    JSONConfig getMiddlewareConfig();

    @Override
    default JSONConfig getSectionConfig() {
        return this.getMiddlewareConfig();
    }

    RDBManager getRDBManager(); // OLTP-RDB

    MessagersManager getMessagersManager();
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/KnittedMiddlewareDirector.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.system.ArchSystemAutoAssembleComponent;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.ware.WareManager;
import com.pinecone.tritium.ally.messengers.MessagersManager;
import com.pinecone.tritium.ally.rdb.RDBManager;

public class KnittedMiddlewareDirector extends ArchSystemAutoAssembleComponent implements InterWareDirector {
    protected JSONConfig       mjoMiddlewareConf;
    protected RDBManager       mRDBManager;
    protected MessagersManager mMessagersManager;

    public KnittedMiddlewareDirector( Namespace name, Hydrogen system, HyComponent parent ) {
        super( name, system, system.getComponentManager(), parent );
        this.mjoMiddlewareConf = (JSONConfig) system.getSystemConfig().getChild( "Middleware" );
        //this.getSystem().getPrimaryConfigScope().autoInject( MiddlewareManager.class, this.mjoMiddlewareConf, this );
        this.mRDBManager       = new RDBManager( this );
        this.mMessagersManager = new MessagersManager( this );
        this.addChildComponent( this.mRDBManager );
        this.addChildComponent( this.mMessagersManager );
        this.infoLifecycleInitializationDone();
    }

    public KnittedMiddlewareDirector( Hydrogen system, HyComponent parent ) {
        this( null, system, parent );
    }

    public KnittedMiddlewareDirector( Hydrogen system ) {
        this( system, null );
    }

    @Override
    public TritiumSystem getSystem() { return (TritiumSystem) super.getSystem(); }

    @Override
    public JSONConfig getMiddlewareConfig() { return this.mjoMiddlewareConf; }

    @Override
    public WareManager getManager( String name ) { return null; }

    @Override
    public RDBManager getRDBManager() { return this.mRDBManager; }

    @Override
    public MessagersManager getMessagersManager() { return this.mMessagersManager; }
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/MissionTerminateException.java
================================================
package com.pinecone.tritium.system;

public class MissionTerminateException extends RuntimeException {
    public MissionTerminateException() {
        super();
    }

    public MissionTerminateException( String message ) {
        super( message );
    }

    public MissionTerminateException( String message, Throwable cause ) {
        super( message, cause );
    }

    @Override
    public String toString() { return "[object MissionTerminateException]"; }

    public String prototypeName() { return "MissionTerminateException"; }
}
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/NomenclatureAllocator.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.system.prototype.Pinenut;

public interface NomenclatureAllocator extends Pinenut {
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/ServersScope.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.json.homotype.DirectObjectInjector;
import com.pinecone.hydra.server.ArchServer;
import com.pinecone.hydra.server.ArchServersCenter;
import com.pinecone.hydra.server.Server;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.tritium.util.ConfigHelper;

import java.io.IOException;
import java.util.Map;

public class ServersScope extends ArchServersCenter implements Pinenut {
    protected DirectObjectInjector mServerInjector;

    public ServersScope( Namespace name, Hydrogen system, HyComponent parent ) {
        super( name, system, parent );
        this.mServerInjector = new DirectObjectInjector( ConfigHelper.fnToSmallHumpName, ArchServer.class );
        this.fetchAll();
        this.reinterpret();
    }

    public ServersScope( Hydrogen system, HyComponent parent ) {
        this( null, system, parent );
    }

    public ServersScope( Hydrogen system ) {
        this( system, null );
    }

    @Override
    public TritiumSystem getSystem() { return (TritiumSystem) super.getSystem(); }

    @Override
    protected void loadConfig() {
        JSONConfig sys = (JSONConfig) this.getSystem().getSystemConfig();
        Object jServers = sys.opt( "Servers" );
        if ( jServers instanceof String ) {
            try {
                this.serversConfig = sys.fromFile( this.getSystem().getWorkingPath().resolve( (String) jServers ).toFile() );
            } catch ( IOException e ) {
                this.getSystem().handleKillException( e );
            }
        } else {
            this.serversConfig = (JSONObject) jServers;
        }
        sys.put( "Servers", this.serversConfig );
    }

    protected void reinterpret() {
        for ( Map.Entry kv : this.getNickNameMap().entrySet() ) {
            this.getSystem().getGlobalConfigScope().put( kv.getKey(), ( (BasicServer) kv.getValue() ).getLocalDomain() );
        }
    }

    @Override
    protected Server newServer( JSONObject prototype ) {
        return new BasicServer( this, prototype );
    }

    DirectObjectInjector getServerInjector() { return this.mServerInjector; }
}
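
// ------------------------------------------------------------------------------------
// Illustrative note (not a repository file): loadConfig() above accepts either form of
// the "Servers" entry in the system config. The file name and server key below are
// hypothetical.
// ------------------------------------------------------------------------------------
// "Servers" : "conf/servers.json5"                    // string: loaded from a file under the working path
// "Servers" : { "WebGate" : { /* prototype */ } }     // object: used inline as-is
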
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/StorageSystem.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.StringUtils;
import com.pinecone.framework.util.json.homotype.JSONGet;
import com.pinecone.framework.util.lang.DynamicFactory;
import com.pinecone.framework.util.lang.GenericDynamicFactory;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.system.ArchSystemCascadeComponent;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.template.TemplateParser;
import com.pinecone.hydra.system.Hydrogen;
import org.apache.commons.vfs2.CacheStrategy;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FilesCache;
import org.apache.commons.vfs2.impl.StandardFileSystemManager;
import org.apache.commons.vfs2.provider.FileProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Map;

public class StorageSystem extends ArchSystemCascadeComponent implements Pinenut, HyComponent {
    protected JSONObject mjoProtoConfig;

    @JSONGet( "PathScope.Reinterpret" ) protected JSONObject mjoToReinterpret;

    protected JSONObject mjoReinterpretedScope;

    @JSONGet( "Protocols" )             protected JSONObject mProtocols;

    protected DynamicFactory mFSProvidesFactory;

    @JSONGet( "CacheStrategy" )         protected String     mszCacheStrategy;
    @JSONGet( "FilesCache" )            protected String     mszFilesCache;

    private final StandardFileSystemManager mFileSystemManager;

    protected Logger mLogger;

    public StorageSystem( Namespace name, Hydrogen system, HyComponent parent ) {
        super( name, system, system.getComponentManager(), parent );
        this.mFileSystemManager = new StandardFileSystemManager();
        if ( system instanceof TritiumSystem ) {
            this.mLogger = ((TritiumSystem) system).getTracerScope().newLogger( this.className() );
        } else {
            this.mLogger = LoggerFactory.getLogger( this.className() + "Logger" );
        }
        this.loadConfig();
        this.reinterpret();
        this.prepareFileSystem();
    }

    public StorageSystem( Hydrogen system, HyComponent parent ) {
        this( null, system, parent );
    }

    public StorageSystem( Hydrogen system ) {
        this( system, null );
    }

    protected void loadConfig() {
        JSONConfig sys = (JSONConfig) this.getSystem().getSystemConfig();
        Object jPathScope = sys.opt( this.className() );
        if ( jPathScope instanceof String ) {
            try {
                this.mjoProtoConfig = sys.fromFile( this.getSystem().getWorkingPath().resolve( (String) jPathScope ).toFile() );
            } catch ( IOException e ) {
                this.getSystem().handleKillException( e );
            }
        } else {
            this.mjoProtoConfig = (JSONObject) jPathScope;
        }
        sys.put( this.className(), this.mjoProtoConfig );
        this.getSystem().getPrimaryConfigScope().autoInject( StorageSystem.class, this.mjoProtoConfig, this );
    }

    protected void reinterpret() {
        this.mjoReinterpretedScope = (JSONObject) this.getSystem().getGlobalConfigScope().thisScope();
        for ( Map.Entry<String, Object> kv : this.mjoToReinterpret.entrySet() ) {
            if ( this.mjoReinterpretedScope.hasOwnProperty( kv.getKey() ) ) {
                throw new IllegalArgumentException( "Illegal system config, duplicated config key." );
            }
            if ( kv.getValue() instanceof String ) {
                String szRaw = (String) kv.getValue();
                TemplateParser parser = new TemplateParser( szRaw, this.mjoReinterpretedScope );
                this.mjoReinterpretedScope.put( kv.getKey(), parser.eval() );
            } else {
                throw new IllegalArgumentException( "Illegal system config, reinterpret key can not be object." );
            }
        }
    }

    protected void prepareFileSystemProvides() {
        for ( Object o : this.mProtocols.entrySet() ) {
            Map.Entry kv = (Map.Entry) o;
            JSONObject info = (JSONObject) kv.getValue();
            String szProvide = info.optString( "Provide" );
            boolean bDone = true;
            if ( !StringUtils.isEmpty( szProvide ) ) { // Empty for defaults, e.g. `file:///`
                Object provide = this.mFSProvidesFactory.optLoadInstance( szProvide, null, null );
                if ( provide instanceof FileProvider ) {
                    try {
                        this.mFileSystemManager.addProvider( kv.getKey().toString(), (FileProvider) provide );
                    } catch ( FileSystemException e ) {
                        this.mLogger.warn( "[AddFileSystemProviderCompromised] [FileSystemException] " + e.getMessage() + ">" );
                        bDone = false;
                    }
                } else {
                    this.mLogger.warn( "[BadAddFileSystemProvider] [Illegal provider or null] <" + kv.getKey() + "::`" + szProvide + "`>" );
                    bDone = false;
                }
            }
            if ( bDone ) {
                szProvide = StringUtils.isEmpty( szProvide ) ? "Default" : szProvide;
                this.mLogger.info( "[AddFileSystemProvider] (" + kv.getKey() + "::`" + szProvide + "`) " );
            }
        }
    }

    protected void prepareFileSystemCache() {
        CacheStrategy strategy = StringUtils.isEmpty( this.mszCacheStrategy ) ? CacheStrategy.ON_CALL : CacheStrategy.valueOf( this.mszCacheStrategy );
        try {
            this.mFileSystemManager.setCacheStrategy( strategy );
        } catch ( FileSystemException e ) {
            this.mLogger.warn( "[SetCacheStrategy] [Compromised] " + e.getMessage() + ">" );
        }

        if ( !StringUtils.isEmpty( this.mszFilesCache ) ) {
            Object cache = this.mFSProvidesFactory.optLoadInstance( this.mszFilesCache, null, null );
            if ( cache instanceof FilesCache ) {
                try {
                    this.mFileSystemManager.setFilesCache( (FilesCache) cache );
                } catch ( FileSystemException e ) {
                    this.mLogger.warn( "[SetFilesCacheCompromised] [FileSystemException] " + e.getMessage() + ">" );
                }
            } else {
                this.mLogger.warn( "[SetFilesCacheCompromised] [Illegal FilesCache or null] <`" + this.mszFilesCache + "`>" );
            }
        }
    }

    protected void prepareFileSystem() {
        this.mFSProvidesFactory = new GenericDynamicFactory( this.getSystem().getTaskManager().getClassLoader() );
        this.prepareFileSystemProvides();
        this.prepareFileSystemCache();
    }

    public JSONObject getProtoConfig() { return this.mjoProtoConfig; }

    public JSONObject getReinterpretedScope() { return this.mjoReinterpretedScope; }

    public JSONObject getToReinterpret() { return this.mjoToReinterpret; }

    @Override
    public TritiumSystem getSystem() { return (TritiumSystem) super.getSystem(); }

    public StandardFileSystemManager getFileSystemManager() { return this.mFileSystemManager; }
}
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/SystemDaemon.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.auto.ArchInstructation;
import com.pinecone.hydra.auto.PeriodicAutomaton;
import com.pinecone.hydra.auto.PeriodicAutomatron;
import com.pinecone.hydra.system.ArchSystemCascadeComponent;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

public class SystemDaemon extends ArchSystemCascadeComponent implements Pinenut, HyComponent {
    protected long               mnSurveillanceTime;
    protected JSONObject         mDaemonConfig;
    protected PeriodicAutomatron mAutomatron;
    protected Logger             mLogger;

    public SystemDaemon( Namespace name, Hydrogen system, HyComponent parent ) {
        super( name, system, system.getComponentManager(), parent );
        this.loadConfig();
        this.mLogger = LoggerFactory.getLogger( String.format( "Tracer<%s>", this.className() ) );
        this.mAutomatron = new PeriodicAutomaton( this.className(), system, this.mnSurveillanceTime, true );
        this.mAutomatron.command( new ArchInstructation() {
            boolean mbStarted = false;

            @Override
            public void execute() throws Exception {
                if ( !this.mbStarted ) {
                    SystemDaemon.this.infoLifecycle( "DaemonStarted", "Start" );
                    this.mbStarted = true;
                    SystemDaemon.this.mAutomatron.withdraw( this );
                }
            }
        } );
        this.getSystem().getTaskManager().add( this.mAutomatron );
        this.mAutomatron.start();
    }

    public SystemDaemon( Hydrogen system, HyComponent parent ) {
        this( null, system, parent );
    }

    public SystemDaemon( Hydrogen system ) {
        this( system, null );
    }

    protected void loadConfig() {
        JSONConfig sys = (JSONConfig) this.getSystem().getSystemConfig();
        Object jDaemon = sys.opt( "SystemDaemon" );
        if ( jDaemon instanceof String ) {
            try {
                this.mDaemonConfig = sys.fromFile( this.getSystem().getWorkingPath().resolve( (String) jDaemon ).toFile() );
            } catch ( IOException e ) {
                this.getSystem().handleKillException( e );
            }
        } else {
            this.mDaemonConfig = (JSONObject) jDaemon;
        }
        this.mnSurveillanceTime = this.mDaemonConfig.optLong( "SurveillanceTime" );
    }

    public PeriodicAutomatron getAutomatron() { return this.mAutomatron; }

    protected SystemDaemon infoLifecycle( String szWhat, String szStateOrExtra ) {
        this.mLogger.info( "[Lifecycle] [{}] <{}>", szWhat, szStateOrExtra );
        return this;
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/TritiumConfigScope.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.system.ProxyProvokeHandleException;
import com.pinecone.framework.system.construction.UnifyStructureInjector;
import com.pinecone.framework.system.homotype.StereotypicInjector;
import com.pinecone.framework.unit.MultiScopeMap;
import com.pinecone.framework.unit.MultiScopeMaptron;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.homotype.AnnotatedObjectInjector;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.config.MapConfigReinterpreter;
import com.pinecone.hydra.config.ScopedMapConfigReinterpreter;
import com.pinecone.hydra.system.ArchSystemCascadeComponent;
import com.pinecone.hydra.system.HyComponent;
import com.pinecone.hydra.system.Hydrogen;

import java.util.Map;

public class TritiumConfigScope extends ArchSystemCascadeComponent implements ConfigScope {
    protected JSONConfig             mProtoConfig;
    protected MultiScopeMap          mConfigScope;
    protected MapConfigReinterpreter mConfigReinterpreter;

    public TritiumConfigScope( Namespace name, Hydrogen system, HyComponent parent, JSONConfig config ) {
        super( name, system, system.getComponentManager(), parent );
        this.mConfigScope = new MultiScopeMaptron<>( new JSONMaptron() );
        this.mProtoConfig = config;
        this.reinterpret_conf_default();
        this.mConfigReinterpreter = new ScopedMapConfigReinterpreter( this.getScopeMap() );
    }

    public TritiumConfigScope( Hydrogen system, HyComponent parent, JSONConfig config ) {
        this( (Namespace) null, system, parent, config );
    }

    public TritiumConfigScope( Hydrogen system, JSONConfig config ) {
        this( system, null, config );
    }

    public TritiumConfigScope( String name, Hydrogen system, HyComponent parent, JSONConfig config ) {
        this( system, parent, config );
        this.setTargetingName( name );
    }

    public TritiumConfigScope( String name, Hydrogen system, JSONConfig config ) {
        this( name, system, null, config );
    }

    protected void reinterpret_conf_default() {
        this.mConfigScope.setName( "GlobalConfigScope" );
        for ( Map.Entry kv : this.getProtoConfig().entrySet() ) {
            this.mConfigScope.put( kv.getKey(), kv.getValue() );
        }
    }

    @Override
    public JSONConfig getProtoConfig() { return this.mProtoConfig; }

    @Override
    public MultiScopeMap getScopeMap() { return this.mConfigScope; }

    @Override
    public MapConfigReinterpreter getMapConfigReinterpreter() { return this.mConfigReinterpreter; }

    @Override
    public MapConfigReinterpreter newMapConfigReinterpreter() {
        return new ScopedMapConfigReinterpreter( this.getScopeMap() );
    }

    @Override
    public StereotypicInjector autoInject( Class<?> stereotype, Object config, Object instance ) {
        AnnotatedObjectInjector injector = new AnnotatedObjectInjector( stereotype );
        try {
            injector.inject( config, instance );
            return injector;
        } catch ( Exception e ) {
            throw new ProxyProvokeHandleException( e );
        }
    }

    @Override
    public StereotypicInjector autoInject( Class<?> stereotype, Map config, Object instance ) {
        AnnotatedObjectInjector injector = new AnnotatedObjectInjector( stereotype );
        try {
            injector.inject( config, instance );
        } catch ( Exception e ) {
            throw new ProxyProvokeHandleException( e );
        }
        return injector;
    }

    @Override
    public StereotypicInjector autoConstruct( Class<?> stereotype, Object config, Object instance ) {
        UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, ((TritiumSystem) this.getSystem()).getDispenserCenter().getInstanceDispenser() );
        try {
            injector.inject( config, instance );
            return injector;
        } catch ( Exception e ) {
            throw new ProxyProvokeHandleException( e );
        }
    }

    @Override
    public StereotypicInjector autoConstruct( Class<?> stereotype, Map config, Object instance ) {
        UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, ((TritiumSystem) this.getSystem()).getDispenserCenter().getInstanceDispenser() );
        try {
            injector.inject( config, instance );
        } catch ( Exception e ) {
            throw new ProxyProvokeHandleException( e );
        }
        return injector;
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/TritiumSystem.java
================================================
package com.pinecone.tritium.system;

import com.pinecone.framework.system.PrimarySystem;
import com.pinecone.framework.util.lang.DynamicFactory;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.system.subsystem.KernelMicroSystemCabinet;
import com.pinecone.hydra.system.types.HydraKingdom;

public interface TritiumSystem extends HydraKingdom, PrimarySystem {
    InterWareDirector getMiddlewareDirector();

    SystemDaemon getSystemDaemon();

    ServersScope getServersScope();

    StorageSystem getStorageSystem();

    ConfigScope getPrimaryConfigScope();

    ResourceDispenserCenter getDispenserCenter();

    KernelMicroSystemCabinet getKernelMicroSystemCabinet();

    DynamicFactory getShardDynamicFactory();
}
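
// ------------------------------------------------------------------------------------
// Illustrative sketch (not a repository file): autoInject() above is how components
// such as RDBManager and StorageSystem populate their annotated fields. The stereotype
// and config names below are hypothetical.
// ------------------------------------------------------------------------------------
// system.getPrimaryConfigScope().autoInject( MyComponent.class, configJson, component );
// // -> each @JSONGet / @MapStructure field declared on MyComponent.class is filled
// //    from the matching path in configJson via an AnnotatedObjectInjector
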
================================================
FILE: Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/util/ConfigHelper.java
================================================
package com.pinecone.tritium.util;

import com.pinecone.framework.system.functions.Function;

public class ConfigHelper {
    public static final Function fnToSmallHumpName = ( Object... arg ) -> {
        return ConfigHelper.toSmallHumpName( arg[0] );
    };

    public static final Function fnToBigHumpName = ( Object... arg ) -> {
        return ConfigHelper.toBigHumpName( arg[0] );
    };

    public static String toSmallHumpName( String sz ) {
        StringBuilder sb = new StringBuilder();
        sb.append( sz );
        sb.setCharAt( 0, Character.toLowerCase( sb.charAt( 0 ) ) );
        return sb.toString();
    }

    public static String toBigHumpName( String sz ) {
        StringBuilder sb = new StringBuilder();
        sb.append( sz );
        sb.setCharAt( 0, Character.toUpperCase( sb.charAt( 0 ) ) );
        return sb.toString();
    }

    public static String toSmallHumpName( Object sz ) {
        return ConfigHelper.toSmallHumpName( (String) sz );
    }

    public static String toBigHumpName( Object sz ) {
        return ConfigHelper.toBigHumpName( (String) sz );
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/main/resources/logback.xml
================================================
%d{HH:mm:ss.SSS} [%thread] [%level] %logger{36}: %msg%n



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/major/TestTritium.java
================================================
package com.major;

import com.pinecone.Pinecone;
import com.pinecone.tritium.Tritium;

public class TestTritium {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( ( Object... cfg ) -> {
            Tritium tritium = (Tritium) Pinecone.sys().getTaskManager().add( new Tritium( args, Pinecone.sys() ) );
            tritium.vitalize();
            return 0;
        }, (Object[]) args );
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/mc/JesusChrist.java
================================================
package com.mc;

import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.tritium.Tritium;

public class JesusChrist extends Tritium {
    public JesusChrist( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public JesusChrist( String[] args, String szName, CascadeSystem parent ) {
        super( args, szName, parent );
    }

    public void vitalize() throws Exception {
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/mc/TestMCClient.java
================================================
package com.mc;

import com.pinecone.hydra.umc.msg.ChannelControlBlock;
import io.netty.channel.ChannelHandlerContext;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umct.WolfMCExpress;
import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;
import com.pinecone.hydra.umc.wolf.UlfInformMessage;
import com.pinecone.hydra.umc.wolf.client.WolfMCClient;
import com.pinecone.tritium.messagron.Messagron;

class Jesus extends JesusChrist {
    public Jesus( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Jesus( String[] args, String szName, CascadeSystem parent ) {
        super( args, szName, parent );
    }

    @Override
    public void vitalize() throws Exception {
        this.testClient();
    }

    public void testClient() throws Exception {
        Messagron servtron = new Messagron( "", this, new JSONMaptron( "{\n" +
                "    \"Engine\" : \"com.pinecone.tritium.messagron.Messagron\",\n" +
                "    \"Enable\" : true,\n" +
                "    \"ExpressFactory\" : \"com.pinecone.framework.util.lang.GenericDynamicFactory\",\n" +
                "\n" +
                "    \"Expresses\" : {\n" +
                "        \"WolfMCExpress\": {\n" +
                "            \"Engine\": \"com.pinecone.hydra.umct.WolfMCExpress\"\n" +
                "        }\n" +
                "    }\n" +
                "}" ) );
        WolfMCClient wolf = new WolfMCClient( "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) );
        wolf.apply( new WolfMCExpress( servtron ) ).execute();

        JSONObject jsonObject = new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': 'queryHeistConfTPL', 'heist': 'NeteaseMusic', 'instance': 'RavageAlbums'}" );

//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '1'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 1 fuck me" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '2'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 2 fuck me" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '3'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 3 fuck me" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '4'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 4 fuck me" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '5'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 5 fuck me" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '6'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 6 fuck he" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '7'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 7 fuck she" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '8'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 8 fuck it" ) ) );
//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( "{Messagelet:'ServiceCenter', 'do': '9'}" ), "test 12345678 Messagers.Messagers.WolfMCKingpin 9 fuck those" ) ) );

//        JSONObject jo = new JSONMaptron( "{'do': 'Morning' }" );
//        try ( ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); ObjectOutputStream objectStream = new ObjectOutputStream(byteStream) ) {
//            objectStream.writeObject( jo );
//            byte[] bytes = byteStream.toByteArray();
//            Debug.trace( wolf.sendSyncMsg( new UlfInformMessage( bytes, 0xAEF2048 ) ) );
//        }

        Debug.trace( wolf.sendSyncMsg( new UlfInformMessage( jsonObject ) ).getHead().getExtraHead() );
        Debug.trace( wolf.sendSyncMsg( new UlfInformMessage( jsonObject ) ) );
        //wolf.sendAsynMsg( new UlfInformMessage( jsonObject ) );
        wolf.sendAsynMsg( new UlfInformMessage( jsonObject ), new UlfAsyncMsgHandleAdapter() {
            @Override
            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {
                Debug.trace( "Ajax", msg );
                Debug.trace( msg.getHead() );
            }
        } );
        //wolf.sendAsynMsg( new UlfMCMessage( jsonObject ) );
        //wolf.sendAsynMsg( new UlfMCMessage( jsonObject ) );
        wolf.sendAsynMsg( new UlfInformMessage( jsonObject ), new UlfAsyncMsgHandleAdapter() {
            @Override
            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {
                Debug.trace( "fuck javascript", msg );
            }
        } );

        this.getTaskManager().add( wolf );
        this.getTaskManager().syncWaitingTerminated();
    }
}

public class TestMCClient {
    public static void main( String[] args ) throws Exception {
        //String[] as = args;
        String[] as = new String[]{ "TestWolfMCClient=true" };
        Pinecone.init( ( Object... cfg ) -> {
            Jesus jesus = (Jesus) Pinecone.sys().getTaskManager().add( new Jesus( as, Pinecone.sys() ) );
            jesus.vitalize();
            return 0;
        }, (Object[]) as );
    }
}
================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/mc/TestMCServer.java
================================================
package com.mc;

import com.pinecone.hydra.umc.wolf.UlfStreamTransferMessage;
import com.pinecone.hydra.umct.WolfMCExpress;
import com.pinecone.hydra.umc.msg.ChannelControlBlock;
import com.pinecone.hydra.umc.msg.Medium;
import com.pinecone.hydra.umc.msg.UMCMessage;
import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;
import com.pinecone.hydra.umc.wolf.server.WolfMCServer;
import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.tritium.messagron.Messagron;
import io.netty.channel.ChannelHandlerContext;

class Christ extends JesusChrist {
    public Christ( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Christ( String[] args, String szName, CascadeSystem parent ) {
        super( args, szName, parent );
    }

    @Override
    public void vitalize() throws Exception {
        //this.testServer();
        // this.testSystemServer();
        this.testServerCos();
    }

    public void testServer() throws Exception {
        Messagron messagron = new Messagron( "", this, new JSONMaptron() );
        WolfMCServer wolf = new WolfMCServer( "", this, new JSONMaptron( "{host: \"0.0.0.0\",\n" +
                "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}" ) );
        WolfMCExpress express = new WolfMCExpress( messagron );
        wolf.apply( express );
        wolf.execute();

        this.getTaskManager().add( wolf );
        this.getTaskManager().syncWaitingTerminated();
    }

    public void testSystemServer() throws Exception {
//        WolfMCServer wolf = (WolfMCServer) this.getMiddlewareDirector().getMessagersManager().getMessageNodeByName( "WolfKing" );
//        wolf.execute();
//
//        this.getTaskManager().add( wolf );
        this.getTaskManager().syncWaitingTerminated();
    }

    public void testServerCos() throws Exception {
        Messagron messagron = new Messagron( "", this, new JSONMaptron() );
        WolfMCServer wolf = new WolfMCServer( "", this, new JSONMaptron( "{host: \"0.0.0.0\",\n" +
                "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}" ) );
        wolf.apply( new UlfAsyncMsgHandleAdapter() {
            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {
                Debug.redf( rawMsg, new String( ( (UlfStreamTransferMessage) rawMsg ).getBody().readAllBytes() ) );
//                UlfStreamTransferMessage mc = (UlfStreamTransferMessage) rawMsg;
//                Map jo = mc.getHead().getMapExtraHead();
//                String dos = jo.get( "do" ).toString();
//                if ( dos.equals( "queryHeistConfTPL" ) ) {
//                    Debug.trace( "hahahaha" );
//                }
//                if ( dos.equals( "xixi" ) ) {
//                    Debug.trace( "xixi" );
//                }
            }
        } );
        wolf.execute();

        this.getTaskManager().add( wolf );
        this.getTaskManager().syncWaitingTerminated();
    }
}

public class TestMCServer {
    public static void main( String[] args ) throws Exception {
        //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5");
        Pinecone.init( ( Object... cfg ) -> {
            Christ christ = (Christ) Pinecone.sys().getTaskManager().add( new Christ( args, Pinecone.sys() ) );
            christ.vitalize();
            return 0;
        }, (Object[]) args );
    }
}



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Bear.java
================================================
package com.protobuf;

import java.util.List;

import com.pinecone.framework.util.json.homotype.StructJSONEncoder;

public class Bear {
    private String name;
    private int force;
    private List values;
    private String type;

    public String getName() { return name; }

    public void setName( String name ) { this.name = name; }

    public int getForce() { return force; }

    public void setForce( int force ) { this.force = force; }

    public List getValues() { return values; }

    public void setValues( List values ) { this.values = values; }

    public String getType() { return type; }

    public void setType( String type ) { this.type = type; }

    public String toJSONString() { return StructJSONEncoder.BasicEncoder.encode( this, true ); }
}



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Beaver.java
================================================
package com.protobuf;

import com.pinecone.hydra.umct.stereotype.Iface;

public interface Beaver {
    @Iface
    String cutting( String target );
}



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Monkey.java
================================================
package com.protobuf;

import com.pinecone.framework.util.json.homotype.GenericBeanJSONEncoder;

public class Monkey {
    public String name;

    public String getName() { return this.name; }

    public void setName( String name ) { this.name = name; }

    public String toJSONString() { return GenericBeanJSONEncoder.BasicEncoder.encode( this ); }

    @Override
    public String toString() { return this.toJSONString(); }
}



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Parasite.java
================================================
package com.protobuf;

import com.pinecone.framework.util.json.homotype.DirectJSONInjector;

public class Parasite {
    public String name;
    public long length;
    public int emnus;

    public Parasite() {
    }

    public String getName() { return this.name; }

    public long getLength() { return this.length; }

    public void setName( String name ) { this.name = name; }

    public void setLength( long length ) { this.length = length; }

    public String toJSONString() { return DirectJSONInjector.instance().inject( this ).toString(); }

    public String toString() { return DirectJSONInjector.instance().inject( this ).toString(); }
}
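
// ------------------------------------------------------------------------------------
// Illustrative sketch (not a repository file): the test beans above each bind a
// different JSON encoder; a quick toJSONString() call contrasts the bindings.
// ------------------------------------------------------------------------------------
// Monkey m = new Monkey();
// m.setName( "Kong" );
// String viaBeanEncoder = m.toJSONString();   // GenericBeanJSONEncoder output
//
// Parasite p = new Parasite();
// p.setName( "Lamprey" );
// p.setLength( 12L );
// String viaInjector = p.toJSONString();      // DirectJSONInjector output
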
================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Rabbit.java
================================================
package com.protobuf;

import com.pinecone.framework.util.json.homotype.GenericBeanJSONEncoder;

public class Rabbit {
    public String name;
    public byte[] bytes;
    public Monkey monkey;
    public Monkey[] monkeys;
    public Rabbit sub;
    public boolean bool;

    public boolean isBool() { return this.bool; }

    public void setBool( boolean bool ) { this.bool = bool; }

    public Rabbit getSub() { return this.sub; }

    public void setSub( Rabbit sub ) { this.sub = sub; }

    public Monkey getMonkey() { return this.monkey; }

    public Monkey[] getMonkeys() { return this.monkeys; }

    public void setMonkey( Monkey monkey ) { this.monkey = monkey; }

    public void setMonkeys( Monkey[] monkeys ) { this.monkeys = monkeys; }

    public byte[] getBytes() { return this.bytes; }

    public void setBytes( byte[] bytes ) { this.bytes = bytes; }

    public String getName() { return this.name; }

    public void setName( String name ) { this.name = name; }

    public String toJSONString() { return GenericBeanJSONEncoder.BasicEncoder.encode( this ); }

    @Override
    public String toString() { return this.toJSONString(); }
}



================================================
FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Raccoon.java
================================================
package com.protobuf;

import java.util.List;

import com.pinecone.hydra.umct.stereotype.Iface;

//@Iface
public interface Raccoon {
    @Iface
    String scratch( String target, int time );

    @Iface
//    default String scratchA( String target, int time, byte[] bs ) {
//        return null;
//    }
    default String scratchA( String target, int time, Rabbit rabbit ) {
        return null;
    }

    @Iface
    default void scratchV( String target, int time ) {
    }

    @Iface
    default Rabbit[] scratchC( String target, int time, Rabbit[] more ) {
        return more;
    }

    @Iface
    default String[] scratchS( String target, int time, String[] more ) {
        return more;
    }

    @Iface
    default List scratchList( String target, int time, List more ) {
        return more;
    }

    @Iface
    default boolean scratchPrime( String target, int time ) {
        return time != 0;
    }

    @Iface
    default void scratchVoid() {
    }

//    @Iface( name = "scratchF1" )
//    default String scratch( String target, long[] times ) {
//        return null;
//    }
//
//    @Iface
//    default void nil() {
//
//    }
}
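
// ------------------------------------------------------------------------------------
// Illustrative note (not a repository file): scratch(...) is the interface's only
// abstract method, so Raccoon can be instantiated with a lambda in tests, and the
// defaults simply echo their inputs.
// ------------------------------------------------------------------------------------
// Raccoon r = ( target, time ) -> "Scratch " + target + time;
// r.scratch( "tree", 3 );        // "Scratch tree3"
// r.scratchPrime( "tree", 0 );   // false, since time == 0
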
) public class RaccoonController { //@AddressMapping( { "/scratch", "/scratches" } ) //@AddressMapping() @AddressMapping( "scratch" ) public String scratch( String target, int time ) { Debug.whitef( "Raccoon invoked " + target + time ); return "Raccoon Scratch " + target + time; } @AddressMapping( "scratchA" ) public String scratchA( String target, int time, Rabbit map ) { Debug.bluef( "Raccoon invoked " + target + time ); Debug.bluef( map.getName(), map.bytes.length, (Object) map.bytes, map.getMonkey().name ); return "Raccoon Scratch " + target + time; } @AddressMapping( "scratchV" ) public void scratchV( String target, int time ) { Debug.bluef( "Raccoon invoked V" + target + time ); //return "Raccoon Scratch " + target + time; } @AddressMapping( "scratchC" ) public Rabbit[] scratchC( String target, int time, Rabbit[] list ) { Debug.bluef( "Raccoon invoked C" + target + time ); return list; } @AddressMapping( "scratchS" ) public String[] scratchS(String target, int time, String[] list ) { Debug.bluef( "Raccoon invoked S" + target + time ); return list; } @AddressMapping( "scratchList" ) public List scratchList(String target, int time, List list ) { Debug.bluef( "Raccoon invoked S" + target + time ); return list; } @AddressMapping( "scratchPrime" ) public boolean scratchPrime( String target, int time ) { Debug.bluef( "Raccoon invoked Prime" + target + time ); return time != 0; } @AddressMapping( "scratchVoid" ) public void scratchVoid() { Debug.bluef( "Raccoon invoked Void" ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/RaccoonKing.java ================================================ package com.protobuf; public class RaccoonKing implements Raccoon { @Override public String scratch( String target, int time ) { return "Scratch " + target + time; } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/RedBeaver.java ================================================ package com.protobuf; public class RedBeaver implements Beaver { @Override public String cutting( String target ) { return "A cute beaver is cutting a " + target; } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/RedRaccoon.java ================================================ package com.protobuf; import java.lang.reflect.Method; import java.util.List; import java.util.Set; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler; import com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.hydra.umct.husky.compiler.MethodPrototype; import com.pinecone.hydra.umct.husky.function.ArgumentRequest; import com.pinecone.hydra.umct.husky.function.GenericArgumentRequest; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufEncoder; import com.pinecone.ulf.util.protobuf.Options; import javassist.ClassPool; public class RedRaccoon implements Raccoon { //@Override public String scratch1( String target, int time ) { try{ Method[] methods = Raccoon.class.getMethods(); GenericArgumentRequest request = new GenericArgumentRequest( Raccoon.class.getName(), methods[0].getParameterTypes() ); GenericFieldProtobufEncoder encoder = 
new GenericFieldProtobufEncoder(); GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder(); Options options = new Options(); FieldEntity[] types = request.getSegments(); Descriptors.Descriptor descriptor = encoder.transform( types, "Args", Set.of(), options ); Debug.trace( descriptor.getFields() ); request.setField( 0, target ); request.setField( 1, time ); DynamicMessage message = encoder.encode( descriptor, types, Set.of(), options ); byte[] mb = message.toByteArray(); message = DynamicMessage.parseFrom( descriptor, mb ); request = new GenericArgumentRequest( Raccoon.class.getName(), methods[0].getParameterTypes() ); decoder.decodeEntries( request.getSegments(), descriptor, message, Set.of(), options ); Descriptors.Descriptor retDes = encoder.transform( String.class, null, Set.of() ); Debug.trace( retDes.getFields() ); DynamicMessage retMsg = encoder.encode( retDes, request.getField(0).getValue(), Set.of(), options ); DynamicMessage retDy = DynamicMessage.parseFrom( retDes, retMsg.toByteArray() ); String dm = decoder.decode( String.class, retDes, retDy, Set.of(), options ); Debug.info(dm); return "scratch " + dm; } catch ( InvalidProtocolBufferException e ) { return null; } } @Override public String scratch( String target, int time ) { try{ BytecodeIfaceCompiler inspector = new BytecodeIfaceCompiler( ClassPool.getDefault() ); List digests = inspector.compile( Raccoon.class, false ).getMethodDigests(); MethodPrototype methodPrototype = (MethodPrototype)digests.get(0); Descriptors.Descriptor argDes = methodPrototype.getArgumentsDescriptor(); Descriptors.Descriptor retDes = methodPrototype.getReturnDescriptor(); GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder(); GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder(); Options options = new Options(); ArgumentRequest request = methodPrototype.conformRequest( new Object[] { target, time } ); FieldEntity[] types = request.getSegments(); Debug.trace( argDes.getFields() ); DynamicMessage message = encoder.encode( argDes, types, Set.of(), options ); byte[] mb = message.toByteArray(); message = DynamicMessage.parseFrom( argDes, mb ); request = methodPrototype.conformRequest(); decoder.decodeEntries( request.getSegments(), argDes, message, Set.of(), options ); Debug.trace( retDes.getFields() ); DynamicMessage retMsg = encoder.encode( retDes, request.getField(0).getValue(), Set.of(), options ); DynamicMessage retDy = DynamicMessage.parseFrom( retDes, retMsg.toByteArray() ); String dm = decoder.decode( String.class, retDes, retDy, Set.of(), options ); Debug.info(dm); return "scratch " + dm; } catch ( InvalidProtocolBufferException e ) { return null; } } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Rpc.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! 
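// For reference, a proto3 schema consistent with the serialized descriptorData
// in this file (a reconstruction for readability; the original rpc.proto is not
// included in this extract):
//
//   syntax = "proto3";
//   option java_package = "com.protobuf";
//   option java_multiple_files = true;
//
//   message RpcRequest  { string method = 1; bytes payload = 2; }
//   message RpcResponse { int32 code = 1; string message = 2; bytes payload = 3; }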
// source: rpc.proto package com.protobuf; public final class Rpc { private Rpc() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_RpcRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_RpcRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_RpcResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_RpcResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\trpc.proto\"-\n\nRpcRequest\022\016\n\006method\030\001 \001(" + "\t\022\017\n\007payload\030\002 \001(\014\"=\n\013RpcResponse\022\014\n\004cod" + "e\030\001 \001(\005\022\017\n\007message\030\002 \001(\t\022\017\n\007payload\030\003 \001(" + "\014B\020\n\014com.protobufP\001b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }); internal_static_RpcRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_RpcRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_RpcRequest_descriptor, new java.lang.String[] { "Method", "Payload", }); internal_static_RpcResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_RpcResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_RpcResponse_descriptor, new java.lang.String[] { "Code", "Message", "Payload", }); } // @@protoc_insertion_point(outer_class_scope) } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcRequest.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: rpc.proto package com.protobuf; /** * Protobuf type {@code RpcRequest} */ public final class RpcRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:RpcRequest) RpcRequestOrBuilder { private static final long serialVersionUID = 0L; // Use RpcRequest.newBuilder() to construct. 
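  // A typical round trip with this generated type looks like the sketch below
  // (illustrative values only):
  //
  //   RpcRequest req = RpcRequest.newBuilder()
  //       .setMethod( "echo" )
  //       .setPayload( com.google.protobuf.ByteString.copyFromUtf8( "ping" ) )
  //       .build();
  //   byte[] wire = req.toByteArray();
  //   RpcRequest back = RpcRequest.parseFrom( wire );  // back.getMethod() returns "echo"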
private RpcRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RpcRequest() { method_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new RpcRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.protobuf.Rpc.internal_static_RpcRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.protobuf.Rpc.internal_static_RpcRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.protobuf.RpcRequest.class, com.protobuf.RpcRequest.Builder.class); } public static final int METHOD_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object method_ = ""; /** * string method = 1; * @return The method. */ @java.lang.Override public java.lang.String getMethod() { java.lang.Object ref = method_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); method_ = s; return s; } } /** * string method = 1; * @return The bytes for method. */ @java.lang.Override public com.google.protobuf.ByteString getMethodBytes() { java.lang.Object ref = method_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); method_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAYLOAD_FIELD_NUMBER = 2; private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; /** * bytes payload = 2; * @return The payload. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, method_); } if (!payload_.isEmpty()) { output.writeBytes(2, payload_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, method_); } if (!payload_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, payload_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.protobuf.RpcRequest)) { return super.equals(obj); } com.protobuf.RpcRequest other = (com.protobuf.RpcRequest) obj; if (!getMethod() .equals(other.getMethod())) return false; if (!getPayload() .equals(other.getPayload())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + METHOD_FIELD_NUMBER; hash = (53 * hash) + getMethod().hashCode(); hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; hash = (53 * hash) + getPayload().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.protobuf.RpcRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.protobuf.RpcRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.protobuf.RpcRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.protobuf.RpcRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.protobuf.RpcRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.protobuf.RpcRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.protobuf.RpcRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.protobuf.RpcRequest parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.protobuf.RpcRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.protobuf.RpcRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.protobuf.RpcRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.protobuf.RpcRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.protobuf.RpcRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RpcRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:RpcRequest) com.protobuf.RpcRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.protobuf.Rpc.internal_static_RpcRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.protobuf.Rpc.internal_static_RpcRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.protobuf.RpcRequest.class, com.protobuf.RpcRequest.Builder.class); } // Construct using com.protobuf.RpcRequest.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; method_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.protobuf.Rpc.internal_static_RpcRequest_descriptor; } @java.lang.Override public com.protobuf.RpcRequest getDefaultInstanceForType() { return com.protobuf.RpcRequest.getDefaultInstance(); } @java.lang.Override public com.protobuf.RpcRequest build() { com.protobuf.RpcRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.protobuf.RpcRequest buildPartial() { com.protobuf.RpcRequest result = new com.protobuf.RpcRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartial0(com.protobuf.RpcRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.method_ = method_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.payload_ = payload_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.protobuf.RpcRequest) { return mergeFrom((com.protobuf.RpcRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.protobuf.RpcRequest other) { if (other == com.protobuf.RpcRequest.getDefaultInstance()) return this; if (!other.getMethod().isEmpty()) { method_ = other.method_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) { setPayload(other.getPayload()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { method_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { payload_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object method_ = ""; /** * string method = 1; * @return The method. */ public java.lang.String getMethod() { java.lang.Object ref = method_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); method_ = s; return s; } else { return (java.lang.String) ref; } } /** * string method = 1; * @return The bytes for method. */ public com.google.protobuf.ByteString getMethodBytes() { java.lang.Object ref = method_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); method_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string method = 1; * @param value The method to set. 
* @return This builder for chaining. */ public Builder setMethod( java.lang.String value) { if (value == null) { throw new NullPointerException(); } method_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * string method = 1; * @return This builder for chaining. */ public Builder clearMethod() { method_ = getDefaultInstance().getMethod(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * string method = 1; * @param value The bytes for method to set. * @return This builder for chaining. */ public Builder setMethodBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); method_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; /** * bytes payload = 2; * @return The payload. */ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } /** * bytes payload = 2; * @param value The payload to set. * @return This builder for chaining. */ public Builder setPayload(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } payload_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * bytes payload = 2; * @return This builder for chaining. */ public Builder clearPayload() { bitField0_ = (bitField0_ & ~0x00000002); payload_ = getDefaultInstance().getPayload(); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:RpcRequest) } // @@protoc_insertion_point(class_scope:RpcRequest) private static final com.protobuf.RpcRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.protobuf.RpcRequest(); } public static com.protobuf.RpcRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public RpcRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.protobuf.RpcRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! 
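// OrBuilder interfaces expose the read-only accessors that a message and its
// Builder share, so code that only needs getters can accept either one.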
// source: rpc.proto package com.protobuf; public interface RpcRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:RpcRequest) com.google.protobuf.MessageOrBuilder { /** * string method = 1; * @return The method. */ java.lang.String getMethod(); /** * string method = 1; * @return The bytes for method. */ com.google.protobuf.ByteString getMethodBytes(); /** * bytes payload = 2; * @return The payload. */ com.google.protobuf.ByteString getPayload(); } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcResponse.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: rpc.proto package com.protobuf; /** * Protobuf type {@code RpcResponse} */ public final class RpcResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:RpcResponse) RpcResponseOrBuilder { private static final long serialVersionUID = 0L; // Use RpcResponse.newBuilder() to construct. private RpcResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RpcResponse() { message_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new RpcResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.protobuf.Rpc.internal_static_RpcResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.protobuf.Rpc.internal_static_RpcResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.protobuf.RpcResponse.class, com.protobuf.RpcResponse.Builder.class); } public static final int CODE_FIELD_NUMBER = 1; private int code_ = 0; /** * int32 code = 1; * @return The code. */ @java.lang.Override public int getCode() { return code_; } public static final int MESSAGE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object message_ = ""; /** * string message = 2; * @return The message. */ @java.lang.Override public java.lang.String getMessage() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } } /** * string message = 2; * @return The bytes for message. */ @java.lang.Override public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAYLOAD_FIELD_NUMBER = 3; private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; /** * bytes payload = 3; * @return The payload. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (code_ != 0) { output.writeInt32(1, code_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_); } if (!payload_.isEmpty()) { output.writeBytes(3, payload_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (code_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, code_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_); } if (!payload_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, payload_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.protobuf.RpcResponse)) { return super.equals(obj); } com.protobuf.RpcResponse other = (com.protobuf.RpcResponse) obj; if (getCode() != other.getCode()) return false; if (!getMessage() .equals(other.getMessage())) return false; if (!getPayload() .equals(other.getPayload())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CODE_FIELD_NUMBER; hash = (53 * hash) + getCode(); hash = (37 * hash) + MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getMessage().hashCode(); hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; hash = (53 * hash) + getPayload().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.protobuf.RpcResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.protobuf.RpcResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.protobuf.RpcResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.protobuf.RpcResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.protobuf.RpcResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.protobuf.RpcResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.protobuf.RpcResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.protobuf.RpcResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.protobuf.RpcResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.protobuf.RpcResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.protobuf.RpcResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.protobuf.RpcResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.protobuf.RpcResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RpcResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:RpcResponse) com.protobuf.RpcResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.protobuf.Rpc.internal_static_RpcResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.protobuf.Rpc.internal_static_RpcResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.protobuf.RpcResponse.class, com.protobuf.RpcResponse.Builder.class); } // Construct using com.protobuf.RpcResponse.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; code_ = 0; message_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.protobuf.Rpc.internal_static_RpcResponse_descriptor; } @java.lang.Override public com.protobuf.RpcResponse getDefaultInstanceForType() { return com.protobuf.RpcResponse.getDefaultInstance(); } @java.lang.Override public com.protobuf.RpcResponse build() { com.protobuf.RpcResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.protobuf.RpcResponse buildPartial() { com.protobuf.RpcResponse result = new com.protobuf.RpcResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.protobuf.RpcResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.code_ = code_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.message_ = message_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.payload_ = payload_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.protobuf.RpcResponse) { return mergeFrom((com.protobuf.RpcResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.protobuf.RpcResponse other) { if (other == com.protobuf.RpcResponse.getDefaultInstance()) return this; if 
(other.getCode() != 0) { setCode(other.getCode()); } if (!other.getMessage().isEmpty()) { message_ = other.message_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) { setPayload(other.getPayload()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { code_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { message_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { payload_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int code_ ; /** * int32 code = 1; * @return The code. */ @java.lang.Override public int getCode() { return code_; } /** * int32 code = 1; * @param value The code to set. * @return This builder for chaining. */ public Builder setCode(int value) { code_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * int32 code = 1; * @return This builder for chaining. */ public Builder clearCode() { bitField0_ = (bitField0_ & ~0x00000001); code_ = 0; onChanged(); return this; } private java.lang.Object message_ = ""; /** * string message = 2; * @return The message. */ public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } else { return (java.lang.String) ref; } } /** * string message = 2; * @return The bytes for message. */ public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string message = 2; * @param value The message to set. * @return This builder for chaining. */ public Builder setMessage( java.lang.String value) { if (value == null) { throw new NullPointerException(); } message_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * string message = 2; * @return This builder for chaining. */ public Builder clearMessage() { message_ = getDefaultInstance().getMessage(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * string message = 2; * @param value The bytes for message to set. * @return This builder for chaining. 
*/ public Builder setMessageBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); message_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; /** * bytes payload = 3; * @return The payload. */ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } /** * bytes payload = 3; * @param value The payload to set. * @return This builder for chaining. */ public Builder setPayload(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } payload_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * bytes payload = 3; * @return This builder for chaining. */ public Builder clearPayload() { bitField0_ = (bitField0_ & ~0x00000004); payload_ = getDefaultInstance().getPayload(); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:RpcResponse) } // @@protoc_insertion_point(class_scope:RpcResponse) private static final com.protobuf.RpcResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.protobuf.RpcResponse(); } public static com.protobuf.RpcResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public RpcResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.protobuf.RpcResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcResponseOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: rpc.proto package com.protobuf; public interface RpcResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:RpcResponse) com.google.protobuf.MessageOrBuilder { /** * int32 code = 1; * @return The code. */ int getCode(); /** * string message = 2; * @return The message. */ java.lang.String getMessage(); /** * string message = 2; * @return The bytes for message. */ com.google.protobuf.ByteString getMessageBytes(); /** * bytes payload = 3; * @return The payload. 
*/ com.google.protobuf.ByteString getPayload(); } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/Slave.java ================================================ package com.protobuf; import java.util.Map; import com.pinecone.framework.util.json.homotype.StructJSONEncoder; public class Slave { public String name ; public long length; public int emnus; public Parasite parasite; public Map atts; public Object[] li; //public Slave child; //public List children; public Slave() { } public String getName() { return this.name; } public long getLength() { return this.length; } public void setName( String name ) { this.name = name; } public void setLength( long length ) { this.length = length; } public void setParasite2( Parasite parasite ) { this.parasite = parasite; } public Parasite getParasite() { return this.parasite; } public void setParasite( Parasite parasite ) { this.parasite = parasite; } public Map getAtts() { return this.atts; } public void setAtts(Map atts) { this.atts = atts; } // public List getChildren() { // return this.children; // } public String toJSONString() { return StructJSONEncoder.BasicEncoder.encode( this ); } public String toString(){ return StructJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestKafkaClient.java ================================================ package com.protobuf; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.umb.UMBClientException; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.UlfMBInformMessage; import com.pinecone.hydra.umb.UlfPackageMessageHandler; import com.pinecone.hydra.umb.broadcast.BroadcastConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; import com.pinecone.hydra.umb.kafka.KafkaClient; import com.pinecone.hydra.umb.kafka.WolfMCKafkaClient; import com.pinecone.hydra.umb.rocket.RocketClient; import com.pinecone.hydra.umb.rocket.RocketMQClient; import com.pinecone.hydra.umb.rocket.UlfRocketClient; import com.pinecone.hydra.umb.rocket.WolfMCRocketClient; import com.pinecone.hydra.umb.wolf.WolfMCBClient; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umct.UMCTExpressHandler; import com.pinecone.hydra.umct.WolfMCExpress; import com.pinecone.tritium.Tritium; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import java.io.IOException; class Luben extends Tritium { public Luben( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Luben( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { //this.testFundamental(); //this.testWolfMB(); //this.testWolfMCTB(); //this.testKafka(); //this.testWolfKafka(); this.testWolfMCTBKafka(); } public void testFundamental() throws Exception { String 
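// testFundamental: a plain RocketMQ publish/consume round trip. It starts a
// consumer on the topic with a byte[] handler, starts a producer, sends one
// message, then sleeps so the asynchronous handler has time to log the body.
// (The name-server/group/topic values below are local-test placeholders.)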
nameSrvAddr = "localhost:9876"; String groupName = "testGroup"; String topic = "testTopic"; String tags = "*"; String keys = "testKeys"; String body = "This is a test message"; RocketClient client = new RocketMQClient( nameSrvAddr, groupName ); BroadcastConsumer consumer = client.createConsumer( topic ); consumer.start(new UlfPackageMessageHandler() { @Override public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception { Debug.trace( new String( body ) ); } }); BroadcastProducer producer = client.createProducer(); producer.start(); producer.sendMessage( topic, body.getBytes() ); Debug.sleep( 100000 ); } public void testKafka() throws UMBClientException, UMBServiceException { String server = "localhost:9092"; String keySerializer = StringSerializer.class.getName(); String valueSerializer = StringSerializer.class.getName(); String topic = "testTopic"; String group = "testGroup"; String keyDeserializer = StringDeserializer.class.getName(); String valueDeserializer = StringDeserializer.class.getName(); String autoOffsetReset = "earliest"; KafkaClient kafkaClient = new KafkaClient( server ); byte[] bytes = new byte[100000]; for( int i=0; i< 100000; i++ ){ int j = 0; j = i % 128; bytes[i] = (byte) j; } BroadcastProducer producer = kafkaClient.createProducer(); producer.sendMessage( topic, bytes ); BroadcastConsumer consumer = kafkaClient.createConsumer(topic,group); consumer.start(new UlfPackageMessageHandler() { @Override public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception { Debug.trace( body.length ); for( byte c : body ){ Debug.trace(c); } } }); } public void testWolfKafka() throws UMBServiceException, UMBClientException { String server = "localhost:9092"; String keySerializer = StringSerializer.class.getName(); String valueSerializer = StringSerializer.class.getName(); String topic = "testTopic"; String group = "testGroup"; String keyDeserializer = StringDeserializer.class.getName(); String valueDeserializer = StringDeserializer.class.getName(); String autoOffsetReset = "earliest"; WolfMCKafkaClient wolfMCKafkaClient = new WolfMCKafkaClient( server ); UMCBroadcastProducer producer = wolfMCKafkaClient.createUlfProducer(); producer.sendMessage( topic,"你好".getBytes() ); UMCBroadcastConsumer consumer = wolfMCKafkaClient.createUlfConsumer(topic, group); consumer.start( new UlfPackageMessageHandler() { @Override public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception { Debug.trace( new String( body ) ); } } ); } public void testWolfMB() throws Exception { String nameSrvAddr = "localhost:9876"; String groupName = "testGroup"; String topic = "testTopic"; String tags = "*"; String keys = "testKeys"; UlfRocketClient client = new WolfMCRocketClient( nameSrvAddr, groupName ); UMCBroadcastConsumer consumer = client.createUlfConsumer( topic ); consumer.start(new UMCTExpressHandler() { @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { if ( msg.evinceTransferMessage() != null ) { Debug.greenfs( msg.getHead(), new String( (byte[]) msg.evinceTransferMessage().getBody() ) ); } else { Debug.redf( msg.getHead() ); } } }); UMCBroadcastProducer producer = client.createUlfProducer(); producer.start(); producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( "{ path: '/user/getName ' }" ) ) ); //producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( "{ msg: 'Jesus, Mr.Garrison! 
' }" ), 0xFA ) ); //producer.sendMessage( topic, new UlfBytesTransferMessage( new JSONMaptron( "{ msg: 'Jesus, Mr.Garrison! ' }" ), "fuck you" ) ); Debug.sleep( 100000 ); } public void testWolfMCTBKafka() throws IOException { String server = "b-serverkingpin:9092"; String keySerializer = StringSerializer.class.getName(); String valueSerializer = StringSerializer.class.getName(); String topic = "testTopic"; String group = "testGroup"; String keyDeserializer = StringDeserializer.class.getName(); String valueDeserializer = StringDeserializer.class.getName(); String autoOffsetReset = "earliest"; WolfMCBClient client = new WolfMCBClient(new WolfMCKafkaClient(server), "", this, WolfMCExpress.class); client.compile( Raccoon.class, false ); BroadcastControlProducer producer = client.createBroadcastControlProducer(); producer.start(); for ( int i = 0; i < 1e4; i++ ) { producer.issueInform( topic, "com.protobuf.Raccoon.scratch", "fuck you !", 2025 ); } Raccoon raccoon = producer.getIface( Raccoon.class, topic ); //raccoon.scratch("haha, I am XiaoMing", 5202 ); // Rabbit rabbit = new Rabbit(); // rabbit.name = "rabbit"; // rabbit.bytes = new byte[999*1024]; // Arrays.fill(rabbit.bytes, (byte)43); // Debug.bluef( raccoon.scratchA( "DP you!", 741741, rabbit ) ); BroadcastControlConsumer consumer = client.createBroadcastControlConsumer(topic,group); RaccoonController controller = new RaccoonController(); consumer.registerController( controller ); consumer.start(); Debug.sleep( 100000 ); } } public class TestKafkaClient { public static void main(String[] args) throws Exception { Pinecone.init( (Object...cfg )->{ Luben luben = (Luben) Pinecone.sys().getTaskManager().add( new Luben( args, Pinecone.sys() ) ); luben.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestProtobuf.java ================================================ package com.protobuf; import java.io.FileOutputStream; import java.lang.reflect.Method; import java.util.Map; import java.util.Set; import com.google.protobuf.ByteString; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.mc.JesusChrist; import com.pinecone.Pinecone; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.framework.lang.field.GenericStructure; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.ClassUtils; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.umc.msg.ChannelControlBlock; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter; import com.pinecone.hydra.umc.wolf.UlfInformMessage; import com.pinecone.hydra.umc.wolf.client.WolfMCClient; import com.pinecone.hydra.umc.wolf.server.WolfMCServer; import com.pinecone.hydra.umct.WolfMCExpress; import com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler; import com.pinecone.hydra.umct.husky.function.GenericArgumentRequest; import com.pinecone.ulf.util.protobuf.GenericBeanProtobufDecoder; import com.pinecone.ulf.util.protobuf.GenericBeanProtobufEncoder; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder; import com.pinecone.ulf.util.protobuf.GenericFieldProtobufEncoder; import com.pinecone.ulf.util.protobuf.Options; import 
com.pinecone.tritium.messagron.Messagron;
import io.netty.channel.ChannelHandlerContext;
import javassist.ClassPool;

// Builds, at runtime, message descriptors whose layout matches the generated Rpc descriptors.
class DynamicProtobufBuilder {
    public static Descriptors.Descriptor buildRpcRequestDescriptor() throws Descriptors.DescriptorValidationException {
        DescriptorProtos.DescriptorProto rpcRequestProto = DescriptorProtos.DescriptorProto.newBuilder()
                .setName("RpcRequest")
                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()
                        .setName("method")
                        .setNumber(1)
                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING))
                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()
                        .setName("payload")
                        .setNumber(2)
                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_BYTES))
                .build();

        DescriptorProtos.FileDescriptorProto fileDescriptorProto = DescriptorProtos.FileDescriptorProto.newBuilder()
                .setName("rpc.proto")
                .addMessageType(rpcRequestProto)
                .build();

        Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(fileDescriptorProto, new Descriptors.FileDescriptor[0]);
        return fileDescriptor.findMessageTypeByName("RpcRequest");
    }

    public static Descriptors.Descriptor buildRpcResponseDescriptor() throws Descriptors.DescriptorValidationException {
        DescriptorProtos.DescriptorProto rpcResponseProto = DescriptorProtos.DescriptorProto.newBuilder()
                .setName("RpcResponse")
                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()
                        .setName("code")
                        .setNumber(1)
                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32))
                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()
                        .setName("message")
                        .setNumber(2)
                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING))
                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()
                        .setName("payload")
                        .setNumber(3)
                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_BYTES))
                .build();

        DescriptorProtos.FileDescriptorProto fileDescriptorProto = DescriptorProtos.FileDescriptorProto.newBuilder()
                .setName("rpc.proto")
                .addMessageType(rpcResponseProto)
                .build();

        Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(fileDescriptorProto, new Descriptors.FileDescriptor[0]);
        return fileDescriptor.findMessageTypeByName("RpcResponse");
    }
}

class Appleby extends JesusChrist {
    public Appleby( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Appleby( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize() throws Exception {
//        RpcRequest request = RpcRequest.newBuilder()
//                .setMethod("haha")
//                .setPayload( ByteString.copyFrom( new byte[]{123} ) )
//                .build();
//
//        byte[] serializedData = request.toByteArray();
//
//        RpcRequest deserializedReq = RpcRequest.parseFrom(serializedData);
//        Debug.trace( deserializedReq.getMethod() );

        //this.testDynamic();
        //this.testDynamicUMCT();
        //this.testElementProto();
        //this.testFieldEntry();
        //this.testReflect();
        //this.testManualRPCServer();
        //this.testManualRPCClient();
        //this.testStructure();
        //this.testIfacInspector();
    }

    private void testDynamic() throws Exception {
        Descriptors.Descriptor rpcRequestDescriptor  = DynamicProtobufBuilder.buildRpcRequestDescriptor();
        Descriptors.Descriptor rpcResponseDescriptor = DynamicProtobufBuilder.buildRpcResponseDescriptor();

        String method  = "echo";
        byte[] payload = "Dragon King".getBytes();

        DynamicMessage request1 = DynamicMessage.newBuilder(rpcRequestDescriptor)
                .setField(rpcRequestDescriptor.findFieldByName("method"), method)
                .setField(rpcRequestDescriptor.findFieldByName("payload"),
com.google.protobuf.ByteString.copyFrom(payload)) .build(); byte[] rd = request1.toByteArray(); DynamicMessage request = DynamicMessage.parseFrom(rpcRequestDescriptor, rd); String method1 = (String) request.getField(rpcRequestDescriptor.findFieldByName("method")); ByteString payload1 = (ByteString) request.getField(rpcRequestDescriptor.findFieldByName("payload")); DynamicMessage response = DynamicMessage.newBuilder(rpcResponseDescriptor) .setField(rpcResponseDescriptor.findFieldByName("code"), 200) .setField(rpcResponseDescriptor.findFieldByName("message"), "Success") .setField(rpcResponseDescriptor.findFieldByName("payload"), payload1) .build(); FileOutputStream ofs = new FileOutputStream( "e:/sss.bin" ); ofs.write( rd ); ofs.close(); Debug.greenf( rd ); Debug.infoSyn( ( (ByteString)DynamicMessage.parseFrom(rpcRequestDescriptor, rd).getField(rpcRequestDescriptor.findFieldByName("payload")) ).toStringUtf8() ); } private void testDynamicUMCT() throws Exception { Slave slave = JSON.unmarshal( "{ name:Slave, length:1234, parasite:{ name: parasite, length:20241102 }, atts: { key:val }, li:[1,2,3, 'ssss']," + "children: [{ name:SlaveChild, length:137, parasite:{ name: parasitec, length:20241117 } } ] }", Slave.class ); Debug.trace( 2, slave ); //// GenericBeanProtobufEncoder encoder = new GenericBeanProtobufEncoder(); Descriptors.Descriptor descriptor = encoder.transform( Slave.class, slave, Set.of() ); Debug.trace( descriptor.getFields() ); Options options = new Options(); DynamicMessage message = encoder.encode( descriptor, slave, Set.of(), options ); Debug.trace( message.getAllFields(), descriptor.findFieldByName( "parasite" ).getMessageType().getFields() ); byte[] rd = message.toByteArray(); DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, rd); Debug.trace( unmarshaled.getAllFields() ); GenericBeanProtobufDecoder decoder = new GenericBeanProtobufDecoder(); Map dm = decoder.decodeMap( descriptor, unmarshaled, Set.of(), options ); Debug.purplef( dm ); Slave neo = decoder.decode( Slave.class, descriptor, unmarshaled, Set.of(), options ); Debug.purplef( neo ); // Map bear = JSON.unmarshal( "{ name: 'William', force: 320, values: [1,2,3], type: grizzly, trait: { species: mammal } }", Map.class ); // Debug.trace( bear ); // Options options = new Options(); // Descriptors.Descriptor descriptor = encoder.transform( Map.class, bear, Set.of(), options ); // Debug.trace( descriptor.getFields() ); // Debug.trace( descriptor.findFieldByName( "values" ).isRepeated() ); // Debug.trace( descriptor.findFieldByName( "trait" ).getMessageType().getFields() ); // // DynamicMessage message = encoder.encode( descriptor, bear, Set.of(), options ); // Debug.trace( message.getAllFields(), descriptor.findFieldByName( "trait" ).getMessageType().getFields() ); // Debug.trace( message.getField( descriptor.findFieldByName( "values" ) ) ); // // byte[] rd = message.toByteArray(); // DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, rd); // Debug.trace( unmarshaled.getAllFields() ); // Debug.trace( unmarshaled.getField( descriptor.findFieldByName( "values" ) ) ); // // // GenericBeanProtobufDecoder decoder = new GenericBeanProtobufDecoder(); // Map dm = decoder.decodeMap( descriptor, unmarshaled, Set.of(), options ); // Debug.purplef( dm ); // FileOutputStream ofs = new FileOutputStream( "e:/sss.bin" ); // ofs.write( rd ); // ofs.close(); // Debug.greenf( rd ); // Bear bear = JSON.unmarshal( "{ name: 'William', force: 320, values: [1,2,3], type: grizzly }", Bear.class ); // Debug.trace( 
bear ); // Options options = new Options(); // Descriptors.Descriptor descriptor = encoder.transform( Bear.class, bear, Set.of(), options ); // Debug.trace( descriptor.findFieldByName( "values" ).isRepeated() ); // // DynamicMessage message = encoder.decode( descriptor, bear, Set.of(), options ); // Debug.trace( message.getAllFields() ); // Debug.trace( message.getField( descriptor.findFieldByName( "values" ) ) ); } private void testElementRroto() throws Exception { String sz = "miaomiao"; GenericBeanProtobufEncoder encoder = new GenericBeanProtobufEncoder(); Descriptors.Descriptor descriptor = encoder.transform( String.class, sz, Set.of() ); Debug.trace( descriptor.getFields() ); Options options = new Options(); DynamicMessage message = encoder.encode( descriptor, sz, Set.of(), options ); Debug.trace( message.getAllFields() ); GenericBeanProtobufDecoder decoder = new GenericBeanProtobufDecoder(); var dm = decoder.decode( String.class, descriptor, message, Set.of(), options ); Debug.purplef( dm ); } private void testFieldEntry() throws Exception { GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder(); Map bear = JSON.unmarshal( "{ name: 'William', force: 320, values: [1,2,3], type: grizzly, trait: { species: mammal } }", Map.class ); Debug.trace( bear ); Options options = new Options(); FieldEntity[] entities = FieldEntity.from( bear ); Descriptors.Descriptor descriptor = encoder.transform( entities, "Args", Set.of(), options ); Debug.trace( descriptor.getFields() ); Debug.trace( descriptor.findFieldByName( "values" ).isRepeated() ); Debug.trace( descriptor.findFieldByName( "trait" ).getMessageType().getFields() ); DynamicMessage message = encoder.encode( descriptor, entities, Set.of(), options ); Debug.trace( message.getAllFields(), descriptor.findFieldByName( "trait" ).getMessageType().getFields() ); Debug.trace( message.getField( descriptor.findFieldByName( "values" ) ) ); byte[] rd = message.toByteArray(); DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, rd); Debug.trace( unmarshaled.getAllFields() ); Debug.trace( unmarshaled.getField( descriptor.findFieldByName( "values" ) ) ); FieldEntity[] types = FieldEntity.typeFrom( bear ); GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder(); Map.Entry[] kvs = decoder.decodeEntries( descriptor, unmarshaled, Set.of(), options ); Debug.trace( kvs ); decoder.decodeEntries( types, descriptor, unmarshaled, Set.of(), options ); Debug.trace( types[4].getType() ); Object[] vals = decoder.decodeValues( types, descriptor, unmarshaled, Set.of(), options ); Debug.trace( vals ); } private void testReflect() throws Exception { GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder(); Options options = new Options(); GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder(); Method[] methods = Raccoon.class.getMethods(); for ( Method method : methods ) { FieldEntity[] types = FieldEntity.from( method.getParameterTypes() ); Debug.trace( types ); types[ 0 ].setValue( "red_raccoon" ); types[ 1 ].setValue( 12345L ); Descriptors.Descriptor descriptor = encoder.transform( types, "Args", Set.of(), options ); Debug.trace( descriptor.getFields() ); DynamicMessage message = encoder.encode( descriptor, types, Set.of(), options ); Debug.trace( message.getAllFields() ); types = FieldEntity.from( method.getParameterTypes() ); Object[] vals = decoder.decodeValues( types, descriptor, message, Set.of(), options ); Debug.trace( vals ); } } private void testManualRPCServer() throws Exception { 
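        // Hand-rolled RPC round trip without generated stubs: the client (testManualRPCClient below)
        // encodes call arguments into a DynamicMessage built from Raccoon's method signature and ships
        // the bytes in a UlfInformMessage's extra head; this server decodes the arguments against the
        // same descriptor and echoes the first argument back as an encoded String result.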
Messagron messagron = new Messagron( "", this, new JSONMaptron() ); WolfMCServer wolf = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); Method[] methods = Raccoon.class.getMethods(); Class retType = methods[0].getReturnType(); GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder(); Options options = new Options(); String sz = "xixihaha"; Descriptors.Descriptor descriptor = encoder.transform( String.class, sz, Set.of() ); Debug.trace( descriptor.getFields() ); DynamicMessage message = encoder.encode( descriptor, sz, Set.of(), options ); Debug.trace( message.getAllFields() ); wolf.apply( new UlfAsyncMsgHandleAdapter() { public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception { UlfInformMessage mc = (UlfInformMessage) rawMsg; byte[]bytes = (byte[]) mc.getHead().getExtraHead(); Method[] methods = Raccoon.class.getMethods(); FieldEntity[] types = FieldEntity.from( methods[0].getParameterTypes() ); Debug.trace( types ); GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder(); Options options = new Options(); Descriptors.Descriptor descriptor = encoder.transform( types, "Args", Set.of(), options ); Debug.trace( descriptor.getFields() ); DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, bytes); GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder(); types = FieldEntity.from( methods[0].getParameterTypes() ); Object[] vals = decoder.decodeValues( types, descriptor, unmarshaled, Set.of(), options ); Debug.trace( vals ); String sz = vals[0].toString(); Descriptors.Descriptor retDes = encoder.transform( String.class, sz, Set.of() ); Debug.trace( retDes.getFields() ); DynamicMessage retMsg = encoder.encode( retDes, sz, Set.of(), options ); block.getTransmit().sendMsg(new UlfInformMessage(retMsg.toByteArray())); } }); wolf.execute(); this.getTaskManager().add( wolf ); //this.getTaskManager().syncWaitingTerminated(); } private void testManualRPCClient() throws Exception { Messagron servtron = new Messagron( "", this, new JSONMaptron( "{\n" + " \"Engine\" : \"com.pinecone.tritium.messagron.Messagron\",\n" + " \"Enable\" : true,\n" + " \"ExpressFactory\" : \"com.pinecone.framework.util.lang.GenericDynamicFactory\",\n" + "\n" + " \"Expresses\" : {\n" + " \"WolfMCExpress\": {\n" + " \"Engine\": \"com.pinecone.hydra.umct.WolfMCExpress\"\n" + " }\n" + " }\n" + "}" ) ); WolfMCClient wolf = new WolfMCClient( "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ); wolf.apply( new WolfMCExpress( servtron ) ).execute(); Method[] methods = Raccoon.class.getMethods(); FieldEntity[] types = FieldEntity.from( methods[0].getParameterTypes() ); Debug.trace( types ); GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder(); Options options = new Options(); Descriptors.Descriptor descriptor = encoder.transform( types, "Args", Set.of(), options ); Debug.trace( descriptor.getFields() ); types[0].setValue("fuck you"); types[1].setValue(2024); DynamicMessage message = encoder.encode( descriptor, types, Set.of(), options ); Debug.trace( message.getAllFields() ); Debug.sleep( 500 ); UMCMessage retMsg = wolf.sendSyncMsg(new UlfInformMessage(message.toByteArray())); if(retMsg instanceof UlfInformMessage) { Debug.trace(retMsg.getHead().getExtraHead()); Descriptors.Descriptor retDes = 
encoder.transform( String.class, "", Set.of() ); Debug.trace( retDes.getFields() ); DynamicMessage retDy = DynamicMessage.parseFrom( retDes, (byte[])retMsg.getHead().getExtraHead() ); GenericBeanProtobufDecoder decoder = new GenericBeanProtobufDecoder(); var dm = decoder.decode( String.class, retDes, retDy, Set.of(), options ); Debug.info(dm); } this.getTaskManager().add( wolf ); this.getTaskManager().syncWaitingTerminated(); } protected void testStructure() throws Exception { GenericStructure structure = new GenericStructure( "test.red", 3 ); structure.setDataField( 0, "name", "test" ); structure.setDataField( 1, "t1", "v1" ); structure.setDataField( 2, "t2", new JSONMaptron( "{ k: v}" ) ); Debug.trace( structure, structure.findDataField( "t2" ), structure.findTextField( "__NAME__" ), structure.findTextField( "sss" ) ); structure.resize( 4 ); structure.setDataField( 3, "t3", 3 ); Debug.trace( structure ); //structure.setDataOffset( 2 ); //structure.setTextOffset( 3 ); Debug.trace( structure, structure.size(), structure.capacity() ); Method method = ClassUtils.getFirstMethodByName( Raccoon.class, "scratch" ); if( method != null ) { GenericArgumentRequest request = new GenericArgumentRequest( Raccoon.class.getName(), method.getParameterTypes() ); Debug.trace( request, request.getAddressPath(), request.getInterceptedPath(), request.getInterceptorName(), request.getSegments() ); } Raccoon raccoon = new RedRaccoon(); Debug.trace( raccoon.scratch( "you", 166 ) ); } protected void testIfacInspector() throws Exception { BytecodeIfaceCompiler inspector = new BytecodeIfaceCompiler( ClassPool.getDefault() ); Debug.trace( inspector.compile( Raccoon.class, false ).getMethodDigests() ); } } public class TestProtobuf { public static void main( String[] args ) throws Exception { //String[] as = args; String[] as = new String[]{ "TestWolfMCClient=true" }; Pinecone.init( (Object...cfg )->{ Appleby appleby = (Appleby) Pinecone.sys().getTaskManager().add( new Appleby( as, Pinecone.sys() ) ); appleby.vitalize(); return 0; }, (Object[]) as ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestRPCSystem.java ================================================ package com.protobuf; import java.io.IOException; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import com.mc.JesusChrist; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import com.pinecone.hydra.umc.wolf.client.WolfMCClient; import com.pinecone.hydra.umc.wolf.server.WolfMCServer; import com.pinecone.hydra.uma.HuskyDuplexExpress; import com.pinecone.hydra.uma.wolf.WolfAppointClient; import com.pinecone.hydra.uma.wolf.WolfAppointServer; import com.pinecone.hydra.uma.wolf.WolvesAppointClient; import com.pinecone.hydra.uma.wolf.WolvesAppointServer; import com.pinecone.hydra.umct.husky.compiler.ClassDigest; import com.pinecone.hydra.umct.husky.machinery.HuskyMappingLoader; import com.pinecone.hydra.umct.husky.machinery.MultiMappingLoader; import com.pinecone.hydra.umct.mapping.BytecodeControllerInspector; import com.pinecone.hydra.umct.mapping.MappingDigest; import com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler; import com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype; import 
com.pinecone.hydra.umct.husky.compiler.MethodDigest; import com.pinecone.tritium.messagron.Messagron; import javassist.ClassPool; class Jeff extends JesusChrist { public Jeff( String[] args, CascadeSystem parent ) { this(args, null, parent); } public Jeff( String[] args, String szName, CascadeSystem parent ) { super(args, szName, parent); } @Override public void vitalize () throws Exception { //this.testProtoRPCServer(); //this.testProtoRPCClient(); //this.testIfaceProxy(); //this.testController(); //this.testProtoRPCServerController(); //this.testClassScanner(); this.testDuplex(); } private void testProtoRPCServer() throws Exception { Messagron messagron = new Messagron( "", this, new JSONMaptron() ); WolfMCServer wolf1 = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); WolfAppointServer wolf = new WolfAppointServer( wolf1 ); BytecodeIfaceCompiler inspector = new BytecodeIfaceCompiler( ClassPool.getDefault() ); List digests = inspector.compile( Raccoon.class, false ).getMethodDigests(); MethodDigest digest = digests.get( 0 ); DynamicMethodPrototype prototype = (DynamicMethodPrototype) digest; // // // wolf.getDefaultDeliver().registerController("com.protobuf.Raccoon.scratch", new MessageHandler() { // @Override // public String getAddressMapping() { // return null; // } // // @Override // public Object invoke( Object... args ) throws Exception { // Debug.purplef( args ); // // return "miaomiao"; // } // // @Override // public List getArgumentsKey() { // return null; // } // // @Override // public Object getReturnDescriptor() { // return prototype.getReturnDescriptor(); // } // // @Override // public Object getArgumentsDescriptor() { // return prototype.getArgumentsDescriptor(); // } // }); RaccoonKing raccoonKing = new RaccoonKing(); wolf.registerInstance( raccoonKing, Raccoon.class ); wolf.execute(); this.getTaskManager().add( wolf ); //this.getTaskManager().syncWaitingTerminated(); } private void testProtoRPCClient() throws Exception { WolfAppointClient wolf = new WolfAppointClient( new WolfMCClient( 2048, "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ) ); wolf.execute(); wolf.compile( Raccoon.class, false ); DynamicMethodPrototype digest = (DynamicMethodPrototype)wolf.queryMethodDigest( "com.protobuf.Raccoon.scratch" ); Debug.sleep( 500 ); // wolf.invokeInformAsyn(digest, new Object[]{"fuck you", 2024}, new AsynReturnHandler() { // @Override // public void onSuccessfulReturn( Object ret ) throws Exception { // Debug.greenf( ret ); // } // // @Override // public void onErrorMsgReceived( UMCMessage msg ) throws Exception { // // } // }); Debug.greenf( wolf.invokeInform(digest, "a", 0 ) ); boolean testParallel = true; if ( testParallel ) { final AtomicInteger ai = new AtomicInteger(); for ( int j = 0; j < 10; ++j ) { final int id = j; Thread thread = new Thread(()->{ for ( int i = 0; i < 1e3; ++i ) { try { Debug.greenfs( wolf.invokeInform(digest, "afd", id + 7700 ), ai.getAndIncrement() ); } catch (IOException e) { e.printStackTrace(); } } }); thread.start(); } } //long s = System.currentTimeMillis(); for ( int i = 0; i < 1e2; ++i ) { Debug.greenf( wolf.invokeInform(digest, "afd", i ) ); } //Debug.redfs( System.currentTimeMillis() - s ); Debug.sleep( 1000000 ); this.getTaskManager().add( wolf ); this.getTaskManager().syncWaitingTerminated(); } protected void testIfaceProxy() throws Exception { WolfAppointClient 
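            // testIfaceProxy: getIface( Raccoon.class ) below evidently hands back a typed proxy over
            // the compiled method digests, so the remote scratch(...) calls read like local invocations.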
wolf = new WolfAppointClient( new WolfMCClient( "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ) ); wolf.execute(); wolf.compile( Raccoon.class, false ); Raccoon pRaccoon = wolf.getIface( Raccoon.class ); Debug.trace( pRaccoon.scratch( "tree ", 9133 ) ); Debug.trace( pRaccoon.scratch( "tref ", 9132 ) ); Debug.trace( pRaccoon.scratch( "treg ", 9131 ) ); Debug.trace( pRaccoon.scratch( "treh ", 9130 ) ); this.getTaskManager().add( wolf ); this.getTaskManager().syncWaitingTerminated(); } protected void testController() throws Exception { BytecodeControllerInspector inspector = new BytecodeControllerInspector( ClassPool.getDefault() ); List digests = inspector.characterize( RaccoonController.class ); Debug.greenf( digests ); } private void testProtoRPCServerController() throws Exception { Messagron messagron = new Messagron( "", this, new JSONMaptron() ); WolfMCServer wolf1 = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); WolfAppointServer wolf = new WolfAppointServer( wolf1 ); RaccoonController controller = new RaccoonController(); wolf.registerController( controller ); wolf.execute(); this.getTaskManager().add( wolf ); this.testProtoRPCClient(); //this.testIfaceProxy(); } private void testClassScanner() throws Exception { DynamicFactory factory = new GenericDynamicFactory(); WolfMCServer wolf1 = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); WolfAppointServer wolf = new WolfAppointServer( wolf1 ); factory.getClassScope().addScope( "com.protobuf" ); MultiMappingLoader mappingLoader = new HuskyMappingLoader( factory, wolf.getMCTTransformer() ); mappingLoader.updateScope(); Debug.trace( wolf ); } private void testDuplex() throws Exception { Messagron messagron = new Messagron( "", this, new JSONMaptron() ); WolfMCServer wolfKing = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); WolvesAppointServer wolf = new WolvesAppointServer(wolfKing, HuskyDuplexExpress.class); // wolfKing.registerChannelInactiveHandler(new ChannelInactiveHandler() { // @Override // public boolean afterChannelInactive( ChannelControlBlock ccb ) throws ChannelHandleException { // Debug.bluefs( ccb.getChannel().getChannelID(), ccb.getChannel().getIdentityID() ); // ChannelPool pool = wolf.getUMCTExpress().getPoolByClientId( ccb.getChannel().getIdentityID() ); // if ( pool != null ) { // Debug.redfs( pool.isEmpty() ); // } // return false; // } // }); RaccoonController controller = new RaccoonController(); wolf.registerController( controller ); wolf.execute(); this.testDuplexClient(); Debug.sleep( 100 ); ClassDigest digest = wolf.compile( Raccoon.class, false ); // for ( int i = 0; i < 2; i++ ) { // wolf.invokeInformAsyn(2048, "com.protobuf.Raccoon.scratch", new Object[]{"shit", 123}, new AsynReturnHandler() { // @Override // public void onSuccessfulReturn( Object ret ) throws Exception { // Debug.redfs( ret ); // } // // @Override // public void onErrorMsgReceived( UMCMessage msg ) throws Exception { // Debug.redfs( msg ); // } // }); // } //Debug.greenf( wolf.invokeInform( 2048, "com.protobuf.Raccoon.scratch", "fuck you", 2025 ) ); String[] ss = new String[] { "abc", "efg" }; //Debug.greenf( wolf.invokeInform( 2048, 
"com.protobuf.Raccoon.scratchS", "fuck you", 2025, ss ) ); Rabbit rabbit = new Rabbit(); rabbit.name = "rabbit"; rabbit.bool = true; rabbit.bytes = new byte[] { 1,2,3 }; Monkey monkey = new Monkey(); monkey.name = "monkey"; rabbit.setMonkey( monkey ); rabbit.setMonkeys( new Monkey[] { monkey, monkey } ); Rabbit sub = new Rabbit(); sub.setName( "haha" ); rabbit.setSub( sub ); //Rabbit[] args = new Rabbit[] { rabbit }; List args = List.of(rabbit); //Debug.greenf( wolf.invokeInform( 2048, "com.protobuf.Raccoon.scratchA", "fuck you", 2025, rabbit ) ); //Debug.greenf( wolf.invokeInform( 2048, "com.protobuf.Raccoon.scratchC", "fuck you", 2025, args ) ); Debug.greenf( wolf.invokeInform( 2048, "com.protobuf.Raccoon.scratchList", "fuck you", 2025, args ) ); //Debug.greenf( wolf.invokeInform( 2048, "com.protobuf.Raccoon.scratchVoid" ) ); //Debug.greenf( wolf.invokeInform( 2048, "com.protobuf.Raccoon.scratchPrime", "fuck you", 12025 ) ); //Debug.sleep( 3000 ); //Raccoon raccoon = wolf.getIface( 2048, Raccoon.class ); //Debug.greenf( raccoon.scratch( "fuck you", 202510 ) ); this.getTaskManager().add( wolf ); this.getTaskManager().syncWaitingTerminated(); } private void testDuplexClient() throws Exception { WolvesAppointClient wolf = new WolvesAppointClient( new WolfMCClient( 2048, "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ) ); wolf.execute(); wolf.compile( Raccoon.class, false ); DynamicMethodPrototype digest = (DynamicMethodPrototype)wolf.queryMethodDigest( "com.protobuf.Raccoon.scratch" ); RaccoonController controller = new RaccoonController(); wolf.getRouteDispatcher().registerController( controller ); Debug.sleep( 200 ); wolf.embraces(2); Rabbit rabbit = new Rabbit(); rabbit.name = "rabbit"; rabbit.bytes = new byte[] { 1,2,3 }; Monkey monkey = new Monkey(); monkey.name = "monkey"; rabbit.setMonkey( monkey ); //Debug.bluef( wolf.invokeInform( "com.protobuf.Raccoon.scratchA", "DP you!", 5202123, rabbit ) ); Debug.bluef( wolf.invokeInform( "com.protobuf.Raccoon.scratch", "DP you!", 5202 ) ); // Debug.sleep( 3500 ); // Debug.bluef( wolf.invokeInform( "com.protobuf.Raccoon.scratch", "DP you!", 5201 ) ); //Debug.greenf( wolf.invokeInform(digest, "fuck you", 2024 ) ); this.getTaskManager().add( wolf ); } } public class TestRPCSystem { public static void main( String[] args ) throws Exception { //String[] as = args; String[] as = new String[]{ "TestWolfMCClient=true" }; Pinecone.init( (Object...cfg )->{ Jeff jeff = (Jeff) Pinecone.sys().getTaskManager().add( new Jeff( as, Pinecone.sys() ) ); jeff.vitalize(); return 0; }, (Object[]) as ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestRocketClient.java ================================================ package com.protobuf; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.umb.UlfMBInformMessage; import com.pinecone.hydra.umb.UlfPackageMessageHandler; import com.pinecone.hydra.umb.broadcast.BroadcastConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlNode; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umb.broadcast.BroadcastProducer; import com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer; import 
com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer; import com.pinecone.hydra.umb.rocket.RocketClient; import com.pinecone.hydra.umb.rocket.RocketMQClient; import com.pinecone.hydra.umb.rocket.UlfRocketClient; import com.pinecone.hydra.umb.rocket.WolfMCRocketClient; import com.pinecone.hydra.umb.wolf.WolfMCBClient; import com.pinecone.hydra.umc.msg.Medium; import com.pinecone.hydra.umc.msg.UMCMessage; import com.pinecone.hydra.umc.msg.UMCReceiver; import com.pinecone.hydra.umc.msg.UMCTransmit; import com.pinecone.hydra.umct.UMCTExpressHandler; import com.pinecone.hydra.umct.WolfMCExpress; import com.pinecone.tritium.Tritium; class Garrison extends Tritium { public Garrison( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Garrison( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { //this.testFundamental(); //this.testWolfMB(); this.testWolfMCTB(); } public void testFundamental() throws Exception { String nameSrvAddr = "localhost:9876"; String groupName = "testGroup"; String topic = "testTopic"; String tags = "*"; String keys = "testKeys"; String body = "This is a test message"; RocketClient client = new RocketMQClient( nameSrvAddr, groupName ); BroadcastConsumer consumer = client.createConsumer( topic ); consumer.start(new UlfPackageMessageHandler() { @Override public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception { Debug.trace( new String( body ) ); } }); BroadcastProducer producer = client.createProducer(); producer.start(); producer.sendMessage( topic, body.getBytes() ); Debug.sleep( 100000 ); } public void testWolfMB() throws Exception { String nameSrvAddr = "localhost:9876"; String groupName = "testGroup"; String topic = "testTopic"; String tags = "*"; String keys = "testKeys"; UlfRocketClient client = new WolfMCRocketClient( nameSrvAddr, groupName ); UMCBroadcastConsumer consumer = client.createUlfConsumer( topic ); consumer.start(new UMCTExpressHandler() { @Override public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { if ( msg.evinceTransferMessage() != null ) { Debug.greenfs( msg.getHead(), new String( (byte[]) msg.evinceTransferMessage().getBody() ) ); } else { Debug.redf( msg.getHead() ); } } }); UMCBroadcastProducer producer = client.createUlfProducer(); producer.start(); producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( "{ path: '/user/getName ' }" ) ) ); //producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( "{ msg: 'Jesus, Mr.Garrison! ' }" ), 0xFA ) ); //producer.sendMessage( topic, new UlfBytesTransferMessage( new JSONMaptron( "{ msg: 'Jesus, Mr.Garrison! 
' }" ), "fuck you" ) ); Debug.sleep( 100000 ); } public void testWolfMCTB() throws Exception { String nameSrvAddr = "localhost:9876"; String groupName = "UCDNFileServiceTransmitGroup"; String topic = "ucdn-file-cloud-distribute-topic"; String tags = "*"; String keys = "testKeys"; BroadcastControlNode client = new WolfMCBClient( new WolfMCRocketClient( nameSrvAddr, groupName ), "", this, WolfMCExpress.class ); // UMCBroadcastConsumer consumer = client.createUlfConsumer( topic ); // consumer.start(new UMCTExpressHandler() { // @Override // public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception { // if ( msg.evinceTransferMessage() != null ) { // Debug.greenfs( msg.getHead(), new String( (byte[]) msg.evinceTransferMessage().getBody() ) ); // } // else { // byte[] bytes = (byte[]) msg.evinceInformMessage().getExHead(); // for ( int i = 0; i < bytes.length; ++i ) { // try{ // Debug.greenfs( (char)bytes[ i ] ); // } // catch ( Exception e ) { // e.printStackTrace(); // } // } // // Debug.redf( msg.getHead() ); // } // } // }); BroadcastControlConsumer consumer = client.createBroadcastControlConsumer( topic ); RaccoonController controller = new RaccoonController(); consumer.registerController( controller ); consumer.start(); client.compile( Raccoon.class, false ); BroadcastControlProducer producer = client.createBroadcastControlProducer(); producer.start(); producer.issueInform( topic, "com.protobuf.Raccoon.scratch", "fuck you !", 2025 ); Debug.sleep( 100000 ); } } public class TestRocketClient { public static void main(String[] args) throws Exception { Pinecone.init( (Object...cfg )->{ Garrison garrison = (Garrison) Pinecone.sys().getTaskManager().add( new Garrison( args, Pinecone.sys() ) ); garrison.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/rpc.proto ================================================ syntax = "proto3"; //package rpc; option java_multiple_files = true; option java_package = "com.protobuf"; message RpcRequest { string method = 1; bytes payload = 2; } message RpcResponse { int32 code = 1; string message = 2; bytes payload = 3; } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/Rpc.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: rpc.proto package com.protobuf.v3; public final class Rpc { private Rpc() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_RpcRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_RpcRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_RpcResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_RpcResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\trpc.proto\"-\n\nRpcRequest\022\016\n\006method\030\001 \001(" + "\t\022\017\n\007payload\030\002 \001(\014\"=\n\013RpcResponse\022\014\n\004cod" + "e\030\001 \001(\005\022\017\n\007message\030\002 \001(\t\022\017\n\007payload\030\003 \001(" + "\014B\020\n\014com.protobufP\001b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }); internal_static_RpcRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_RpcRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_RpcRequest_descriptor, new java.lang.String[] { "Method", "Payload", }); internal_static_RpcResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_RpcResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_RpcResponse_descriptor, new java.lang.String[] { "Code", "Message", "Payload", }); } // @@protoc_insertion_point(outer_class_scope) } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcRequest.java1 ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: rpc.proto package com.protobuf.v3; /** * Protobuf type {@code RpcRequest} */ public final class RpcRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:RpcRequest) RpcRequestOrBuilder { private static final long serialVersionUID = 0L; // Use RpcRequest.newBuilder() to construct. 
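  // Note: shipped with a .java1 extension (see the FILE header above), presumably to keep this
  // protoc output out of the build while the dynamic-descriptor path is exercised instead.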
private RpcRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RpcRequest() { method_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new RpcRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RpcRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); method_ = s; break; } case 18: { payload_ = input.readBytes(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Rpc.internal_static_RpcRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return Rpc.internal_static_RpcRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( RpcRequest.class, RpcRequest.Builder.class); } public static final int METHOD_FIELD_NUMBER = 1; private volatile java.lang.Object method_; /** * string method = 1; * @return The method. */ @java.lang.Override public java.lang.String getMethod() { java.lang.Object ref = method_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); method_ = s; return s; } } /** * string method = 1; * @return The bytes for method. */ @java.lang.Override public com.google.protobuf.ByteString getMethodBytes() { java.lang.Object ref = method_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); method_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAYLOAD_FIELD_NUMBER = 2; private com.google.protobuf.ByteString payload_; /** * bytes payload = 2; * @return The payload. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { // if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) { // com.google.protobuf.GeneratedMessageV3.writeString(output, 1, method_); // } if (!payload_.isEmpty()) { output.writeBytes(2, payload_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; // if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) { // size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, method_); // } if (!payload_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, payload_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof RpcRequest)) { return super.equals(obj); } RpcRequest other = (RpcRequest) obj; if (!getMethod() .equals(other.getMethod())) return false; if (!getPayload() .equals(other.getPayload())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + METHOD_FIELD_NUMBER; hash = (53 * hash) + getMethod().hashCode(); hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; hash = (53 * hash) + getPayload().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static RpcRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static RpcRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static RpcRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static RpcRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static RpcRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static RpcRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static RpcRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static RpcRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } public static RpcRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static RpcRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static RpcRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static RpcRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(RpcRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RpcRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:RpcRequest) RpcRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Rpc.internal_static_RpcRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return Rpc.internal_static_RpcRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( RpcRequest.class, RpcRequest.Builder.class); } // Construct using com.protobuf.RpcRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); method_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return Rpc.internal_static_RpcRequest_descriptor; } @java.lang.Override public RpcRequest getDefaultInstanceForType() { return RpcRequest.getDefaultInstance(); } @java.lang.Override public RpcRequest build() { RpcRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public RpcRequest buildPartial() { RpcRequest result = new RpcRequest(this); result.method_ = method_; result.payload_ = payload_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); 
} @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof RpcRequest) { return mergeFrom((RpcRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(RpcRequest other) { if (other == RpcRequest.getDefaultInstance()) return this; if (!other.getMethod().isEmpty()) { method_ = other.method_; onChanged(); } if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) { setPayload(other.getPayload()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { RpcRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (RpcRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object method_ = ""; /** * string method = 1; * @return The method. */ public java.lang.String getMethod() { java.lang.Object ref = method_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); method_ = s; return s; } else { return (java.lang.String) ref; } } /** * string method = 1; * @return The bytes for method. */ public com.google.protobuf.ByteString getMethodBytes() { java.lang.Object ref = method_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); method_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string method = 1; * @param value The method to set. * @return This builder for chaining. */ public Builder setMethod( java.lang.String value) { if (value == null) { throw new NullPointerException(); } method_ = value; onChanged(); return this; } /** * string method = 1; * @return This builder for chaining. */ public Builder clearMethod() { method_ = getDefaultInstance().getMethod(); onChanged(); return this; } /** * string method = 1; * @param value The bytes for method to set. * @return This builder for chaining. */ public Builder setMethodBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); method_ = value; onChanged(); return this; } private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; /** * bytes payload = 2; * @return The payload. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } /** * bytes payload = 2; * @param value The payload to set. * @return This builder for chaining. */ public Builder setPayload(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } payload_ = value; onChanged(); return this; } /** * bytes payload = 2; * @return This builder for chaining. */ public Builder clearPayload() { payload_ = getDefaultInstance().getPayload(); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:RpcRequest) } // @@protoc_insertion_point(class_scope:RpcRequest) private static final RpcRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new RpcRequest(); } public static RpcRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public RpcRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RpcRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public RpcRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcRequestOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: rpc.proto package com.protobuf.v3; public interface RpcRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:RpcRequest) com.google.protobuf.MessageOrBuilder { /** * string method = 1; * @return The method. */ java.lang.String getMethod(); /** * string method = 1; * @return The bytes for method. */ com.google.protobuf.ByteString getMethodBytes(); /** * bytes payload = 2; * @return The payload. */ com.google.protobuf.ByteString getPayload(); } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcResponse.java1 ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: rpc.proto package com.protobuf.v3; /** * Protobuf type {@code RpcResponse} */ public final class RpcResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:RpcResponse) RpcResponseOrBuilder { private static final long serialVersionUID = 0L; // Use RpcResponse.newBuilder() to construct. 
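  // Note: RpcResponse mirrors RpcRequest above (also shipped as .java1), adding the int32 code
  // and string message fields declared in rpc.proto.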
private RpcResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RpcResponse() { message_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new RpcResponse(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RpcResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { code_ = input.readInt32(); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); message_ = s; break; } case 26: { payload_ = input.readBytes(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Rpc.internal_static_RpcResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return Rpc.internal_static_RpcResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( RpcResponse.class, RpcResponse.Builder.class); } public static final int CODE_FIELD_NUMBER = 1; private int code_; /** * int32 code = 1; * @return The code. */ @java.lang.Override public int getCode() { return code_; } public static final int MESSAGE_FIELD_NUMBER = 2; private volatile java.lang.Object message_; /** * string message = 2; * @return The message. */ @java.lang.Override public java.lang.String getMessage() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } } /** * string message = 2; * @return The bytes for message. */ @java.lang.Override public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAYLOAD_FIELD_NUMBER = 3; private com.google.protobuf.ByteString payload_; /** * bytes payload = 3; * @return The payload. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (code_ != 0) { output.writeInt32(1, code_); } // if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { // com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_); // } if (!payload_.isEmpty()) { output.writeBytes(3, payload_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (code_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, code_); } // if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { // size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_); // } if (!payload_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, payload_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof RpcResponse)) { return super.equals(obj); } RpcResponse other = (RpcResponse) obj; if (getCode() != other.getCode()) return false; if (!getMessage() .equals(other.getMessage())) return false; if (!getPayload() .equals(other.getPayload())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CODE_FIELD_NUMBER; hash = (53 * hash) + getCode(); hash = (37 * hash) + MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getMessage().hashCode(); hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; hash = (53 * hash) + getPayload().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static RpcResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static RpcResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static RpcResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static RpcResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static RpcResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static RpcResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static RpcResponse parseFrom(java.io.InputStream input) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static RpcResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static RpcResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static RpcResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static RpcResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static RpcResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(RpcResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RpcResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:RpcResponse) RpcResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Rpc.internal_static_RpcResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return Rpc.internal_static_RpcResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( RpcResponse.class, RpcResponse.Builder.class); } // Construct using com.protobuf.RpcResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); code_ = 0; message_ = ""; payload_ = com.google.protobuf.ByteString.EMPTY; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return Rpc.internal_static_RpcResponse_descriptor; } @java.lang.Override public RpcResponse getDefaultInstanceForType() { return RpcResponse.getDefaultInstance(); } @java.lang.Override public RpcResponse build() { RpcResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public RpcResponse buildPartial() { RpcResponse result = new 
RpcResponse(this); result.code_ = code_; result.message_ = message_; result.payload_ = payload_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof RpcResponse) { return mergeFrom((RpcResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(RpcResponse other) { if (other == RpcResponse.getDefaultInstance()) return this; if (other.getCode() != 0) { setCode(other.getCode()); } if (!other.getMessage().isEmpty()) { message_ = other.message_; onChanged(); } if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) { setPayload(other.getPayload()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { RpcResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (RpcResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int code_ ; /** * int32 code = 1; * @return The code. */ @java.lang.Override public int getCode() { return code_; } /** * int32 code = 1; * @param value The code to set. * @return This builder for chaining. */ public Builder setCode(int value) { code_ = value; onChanged(); return this; } /** * int32 code = 1; * @return This builder for chaining. */ public Builder clearCode() { code_ = 0; onChanged(); return this; } private java.lang.Object message_ = ""; /** * string message = 2; * @return The message. */ public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } else { return (java.lang.String) ref; } } /** * string message = 2; * @return The bytes for message. */ public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string message = 2; * @param value The message to set. * @return This builder for chaining. 
*/ public Builder setMessage( java.lang.String value) { if (value == null) { throw new NullPointerException(); } message_ = value; onChanged(); return this; } /** * string message = 2; * @return This builder for chaining. */ public Builder clearMessage() { message_ = getDefaultInstance().getMessage(); onChanged(); return this; } /** * string message = 2; * @param value The bytes for message to set. * @return This builder for chaining. */ public Builder setMessageBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); message_ = value; onChanged(); return this; } private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; /** * bytes payload = 3; * @return The payload. */ @java.lang.Override public com.google.protobuf.ByteString getPayload() { return payload_; } /** * bytes payload = 3; * @param value The payload to set. * @return This builder for chaining. */ public Builder setPayload(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } payload_ = value; onChanged(); return this; } /** * bytes payload = 3; * @return This builder for chaining. */ public Builder clearPayload() { payload_ = getDefaultInstance().getPayload(); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:RpcResponse) } // @@protoc_insertion_point(class_scope:RpcResponse) private static final RpcResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new RpcResponse(); } public static RpcResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RpcResponse> PARSER = new com.google.protobuf.AbstractParser<RpcResponse>() { @java.lang.Override public RpcResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RpcResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<RpcResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<RpcResponse> getParserForType() { return PARSER; } @java.lang.Override public RpcResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcResponseOrBuilder.java ================================================ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: rpc.proto package com.protobuf.v3; public interface RpcResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:RpcResponse) com.google.protobuf.MessageOrBuilder { /** * int32 code = 1; * @return The code. */ int getCode(); /** * string message = 2; * @return The message. */ java.lang.String getMessage(); /** * string message = 2; * @return The bytes for message. */ com.google.protobuf.ByteString getMessageBytes(); /** * bytes payload = 3; * @return The payload.
*/ com.google.protobuf.ByteString getPayload(); } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/springram/TestSpringram.java ================================================ package com.springram; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.summer.spring.Springron; import com.pinecone.tritium.Tritium; class JesusChrist extends Tritium { public JesusChrist( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public JesusChrist( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } public void vitalize () throws Exception { Springron springron = new Springron( "Springron", this ); springron.execute(); Thread shutdowner = new Thread(()->{ Debug.sleep( 5000 ); springron.terminate(); }); //shutdowner.start(); this.getTaskManager().add( springron ); this.getTaskManager().syncWaitingTerminated(); } } public class TestSpringram { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) ); jesus.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/utils/TestSchemeQuerier.java ================================================ package com.utils; import com.pinecone.Pinecone; import com.pinecone.framework.unit.ListDictium; import com.pinecone.framework.unit.MapDictium; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.Randomium; import com.pinecone.framework.util.json.*; import com.pinecone.slime.cache.query.LocalDictCachePage; import com.pinecone.slime.cache.query.LocalFixedLRUDictCachePage; import com.pinecone.slime.cache.query.pool.CountSelfPooledPageDictCache; import com.pinecone.slime.cache.query.pool.LocalHotspotPooledDictCache; import com.pinecone.slime.cache.query.pool.LocalLRUPrimaryPooledDictCache; import com.pinecone.slime.jelly.source.ibatis.GenericMybatisQuerierDataManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisManipulatorProxyMapperFactory; import com.pinecone.slime.jelly.source.memcached.GenericMemcachedManipulator; import com.pinecone.slime.jelly.source.redis.GenericRedisHashManipulator; import com.pinecone.slime.jelly.source.redis.GenericRedisMasterManipulator; import com.pinecone.slime.map.LocalMapQuerier; import java.net.InetSocketAddress; import java.util.*; import com.pinecone.slime.map.indexable.IndexableMapQuerier; import com.pinecone.slime.map.rdb.RDBMapQuerier; import com.pinecone.slime.source.indexable.*; import com.pinecone.slime.source.rdb.*; import net.spy.memcached.MemcachedClient; import org.apache.ibatis.session.*; import org.apache.ibatis.datasource.pooled.PooledDataSource; import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; import org.apache.ibatis.mapping.Environment; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import javax.sql.DataSource; import java.util.Map; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; class DataEntity { private Object id; private Object value; public Object getId() { return this.id; } public void setId( Object key ) { this.id = key; } public Object getValue() { return this.value; } public void setValue( Object value ) { this.value = value; } @Override 
public String toString() { return "{" + this.id + ":" + this.value + "}"; } } class MyBatisUtil { private static SqlSessionFactory sqlSessionFactory; static { try { // Define database connection information String driver = "com.mysql.cj.jdbc.Driver"; String url = "jdbc:mysql://localhost:3306/pinecone"; String username = "root"; String password = "test"; DataSource dataSource = new PooledDataSource(driver, url, username, password); Environment environment = new Environment("development", new JdbcTransactionFactory(), dataSource); Configuration configuration = new Configuration(environment); // Add mappers directly in the configuration configuration.addMapper(GenericMybatisQuerierDataManipulator.class); sqlSessionFactory = new SqlSessionFactoryBuilder().build(configuration); } catch ( Exception e ) { e.printStackTrace(); } } public static SqlSessionFactory getSqlSessionFactory() { return sqlSessionFactory; } } public class TestSchemeQuerier { public static void testListDict() throws Exception { JSONArray ja = new JSONArraytron( "[1,2,sss,null,false]" ); ListDictium listDictium = new ListDictium<>( ja.toList() ); for( Map.Entry kv : listDictium.entrySet() ) { Debug.trace( kv ); } Debug.trace( listDictium.entrySet() ); } public static void testMapDict() throws Exception { JSONObject jo = new JSONMaptron( "{ k1:v1, k2:v2, k3:3 }" ); MapDictium mapDictium = new MapDictium<>( jo.toMap(), true ); for( Map.Entry kv : mapDictium.entrySet() ) { Debug.trace( kv.getKey(), kv.getValue() ); } Debug.trace( mapDictium.entrySet() ); } public static void testLocalDict() throws Exception { JSONObject jo = new JSONMaptron( "{ k1:v1, k2:v2, k3:3 }" ); LocalMapQuerier querier = new LocalMapQuerier<>( jo ); for( Object kv : querier.entrySet() ) { Debug.trace( kv ); } Debug.trace( querier ); querier = new LocalMapQuerier<>( true ); querier.insert( 0, 111 ); querier.insert( 1, 211 ); querier.insert( 2, 311 ); querier.insert( 3, 311 ); Debug.trace( querier ); querier.insert( 8, 811 ); Debug.trace( querier ); } public static void testLRUDictCache() throws Exception { SqlSessionFactory sqlSessionFactory = MyBatisUtil.getSqlSessionFactory(); try (SqlSession sqlSession = sqlSessionFactory.openSession()) { //sqlSession.getConnection().setAutoCommit(true); GenericMybatisQuerierDataManipulator manipulator = IbatisManipulatorProxyMapperFactory.getMapper( sqlSession, GenericMybatisQuerierDataManipulator.class ); RDBTargetTableMeta meta = ( new GenericRDBTargetTableMeta("test_table", "id", String.class, manipulator ) ).addValueMetaKey( "value" );/*.addValueMetaKey( "id" ).addValueMetaKey( "value" )*/ ContiguousNumIndexBatchPageSourceRetriever retriever = new ContiguousNumIndexBatchPageSourceRetriever<>( meta, 100, "id" ); //Debug.trace( retriever.retrieve( 56 ) ); Debug.trace( ( (LocalDictCachePage) retriever.retrieves( 8561 ) ).getDictium() ); LocalLRUPrimaryPooledDictCache cache = new LocalLRUPrimaryPooledDictCache<>( 100, 3, retriever ); Debug.trace( cache.get( 123 ) ); Debug.trace( cache.get( 126 ) ); Debug.trace( cache.get( 128 ) ); // {username:undefined, role:admin, expired:20250117-12:30:00, xxxx} Debug.trace( cache.get( 1995 ) ); Debug.trace( cache.get( 1915 ) ); cache.erase( 1915 ); Debug.trace( cache.get( 1915 ) ); Debug.trace( cache.get( 2915 ) ); Debug.trace( cache.get( 3615 ) ); Debug.trace( cache.get( 3415 ) ); LocalFixedLRUDictCachePage cachePage = new LocalFixedLRUDictCachePage<>( 3, retriever ); Debug.trace( cachePage.get( 1995 ) ); Debug.trace( cachePage.get( 1915 ) ); cachePage.erase( 1915 ); Debug.trace( 
cachePage.get( 1915 ) ); // for ( int i = 0; i < (int)1e4; ++i ) { // Debug.trace( cachePage.get( i ) ); // } } } public static void testHotspotDictCache() throws Exception { SqlSessionFactory sqlSessionFactory = MyBatisUtil.getSqlSessionFactory(); try (SqlSession sqlSession = sqlSessionFactory.openSession()) { //sqlSession.getConnection().setAutoCommit(true); GenericMybatisQuerierDataManipulator manipulator = IbatisManipulatorProxyMapperFactory.getMapper( sqlSession, GenericMybatisQuerierDataManipulator.class ); RDBTargetTableMeta meta = ( new GenericRDBTargetTableMeta("test_table", "id", String.class, manipulator ) ).addValueMetaKey( "value" );/*.addValueMetaKey( "id" ).addValueMetaKey( "value" )*/ ContiguousNumIndexBatchPageSourceRetriever retriever = new ContiguousNumIndexBatchPageSourceRetriever<>( meta, 100, "id" ); //Debug.trace( retriever.retrieve( 56 ) ); Debug.trace( ( (LocalDictCachePage) retriever.retrieves( 8561 ) ).getDictium() ); LocalHotspotPooledDictCache cache = new LocalHotspotPooledDictCache<>( 100, 6, retriever ); Debug.trace( cache.get( 123 ) ); Debug.trace( cache.get( 126 ) ); Debug.trace( cache.get( 128 ) ); Debug.trace( cache.get( 1995 ) ); Debug.trace( cache.get( 1915 ) ); Debug.trace( cache.get( 2915 ) ); Debug.trace( cache.get( 3414 ) ); Debug.trace( cache.get( 3415 ) ); //cache.erase( 3415 ); //Debug.trace( cache.get( 3415 ) ); Debug.trace( cache.get( 3416 ) ); Debug.trace( cache.get( 3417 ) ); Debug.trace( cache.get( 4915 ) ); Debug.trace( cache.get( 4916 ) ); Debug.trace( cache.get( 4917 ) ); Debug.trace( cache.get( 1917 ) ); Debug.trace( cache.get( 1918 ) ); Debug.trace( cache.get( 5917 ) ); Debug.trace( cache.get( 6917 ) ); Randomium randomium = Randomium.newInstance(); int scale = (int)1e4; for ( int i = 0; i < scale; ++i ) { //Debug.trace( cache.get( i ) ); Debug.trace( cache.get( (int)randomium.nextBias(0, (int)1e3, 0.4 ) ) ); } Debug.trace( cache.getMisses() ); Debug.trace( cache.getAccesses() ); } } public static void testRDBDict() throws Exception { SqlSessionFactory sqlSessionFactory = MyBatisUtil.getSqlSessionFactory(); try (SqlSession sqlSession = sqlSessionFactory.openSession()) { //sqlSession.getConnection().setAutoCommit(true); RangedRDBQuerierDataManipulator manipulator = sqlSession.getMapper( GenericMybatisQuerierDataManipulator.class ); RDBTargetTableMeta meta = ( new GenericRDBTargetTableMeta("test_table", "id", String.class, manipulator ) ).addValueMetaKey( "value" );/*.addValueMetaKey( "id" ).addValueMetaKey( "value" )*/ CountSelfPooledPageDictCache cache = new LocalLRUPrimaryPooledDictCache<>( 100, 3, new ContiguousNumIndexBatchPageSourceRetriever<>( meta, 100, "id" ) ); RDBMapQuerier querier = new RDBMapQuerier<>( meta, cache ); // querier.insert(1, "value1"); // querier.insert(2, "value2"); // querier.insert(3, "value3"); // querier.insert(4, "value4"); // for ( int i = 0; i < (int)1e4; ++i ) { // querier.insert(i, "value"+i); // } // Debug.trace( querier ); // // //querier.clear(); // Debug.trace(querier.get(1)); Debug.trace(querier.get(2)); Debug.trace(querier.get(3)); sqlSession.commit(); Debug.trace( querier.values() ); Debug.trace( querier.isEmpty() ); Debug.trace( querier.queryVal( "SELECT * FROM test_table WHERE id > 100 AND id < 120" ) ); } } public static void testRedisDict() throws Exception { //IndexableMapQuerier querier = new IndexableMapQuerier<>( "b-serverkingpin", 6397, "", "wolf19310918" ); //Debug.trace( querier.get( "name" ) ); JedisPoolConfig poolConfig = new JedisPoolConfig(); JedisPool jedisPool = new JedisPool( 
poolConfig, "b-serverkingpin", 6379, 2000, "wolf19310918", 0 ); Jedis jedis = jedisPool.getResource(); jedis.auth( "wolf19310918" ); //IndexableIteratableManipulator manipulator = new GenericRedisHashManipulator<>( jedis ); IndexableIterableManipulator manipulator = new GenericRedisMasterManipulator<>( jedis ); IndexableTargetScopeMeta meta = new GenericIndexableTargetScopeMeta( "1", "test", Object.class, manipulator ); //manipulator.insert( meta, "hah", "hhhh" ); //Debug.trace( manipulator.selectByKey( meta, "name" ) ); //Debug.trace( manipulator.selectByKey( meta, "li" ) ); //manipulator.insertByNS( meta, "shit", "more", "fuck" ); //manipulator.insertByNS( meta, "shit", "more", "fuck" ); jedis.select( 0 ); // manipulator.insert( meta, "shit1", "vshit1" ); // manipulator.insert( meta, "crap:shit1", "crap:vshit1" ); // manipulator.insert( meta, "crap:shit2", "crap:vshit2" ); // Debug.trace( manipulator.selectAllByNS( meta, null, null ) ); manipulator = new GenericRedisMasterManipulator<>( jedis ); meta = new GenericIndexableTargetScopeMeta( "0", "", Object.class, manipulator ); //IndexableMapQuerier querier = new IndexableMapQuerier<>( meta ); //IndexableMapQuerier querier = new IndexableMapQuerier<>( meta, false ); IndexableMapQuerier querier = new IndexableMapQuerier<>( meta, true ); Debug.trace( querier.get( "test" ) ); Debug.trace( querier.get( "test" ) ); Debug.trace( querier.get( "test" ) ); Debug.trace( querier.containsKey( "li" ) ); Debug.trace( querier.containsValue( "ssss" ) ); Map map = querier.toMap(); Debug.trace( map.entrySet() ); manipulator = new GenericRedisHashManipulator<>( jedis ); meta = new GenericIndexableTargetScopeMeta( "0", "student", Object.class, manipulator ); Iterator iter = manipulator.iterator( meta ); while ( iter.hasNext() ) { Debug.trace( iter.next() ); } querier = new IndexableMapQuerier<>( meta ); map = querier.toMap(); Debug.trace( map.entrySet() ); } public static void testMemCachedDict() throws Exception { MemcachedClient client = new MemcachedClient( new InetSocketAddress( "b-serverkingpin", 11211 ) ); // 设置一个键值对 Future setFuture = client.set("key1", 900, "value1"); Debug.trace("Set key1: " + setFuture.get(5, TimeUnit.SECONDS)); Debug.trace(client.get("key1")); Debug.trace(client.get("key2")); Set allKeys = new HashSet<>(); IndexableIterableManipulator manipulator = new GenericMemcachedManipulator<> ( client ); IndexableTargetScopeMeta meta = new GenericIndexableTargetScopeMeta( "", "", Object.class, manipulator ); manipulator.insert( meta, "key2", "val2" ); Debug.trace( ((GenericMemcachedManipulator) manipulator).keys(), manipulator.counts( meta, "key1" ) ); IndexableMapQuerier querier = new IndexableMapQuerier<>( meta ); Map map = querier.toMap(); Debug.trace( map ); Debug.trace( querier.get( "key2" ) ); client.shutdown(); } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ //TestSchemeQuerier.testListDict(); //TestSchemeQuerier.testMapDict(); //TestSchemeQuerier.testLocalDict(); //TestSchemeQuerier.testLRUDictCache(); //TestSchemeQuerier.testHotspotDictCache(); //TestSchemeQuerier.testRDBDict(); TestSchemeQuerier.testRedisDict(); //TestSchemeQuerier.testMemCachedDict(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/hydra-system-tritium/src/test/java/com/utils/TestVFS.java ================================================ package com.utils; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import org.apache.commons.vfs2.*; import 
org.apache.commons.vfs2.cache.SoftRefFilesCache; import org.apache.commons.vfs2.impl.StandardFileSystemManager; import org.apache.commons.vfs2.provider.http5.Http5FileProvider; import org.apache.commons.vfs2.provider.http5s.Http5sFileProvider; import org.apache.commons.vfs2.provider.webdav.WebdavFileProvider; import java.net.URLEncoder; public class TestVFS { public static void testCRUD ( ) throws Exception { // FileSystemManager fsManager = null; // // fsManager = VFS.getManager(); StandardFileSystemManager fsManager = new StandardFileSystemManager(); Http5FileProvider http5FileProvider = new Http5FileProvider(); fsManager.addProvider( "http", http5FileProvider ); fsManager.addProvider( "https", new Http5sFileProvider()); fsManager.addProvider( "webdav", new WebdavFileProvider() ); fsManager.setCacheStrategy(CacheStrategy.ON_CALL); fsManager.setFilesCache(new SoftRefFilesCache()); String localFilePath = "file:///K:/test/1.txt"; localFilePath = "\\\\b-serverkingpin\\ARBOmnium/EnderChest/1.txt"; String username = "undefined"; String password = ""; String webdavHost = "b-serverkingpin:8077"; String encodedPassword = URLEncoder.encode(password, "UTF-8"); localFilePath = "http://" + username + ":" + encodedPassword + "@" + webdavHost + "/EnderChest/test/1.text"; //localFilePath = "https://www.rednest.cn/index.html"; //String webdavFilePath = "webdav://username:password@xxx/test"; FileObject fileObject = fsManager.resolveFile(localFilePath); //FileObject webdavFile = fsManager.resolveFile(webdavFilePath); if ( !fileObject.exists() ) { if ( fileObject.getType() == FileType.IMAGINARY ) { fileObject.createFile(); Debug.trace( fileObject.getName() ); } } Debug.trace( fileObject.getName() + " exists." ); if ( fileObject.getType() == FileType.FOLDER ) { for ( FileObject child : fileObject.getChildren() ) { Debug.trace(" - " + child.getName().getBaseName()); } } if ( fileObject.getType() == FileType.FILE ) { FileContent content = fileObject.getContent(); byte[] buffer = content.getInputStream().readAllBytes(); Debug.echo( new String(buffer) ); } } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ TestVFS.testCRUD(); return 0; }, (Object[]) args ); } } ================================================ FILE: Hydra/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>sauron</artifactId>
        <groupId>com.sauron</groupId>
        <version>1.2.7</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.hydra</groupId>
    <artifactId>hydra</artifactId>
    <packaging>pom</packaging>
    <version>2.5.1</version>
    <modules>
        <module>hydra-architecture</module>
        <module>hydra-architecture-conduct</module>
        <module>hydra-architecture-storage</module>
        <module>hydra-framework-runtime</module>
        <module>hydra-message-control</module>
        <module>hydra-message-broadcast</module>
        <module>hydra-framework-service</module>
        <module>hydra-framework-device</module>
        <module>hydra-framework-config</module>
        <module>hydra-framework-storage</module>
        <module>hydra-kom-default-driver</module>
        <module>hydra-lib-thrift-sdk</module>
        <module>hydra-lib-grpc-service-sdk</module>
        <module>hydra-lib-uofs-cache</module>
        <module>hydra-system-tritium</module>
        <module>hydra-system-reign</module>
        <module>hydra-service-control</module>
        <module>hydra-architecture-message</module>
    </modules>
</project>
================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2024 undefined Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: Messenger/Messenger.iml ================================================ ================================================ FILE: Messenger/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>com.Sauron</groupId>
        <artifactId>sauron</artifactId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>Messenger</artifactId>
    <packaging>jar</packaging>
    <name>Messenger</name>
    <url>http://maven.apache.org</url>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>com.Sauron</groupId>
            <artifactId>pinecone</artifactId>
            <version>3.3.1</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-amqp</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.dataformat</groupId>
            <artifactId>jackson-dataformat-xml</artifactId>
            <version>2.9.10</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.75</version>
        </dependency>
    </dependencies>
</project>
================================================ FILE: Messenger/src/main/java/com/genius/App.java ================================================ package com.genius; /** * Hello world! * */ public class App { public static void main( String[] args ) { System.out.println( "Hello World!" ); } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/CommonMessageBuilder.java ================================================ package com.genius.common.UlfUMC; import java.util.Map; /** * @author Genius * @date 2023/05/14 20:40 **/ public class CommonMessageBuilder implements MessageBuilder{ private UlfUMCMessage message; public CommonMessageBuilder(){ message = new UlfUMCMessage(); } public UlfUMCMessage getMessage(){ return message; } @Override public MessageBuilder func(String funcName) { message.getUlfUMCBody().setFunction(funcName); return this; } @Override public MessageBuilder method(UlfUMCMessageType methodType) { message.getUlfUMCBody().setMethod(methodType); return this; } @Override public MessageBuilder data(Map data) { message.getUlfUMCBody().setData(data); return this; } @Override public UlfUMCMessage build() { return this.message; } @Override public byte[] toByte() { return UlfUMCMessage.encode(message); } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/ErrorMessageBuilder.java ================================================ package com.genius.common.UlfUMC; import com.genius.pool.FunctionNamePool; import java.util.Map; /** * @author Genius * @date 2023/05/18 20:14 **/ public class ErrorMessageBuilder implements MessageBuilder{ private UlfUMCMessage errorMessage; public ErrorMessageBuilder(){ errorMessage = new UlfUMCMessage(UlfUMCMessageType.GET, FunctionNamePool.ERROR,Map.of("error","")); } @Override public MessageBuilder func(String funcName) { return null; } @Override public MessageBuilder method(UlfUMCMessageType methodType) { return null; } @Override public MessageBuilder data(Map data) { return null; } public MessageBuilder error(Object data){ errorMessage.getUlfUMCBody().getData().put("error",data); return this; } @Override public UlfUMCMessage build() { return errorMessage; } @Override public byte[] toByte() { return UlfUMCMessage.encode(errorMessage); } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/MessageBuilder.java
================================================ package com.genius.common.UlfUMC; import java.util.Map; public interface MessageBuilder { MessageBuilder func(String funcName); MessageBuilder method(UlfUMCMessageType methodType); MessageBuilder data(Map data); UlfUMCMessage build(); byte[] toByte(); } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/MessageFactory.java ================================================ package com.genius.common.UlfUMC; /** * @author Genius * @date 2023/05/14 20:42 **/ public class MessageFactory{ public enum MessageBuilderType{ COMMON, SLAVE, ERROR } public static MessageBuilder getMessageBuilder(MessageBuilderType builderType){ switch (builderType){ case SLAVE:return new SlaveMessageBuilder(); case ERROR: return new ErrorMessageBuilder(); default:return new CommonMessageBuilder(); } } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/SlaveMessageBuilder.java ================================================ package com.genius.common.UlfUMC; import com.genius.config.SystemConfig; import java.util.HashMap; import java.util.Map; /** * @author Genius * @date 2023/05/14 20:32 **/ public class SlaveMessageBuilder extends CommonMessageBuilder{ public SlaveMessageBuilder(){ super(); } @Override public MessageBuilder data(Map data) { HashMap newData = new HashMap<>(data); newData.put("serviceId",SystemConfig.ServiceId); getMessage().getUlfUMCBody().setData(newData); return this; } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCBody.java ================================================ package com.genius.common.UlfUMC; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import java.io.Serializable; import java.util.Map; /** * @author Genius * @date 2023/05/16 21:34 **/ @Data @AllArgsConstructor @NoArgsConstructor public class UlfUMCBody implements Serializable { private UlfUMCMessageType method; private String function; private Map data; } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCMessage.java ================================================ package com.genius.common.UlfUMC; import com.alibaba.fastjson.JSONObject; import com.genius.util.IntToByteUtil; //import io.netty.buffer.ByteBuf; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import java.io.Serializable; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; /** * @author Genius * @date 2023/05/16 16:41 **/ @Data public class UlfUMCMessage implements Serializable { private UlfUMCProtocol ulfUMCProtocol; private UlfUMCBody ulfUMCBody; public UlfUMCMessage(UlfUMCMessageType methodType,String function,Map data){ ulfUMCBody = new UlfUMCBody(methodType,function,data); ulfUMCProtocol = new UlfUMCProtocol(); ulfUMCProtocol.setLength(JSONObject.toJSONString(ulfUMCBody).length()); } public UlfUMCMessage(){ ulfUMCBody = new UlfUMCBody(); ulfUMCProtocol = new UlfUMCProtocol(); ulfUMCProtocol.setLength(0); } public static byte[] encode(UlfUMCMessage message){ UlfUMCProtocol ulfUMCProtocol = message.getUlfUMCProtocol(); UlfUMCBody ulfUMCBody = message.getUlfUMCBody(); ByteBuffer buffer = ByteBuffer.allocate(message.getLength()+UlfUMCProtocol.header.length()+1028); 
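// Wire layout written below: the ASCII header "UMC/1.1", a 4-byte length prefix, then the UTF-8 JSON body; the extra 1028 bytes of allocation slack cover the length prefix and leave trailing padding in the returned array.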
buffer.put(UlfUMCProtocol.header.getBytes()); String body = JSONObject.toJSONString(ulfUMCBody); buffer.put(IntToByteUtil.intToByte(message.getLength())); buffer.put(body.getBytes(StandardCharsets.UTF_8)); return buffer.array(); } public static UlfUMCMessage decode(byte[] in) throws UlfUMCMessageException { try { int index = 0; String UMCProtocolHeader = new String(Arrays.copyOfRange(in,index,UlfUMCProtocol.header.length())); if(!UMCProtocolHeader.equals(UlfUMCProtocol.header)){ throw new UlfUMCMessageException("UlfUMCMessage decode Error"); } index+=UMCProtocolHeader.length(); int length = IntToByteUtil.byteArrayToInt(Arrays.copyOfRange(in,index,index+4)); index+=4; String body = new String(Arrays.copyOfRange(in,index,index+length), StandardCharsets.UTF_8); Map map = JSONObject.parseObject(body, Map.class); UlfUMCMessageType methodType = UlfUMCMessageType.valueOf((String) map.get("method")); String function = (String) map.get("function"); Map data = new HashMap<>(); if (map.containsKey("data")) { data = JSONObject.parseObject(map.get("data").toString(), Map.class); } return new UlfUMCMessage(methodType,function,data); }catch (Exception e){ throw new UlfUMCMessageException("UlfUMCMessage decode Error"); } } public int getLength(){ // Count the body in UTF-8 bytes rather than chars: decode() slices the byte array by this value, so a char count would truncate multi-byte bodies. int length = JSONObject.toJSONString(ulfUMCBody).getBytes(StandardCharsets.UTF_8).length; ulfUMCProtocol.setLength(length); return length; } public Map getData(){ return this.ulfUMCBody.getData(); } public UlfUMCMessageType getMethod(){ return this.ulfUMCBody.getMethod(); } public String getFunction(){ return this.ulfUMCBody.getFunction(); } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCMessageException.java ================================================ package com.genius.common.UlfUMC; /** * @author Genius * @date 2023/05/16 22:05 **/ public class UlfUMCMessageException extends Exception{ private String reason; public UlfUMCMessageException(String reason) { this.reason = reason; } @Override public String getMessage() { return reason; } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCMessageType.java ================================================ package com.genius.common.UlfUMC; /** * @author Genius * @date 2023/05/18 18:39 **/ public enum UlfUMCMessageType { GET("Get"), POST("Post"); private final String value; UlfUMCMessageType(String value){this.value = value;} public String getName(){ return this.value; } } ================================================ FILE: Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCProtocol.java ================================================ package com.genius.common.UlfUMC; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import java.io.Serializable; /** * @author Genius * @date 2023/05/16 16:32 **/ @AllArgsConstructor @NoArgsConstructor public class UlfUMCProtocol implements Serializable { public static final String header = "UMC/1.1"; private int length; public void setLength(int length){ this.length = length; } } ================================================ FILE: Messenger/src/main/java/com/genius/config/MessageConverterConfig.java ================================================ package com.genius.config; import org.springframework.amqp.support.converter.Jackson2JsonMessageConverter; import org.springframework.amqp.support.converter.MessageConverter; import org.springframework.context.annotation.Bean; import
org.springframework.context.annotation.Configuration; /** * @author Genius * @date 2023/05/12 15:44 **/ @Configuration public class MessageConverterConfig { @Bean public MessageConverter messageConverter() { return new Jackson2JsonMessageConverter();} } ================================================ FILE: Messenger/src/main/java/com/genius/pool/FunctionNamePool.java ================================================ package com.genius.pool; /** * @author Genius * @date 2023/05/12 18:14 **/ public class FunctionNamePool { public static final String QUERY_TASK_RANGE = "QueryTaskRange"; public static final String SHUTDOWN = "shutdown"; public static final String ERROR = "Error"; } ================================================ FILE: Messenger/src/main/java/com/genius/pool/MqPool.java ================================================ package com.genius.pool; /** * @author Genius * @date 2023/05/12 18:23 **/ public class MqPool { public static final String EXCHANGE_TOPIC_NONJRON_TASK = "com.pinecone.tritium.task.direct"; public static final String MASTER_TASK_SEND_CENTER = "task.send"; } ================================================ FILE: Messenger/src/test/java/com/genius/AppTest.java ================================================ package com.genius; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; /** * Unit test for simple App. */ public class AppTest extends TestCase { /** * Create the test case * * @param testName name of the test case */ public AppTest( String testName ) { super( testName ); } /** * @return the suite of tests being tested */ public static Test suite() { return new TestSuite( AppTest.class ); } /** * Rigorous Test :-) */ public void testApp() { assertTrue( true ); } } ================================================ FILE: Odin/odin-architecture/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>odin</artifactId>
        <groupId>com.walnut.odin</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.walnut.odin</groupId>
    <artifactId>odin-architecture</artifactId>
    <version>2.5.1</version>
    <packaging>jar</packaging>
    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.ulf</groupId>
            <artifactId>ulfhedinn</artifactId>
            <version>1.2.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-framework-runtime</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-message-control</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.slime.jelly</groupId>
            <artifactId>jelly</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/advance/GraphAdvancer.java ================================================ package com.walnut.odin.atlas.advance; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import com.pinecone.hydra.unit.vgraph.VectorDAG; import java.util.List; public interface GraphAdvancer extends Pinenut { void traverse( VectorDAG vectorDAG ); List fetchExecuteNode(long offset, long limit ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/advance/GraphStratumAdvancer.java ================================================ package com.walnut.odin.atlas.advance; public interface GraphStratumAdvancer extends GraphAdvancer { } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/advance/GraphStratumTape.java ================================================ package com.walnut.odin.atlas.advance; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue; import
com.pinecone.hydra.unit.vgraph.entity.GraphNode; import java.util.List; public interface GraphStratumTape extends Pinenut { GraphNode queryNodeByIndex( long index ); GUID queryNodeGuidByIndex( long index ); List fetchNodes( List guids ); List fetchNodes( long offset, long limit ); List fetchNodes( long queuePriority, long offset, long limit ); List fetchGuids( long offset, long limit ); List fetchGuids( long queryPriority, long offset, long limit ); int countStratum(); DeflectPriorityQueue query(int stratumId, short runtimePriority ); DeflectPriorityQueue getExecutionPriorityQueue(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/graph/RuntimeAtlasInstrument.java ================================================ package com.walnut.odin.atlas.graph; import java.util.List; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.unit.vgraph.AtlasInstrument; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.pinecone.slime.meta.TableIndexMeta; import com.walnut.odin.atlas.advance.GraphStratumTape; /** * Pinecone Ursus For Java RuntimeAtlas * Author: Ken, Harald.E (Dragon King) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Runtime Orchestration Atlas * A unified atlas (cloud graph) for large-scale runtime vector scheduling * ***************************************************************************************** */ public interface RuntimeAtlasInstrument extends Pinenut, AtlasInstrument { TaskInstrument taskInstrument(); GraphNode queryGraphNodeByTaskGuid( GUID taskGuid ); TaskElement queryTaskElementByGuid( GUID graphNodeGuid ); GraphStratumTape tapedGraphStratumAdvancer(VectorDAG vectorDAG, KOIMappingDriver driver ); String querySegmentName( GUID vgraphGuid, short stratumId, short runtimePriority ); int countStratum( GUID vgraphGuid ); int countPriority( GUID vgraphGuid, short stratumId ); void putStratumMeta( GUID vgraphGuid, short stratumId, short runtimePriority, String segmentName ); VectorDAG getByLayerGuid( GUID layerGuid ); VectorDAG queryByPath( String path ); List fetchParentIds( GUID graphNodeGuid ); @Unsafe( "TestOnly" ) List fetchIsolatedNodesAll(); List fetchIsolatedNodes( long offset, long limit ); List fetchIsolatedNodesById( long idStart, long idEnd ); TableIndexMeta getIsolatedNodeIndexMeta(); long queryMaxIsolatedNodePage( long limit ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/CollectiveTaskLegionary.java ================================================ package com.walnut.odin.conduct; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.proc.ProcessManager; import com.walnut.odin.conduct.entity.RegimentJoinResponse; import com.walnut.odin.proc.RemoteProcessServiceRPCException; import com.walnut.odin.proc.client.RemoteProcessManagerClient; public interface CollectiveTaskLegionary extends Pinenut { String getName(); long getClientId(); ProcessManager processManager(); void startService () throws RemoteProcessServiceRPCException; RegimentJoinResponse joinRegiment () throws RegimentException;
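// The client-side RPC handle to the regiment's remote process manager; read here as the counterpart of the RemoteProcessManagerServer exposed by CollectiveTaskRegiment.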
RemoteProcessManagerClient remoteProcessManagerClient(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/CollectiveTaskRegiment.java ================================================ package com.walnut.odin.conduct; import com.pinecone.framework.system.regime.Regiment; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.walnut.odin.conduct.entity.LaunchedContext; import com.walnut.odin.conduct.entity.RegimentJoinRequest; import com.walnut.odin.conduct.entity.RegimentJoinResponse; import com.walnut.odin.dispatch.TaskDispatchException; import com.walnut.odin.dispatch.TaskDispatcher; import com.walnut.odin.proc.RemoteProcessServiceRPCException; import com.walnut.odin.proc.server.RemoteProcessManagerServer; import com.walnut.odin.task.CentralizedTaskInstrument; import com.walnut.odin.task.RavenTask; import com.walnut.odin.task.troll.InstanceLaunchException; import com.walnut.odin.task.troll.LaunchFeature; import com.walnut.odin.task.troll.TaskExecutionLauncher; public interface CollectiveTaskRegiment extends Regiment, Slf4jTraceable { RavenTask createTask( TaskElement taskElement, Identification serviceId ); void purgeTask( GUID guid ); RavenTask affirmTask( String path, Identification serviceId, TaskElement metaInfos ); RavenTask queryTaskByPath( String path ) ; RavenTask getTaskByGuid( GUID taskGuid ) ; RemoteProcessManagerServer remoteProcessManagerServer(); ProcessManager processManager(); CentralizedTaskInstrument taskInstrument(); TaskExecutionLauncher taskExecutionLauncher(); TaskDispatcher taskDispatcher(); void startRemoteProcessServer() throws RemoteProcessServiceRPCException; LaunchedContext create( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException; LaunchedContext launch( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException; LaunchedContext create( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException; LaunchedContext launch( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException; RegimentJoinResponse invokeJoinRegiment( RegimentJoinRequest request ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/ProcessorDeployManager.java ================================================ package com.walnut.odin.conduct; import com.pinecone.framework.system.regime.arch.Manager; public interface ProcessorDeployManager extends Manager { } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/RegimentException.java ================================================ package com.walnut.odin.conduct; import com.pinecone.framework.system.prototype.Pinenut; public class RegimentException extends Exception implements Pinenut { public RegimentException() { super(); } public RegimentException( String message ) { super(message); } public RegimentException( String message, Throwable cause ) { super(message, cause); } public RegimentException( Throwable cause ) { super(cause); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceAtlasAdjacent.java 
================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface InstanceAtlasAdjacent extends Pinenut { GUID getGuid(); void setGuid( GUID guid ); GUID getParentGuid(); void setParentGuid( GUID parentGuid ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceAtlasNode.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface InstanceAtlasNode extends Pinenut { GUID getGuid(); void setGuid(GUID guid); GUID getInstanceGuid(); void setInstanceGuid(GUID instanceGuid); String getNodeName(); void setNodeName(String nodeName); boolean isIsolated(); void setIsIsolated(boolean isIsolated); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceEvent.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface InstanceEvent extends Pinenut { GUID getGuid(); void setGuid(GUID guid); GUID getTaskGuid(); void setTaskGuid(GUID taskGuid); GUID getInstanceGuid(); void setInstanceGuid(GUID instanceGuid); String getInstanceName(); void setInstanceName(String instanceName); int getRetryTimes(); void setRetryTimes(int retryTimes); int getCurrentRetryNumber(); void setCurrentRetryNumber(int currentRetryNumber); String getEventType(); void setEventType(String eventType); String getState(); void setState(String state); String getEventContext(); void setEventContext(String eventContext); LocalDateTime getExecTime(); void setExecTime(LocalDateTime execTime); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceExec.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import java.time.LocalDateTime; public interface InstanceExec extends Pinenut { long getId(); void setId(long id); GUID getTaskGuid(); void setTaskGuid(GUID taskGuid); GUID getInstanceGuid(); void setInstanceGuid(GUID instanceGuid); String getTaskName(); void setTaskName(String taskName); String getInstanceName(); void setInstanceName(String instanceName); String getProcessorQueue(); void setProcessorQueue(String processorQueue); String getClusterName(); void setClusterName(String clusterName); String getExecState(); void setExecState(String execState); int getCurrentRetryNumber(); void setCurrentRetryNumber(int currentRetryNumber); int getRetryTimes(); void setRetryTimes(int retryTimes); LocalDateTime getStartTime(); void setStartTime(LocalDateTime startTime); LocalDateTime getRunTime(); void setRunTime(LocalDateTime runTime); LocalDateTime getFinishTime(); void setFinishTime(LocalDateTime finishTime); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/LaunchedContext.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.system.prototype.Pinenut; import 
com.pinecone.hydra.proc.UProcess; import com.walnut.odin.task.RavenTaskInstance; public class LaunchedContext implements Pinenut { protected UProcess process; protected RavenTaskInstance taskInstance; public LaunchedContext( UProcess process, RavenTaskInstance taskInstance ) { this.process = process; this.taskInstance = taskInstance; } public RavenTaskInstance getTaskInstance() { return this.taskInstance; } public void setTaskInstance( RavenTaskInstance taskInstance ) { this.taskInstance = taskInstance; } public UProcess getProcess() { return this.process; } public void setProcess( UProcess process ) { this.process = process; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/RegimentJoinRequest.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.system.prototype.Pinenut; public class RegimentJoinRequest implements Pinenut { protected String mszNodeName; protected Long mnClientId; public RegimentJoinRequest() { } public String getNodeName() { return this.mszNodeName; } public void setNodeName( String szNodeName ) { this.mszNodeName = szNodeName; } public Long getClientId() { return this.mnClientId; } public void setClientId( Long nClientId ) { this.mnClientId = nClientId; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/RegimentJoinResponse.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.system.prototype.Pinenut; public class RegimentJoinResponse implements Pinenut { protected String mszGuid; protected String mszName; protected String mszClusterPath; protected String mszClusterName; protected long mnControlClientId; protected int mnPriority; protected String mszQueueName; protected int mnQueueMaxCapacity; protected int mnQueueMinCapacity; protected int mnQueueRuntimeInstanceCapacity; protected String mszErrorMsg; public RegimentJoinResponse() { } public String getGuid() { return this.mszGuid; } public void setGuid( String szGuid ) { this.mszGuid = szGuid; } public String getName() { return this.mszName; } public void setName( String szName ) { this.mszName = szName; } public String getClusterPath() { return this.mszClusterPath; } public void setClusterPath( String szClusterPath ) { this.mszClusterPath = szClusterPath; } public String getClusterName() { return this.mszClusterName; } public void setClusterName( String szClusterName ) { this.mszClusterName = szClusterName; } public long getControlClientId() { return this.mnControlClientId; } public void setControlClientId( long nControlClientId ) { this.mnControlClientId = nControlClientId; } public int getPriority() { return this.mnPriority; } public void setPriority( int nPriority ) { this.mnPriority = nPriority; } public String getQueueName() { return this.mszQueueName; } public void setQueueName( String szQueueName ) { this.mszQueueName = szQueueName; } public int getQueueMaxCapacity() { return this.mnQueueMaxCapacity; } public void setQueueMaxCapacity( int nQueueMaxCapacity ) { this.mnQueueMaxCapacity = nQueueMaxCapacity; } public int getQueueMinCapacity() { return this.mnQueueMinCapacity; } public void setQueueMinCapacity( int nQueueMinCapacity ) { this.mnQueueMinCapacity = nQueueMinCapacity; } public int getQueueRuntimeInstanceCapacity() { return this.mnQueueRuntimeInstanceCapacity; } public void setQueueRuntimeInstanceCapacity( int 
nQueueRuntimeInstanceCapacity ) { this.mnQueueRuntimeInstanceCapacity = nQueueRuntimeInstanceCapacity; } public String getErrorMsg() { return this.mszErrorMsg; } public void setErrorMsg( String errorMsg ) { this.mszErrorMsg = errorMsg; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/InstanceScheduleAllocator.java ================================================ package com.walnut.odin.conduct.schedule; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.kom.instance.InstanceEntry; import com.walnut.odin.conduct.schedule.entity.ScheduleFittingContext; public interface InstanceScheduleAllocator extends Pinenut { String getPartitionName(); ScheduleFittingContext pipeFitting( Collection instances ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/InstanceScheduleImpetus.java ================================================ package com.walnut.odin.conduct.schedule; import java.time.LocalDateTime; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.TaskInstanceStatus; public interface InstanceScheduleImpetus extends Pinenut { UniformTaskScheduler taskScheduler(); void impelSchedulableInstances( Collection statuses, LocalDateTime targetTime ); void impelPrelaunchInstances( LocalDateTime targetTime ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/TaskSchedulePreparator.java ================================================ package com.walnut.odin.conduct.schedule; import java.time.LocalDateTime; import java.util.Collection; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; public interface TaskSchedulePreparator extends Pinenut { UniformTaskScheduler taskScheduler(); void prepareSchedulableTasks( Collection cycles, LocalDateTime targetTime ); void prepareSchedulableTasksDaily( LocalDateTime targetTime ); List fetchSchedulableTasksInRange( long idMin, long idMax, Collection cycles, LocalDateTime targetTime ); List fetchSchedulableTasksDaily( long idMin, long idMax, LocalDateTime targetTime ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/UniformTaskScheduler.java ================================================ package com.walnut.odin.conduct.schedule; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.kom.instance.InstanceInstrument; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument; import com.walnut.odin.dispatch.TaskDispatcher; import com.walnut.odin.task.CentralizedTaskInstrument; import com.walnut.odin.task.RavenTaskConfig; import com.walnut.odin.task.troll.TaskExecutionLauncher; public interface UniformTaskScheduler extends Pinenut { RavenTaskConfig ravenTaskConfig(); CentralizedTaskInstrument taskInstrument(); InstanceInstrument instanceInstrument(); RuntimeAtlasInstrument atlasInstrument(); TaskExecutionLauncher taskExecutionLauncher(); TaskDispatcher taskDispatcher(); String getPartitionName(); TaskSchedulePreparator taskSchedulePreparator(); InstanceScheduleImpetus instanceScheduleImpetus(); InstanceScheduleAllocator instanceScheduleAllocator(); } 
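/*
 * A minimal orientation sketch (editor's addition) of how the facets declared above are
 * meant to compose, using only methods from this module. The variables `scheduler`, `now`
 * and `ready` are hypothetical, and how a UniformTaskScheduler instance is obtained is
 * not shown here:
 *
 *   UniformTaskScheduler scheduler = ...;
 *   LocalDateTime now = LocalDateTime.now();
 *   // 1. Preparator: expand schedulable task definitions for the target time.
 *   scheduler.taskSchedulePreparator().prepareSchedulableTasksDaily( now );
 *   // 2. Impetus: push prelaunch-ready instances forward through the scheduler.
 *   scheduler.instanceScheduleImpetus().impelPrelaunchInstances( now );
 *   // 3. Allocator: fit candidate instances (`ready`) into the partition's capacity.
 *   ScheduleFittingContext ctx = scheduler.instanceScheduleAllocator().pipeFitting( ready );
 *   Collection fitted = ctx.getFittedInstances();
 */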
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/entity/ConcurrentQuota.java ================================================ package com.walnut.odin.conduct.schedule.entity; import java.util.HashMap; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.framework.util.json.JSONObject; public class ConcurrentQuota implements Pinenut { private final short mnPriority; // The maximum watermark: if usage is above this level, tasks of the corresponding priority are not allowed to start in parallel any further. private volatile double mnMaximumRatio; private volatile Long mnMaximumCnt; // The minimum guaranteed watermark: if usage falls below this level, tasks of the corresponding priority are launched proactively. private volatile double mnMinimumRatio; private volatile Long mnMinimumCnt; public ConcurrentQuota( short priority ) { this.mnPriority = priority; } public short getPriority() { return this.mnPriority; } public double getMaximumRatio() { return this.mnMaximumRatio; } public void setMaximumRatio( double nMaximumRatio ) { this.mnMaximumRatio = nMaximumRatio; } public Long getMaximumCnt() { return this.mnMaximumCnt; } public void setMaximumCnt( Long nMaximumCnt ) { this.mnMaximumCnt = nMaximumCnt; } public double getMinimumRatio() { return this.mnMinimumRatio; } public void setMinimumRatio( double nMinimumRatio ) { this.mnMinimumRatio = nMinimumRatio; } public Long getMinimumCnt() { return this.mnMinimumCnt; } public void setMinimumCnt( Long nMinimumCnt ) { this.mnMinimumCnt = nMinimumCnt; } public boolean isMaximumRatioMode() { return this.mnMaximumRatio >= 0D; } public boolean isMinimumRatioMode() { return this.mnMinimumRatio >= 0D; } public boolean isMaximumUnlimited() { return this.mnMaximumCnt != null && this.mnMaximumCnt < 0L; } public boolean isMinimumUnlimited() { return this.mnMinimumCnt != null && this.mnMinimumCnt < 0L; } public static ConcurrentQuota from( JSONObject map ) { short nPriority = (short) map.optLong( "priority" ); ConcurrentQuota quota = new ConcurrentQuota( nPriority ); quota.setMaximumRatio( map.optDouble( "maximumRatio", 0D ) ); quota.setMinimumRatio( map.optDouble( "minimumRatio", 0D ) ); if ( map.hasOwnProperty( "maximumCnt" ) && !map.isNull( "maximumCnt" ) ) { long nMaximumCnt = map.optLong( "maximumCnt" ); if ( nMaximumCnt < 0 ) { quota.setMaximumCnt( Long.MAX_VALUE ); } else { quota.setMaximumCnt( nMaximumCnt ); } } if ( map.hasOwnProperty( "minimumCnt" ) && !map.isNull( "minimumCnt" ) ) { long nMinimumCnt = map.optLong( "minimumCnt" ); if ( nMinimumCnt < 0 ) { quota.setMinimumCnt( Long.MAX_VALUE ); } else { quota.setMinimumCnt( nMinimumCnt ); } } return quota; } public static Map fromThose( JSONObject map ) { Map quotas = new HashMap<>(); if ( map == null ) { return quotas; } for ( Map.Entry entry : map.entrySet() ) { String szKey = entry.getKey(); JSONObject joQuota = (JSONObject) entry.getValue(); ConcurrentQuota quota = ConcurrentQuota.from( joQuota ); quotas.put( szKey, quota ); } return quotas; } public ConcurrentQuota reproduce( short nPriority ) { ConcurrentQuota quota = new ConcurrentQuota( nPriority ); quota.setMaximumRatio( this.getMaximumRatio() ); quota.setMinimumRatio( this.getMinimumRatio() ); quota.setMaximumCnt( this.getMaximumCnt() ); quota.setMinimumCnt(
this.getMinimumCnt() ); return quota; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "priority" , this.getPriority() ), new KeyValue<>( "maximumRatio" , this.getMaximumRatio() ), new KeyValue<>( "maximumCnt" , this.getMaximumCnt() ), new KeyValue<>( "minimumRatio" , this.getMinimumRatio() ), new KeyValue<>( "minimumCnt" , this.getMinimumCnt() ) } ); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/entity/DepartureChecklist.java ================================================ package com.walnut.odin.conduct.schedule.entity; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.instance.InstanceEntry; public class DepartureChecklist implements Pinenut { private boolean mbTraceDependencyDetails; // Whether to trace dependency details; enabling this records the dependent instances and increases memory consumption. private Collection mDependentInstanceIds; private LocalDateTime mCheckTime; private InstanceEntry mTargetInstance; private TaskInstanceStatus mInterceptedStatus; // The status that caused the interception. private TaskInstanceStatus mPreDepartureLastStatus; // The last status before launch: only DepartureStandby may depart; any other status is sent into the next pipeline batch. public DepartureChecklist( InstanceEntry targetInstance, boolean bTraceDependencyDetails ) { this.mTargetInstance = targetInstance; this.mbTraceDependencyDetails = bTraceDependencyDetails; this.mCheckTime = LocalDateTime.now(); } public DepartureChecklist( InstanceEntry targetInstance ) { this( targetInstance, true ); } public boolean isTraceDependencyDetails() { return this.mbTraceDependencyDetails; } public void setTraceDependencyDetails( boolean bTraceDependencyDetails ) { this.mbTraceDependencyDetails = bTraceDependencyDetails; } public Collection getDependentInstanceIds() { return this.mDependentInstanceIds; } public void setDependentInstanceIds( Collection dependentInstanceIds ) { this.mDependentInstanceIds = dependentInstanceIds; } public InstanceEntry getTargetInstance() { return this.mTargetInstance; } public void setTargetInstance( InstanceEntry targetInstance ) { this.mTargetInstance = targetInstance; } public TaskInstanceStatus getInterceptedStatus() { return this.mInterceptedStatus; } public void setInterceptedStatus( TaskInstanceStatus interceptedStatus ) { this.mInterceptedStatus = interceptedStatus; } public TaskInstanceStatus getPreDepartureLastStatus() { return this.mPreDepartureLastStatus; } public void setPreDepartureLastStatus( TaskInstanceStatus preDepartureLastStatus ) { this.mPreDepartureLastStatus = preDepartureLastStatus; } public boolean isDepartureCheckPassed() { return this.mPreDepartureLastStatus == TaskInstanceStatus.DepartureStandby; } public boolean isIntercepted() { return this.mInterceptedStatus != null; } public void addDependentInstanceId( GUID dependentInstanceId ) { if ( !this.mbTraceDependencyDetails ) { return; } if ( this.mDependentInstanceIds == null ) { this.mDependentInstanceIds = new ArrayList<>(); } this.mDependentInstanceIds.add( dependentInstanceId ); } public LocalDateTime getCheckTime() { return this.mCheckTime; } public void setCheckTime( LocalDateTime checkTime ) { this.mCheckTime = checkTime; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/entity/ScheduleFittingContext.java
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/entity/ScheduleFittingContext.java ================================================ package com.walnut.odin.conduct.schedule.entity; import java.util.ArrayList; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.kom.instance.InstanceEntry; public class ScheduleFittingContext implements Pinenut { private Collection<InstanceEntry> mFittedInstances; private Collection<InstanceEntry> mDiscardedInstances; public ScheduleFittingContext() { this.mFittedInstances = new ArrayList<>(); this.mDiscardedInstances = new ArrayList<>(); } public Collection<InstanceEntry> getFittedInstances() { return this.mFittedInstances; } public void setFittedInstances( Collection<InstanceEntry> fittedInstances ) { this.mFittedInstances = fittedInstances; } public Collection<InstanceEntry> getDiscardedInstances() { return this.mDiscardedInstances; } public void setDiscardedInstances( Collection<InstanceEntry> discardedInstances ) { this.mDiscardedInstances = discardedInstances; } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/ArchTaskExecutionI32Queue.java ================================================ package com.walnut.odin.dispatch; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Deque; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; import com.pinecone.framework.util.id.Identification; public abstract class ArchTaskExecutionI32Queue implements TaskExecutionQueue { protected String mszName; protected volatile int mnMaxCapacity; protected volatile int mnMinCapacity; protected volatile int mnUsedCapacity; protected volatile int mnRuntimeInstanceCapacity; protected final Deque<TaskLaunchContext> mWaitingQueue; protected final Map<Identification, TaskLaunchContext> mRunningInstances; protected final ReentrantLock mLock; protected ArchTaskExecutionI32Queue() { this.mWaitingQueue = new ArrayDeque<>(); this.mRunningInstances = new HashMap<>(); this.mLock = new ReentrantLock(); } protected void assertOfferCapacityLocked( int nIncoming ) throws QueueBadAllocatedException { if ( nIncoming <= 0 ) { return; } int nFuture = this.mnUsedCapacity + this.mWaitingQueue.size() + nIncoming; if ( nFuture > this.mnMaxCapacity ) { throw new QueueBadAllocatedException( "Queue capacity exceeded. incoming=" + nIncoming + ", used=" + this.mnUsedCapacity + ", waiting=" + this.mWaitingQueue.size() + ", max=" + this.mnMaxCapacity ); } } @Override public void offer( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException { this.mLock.lock(); try { this.assertOfferCapacityLocked( contexts.size() ); for ( TaskLaunchContext context : contexts ) { this.mWaitingQueue.addLast( context ); } } finally { this.mLock.unlock(); } } @Override public void offer( TaskLaunchContext context ) throws TaskDispatchException { this.mLock.lock(); try { this.assertOfferCapacityLocked( 1 ); this.mWaitingQueue.addLast( context ); } finally { this.mLock.unlock(); } } @Override public Collection<TaskLaunchContext> consume( int n, boolean bForce, TaskInstanceConsumer consumer ) throws TaskConsumeException { this.mLock.lock(); try { if ( n <= 0 ) { return Collections.emptyList(); } int nFreeCapacity = this.pendingCapacity(); if ( nFreeCapacity <= 0 ) { return Collections.emptyList(); } int nAllowedByRuntime; if ( bForce ) { nAllowedByRuntime = n; } else { nAllowedByRuntime = this.mnRuntimeInstanceCapacity > 0 ? this.mnRuntimeInstanceCapacity : n; } int nConsume = Math.min( Math.min( n, nAllowedByRuntime ), Math.min( nFreeCapacity, this.mWaitingQueue.size() ) ); if ( nConsume <= 0 ) { return Collections.emptyList(); } List<TaskLaunchContext> result = new ArrayList<>( nConsume ); while ( nConsume > 0 ) { TaskLaunchContext context = this.mWaitingQueue.pollFirst(); if ( context == null ) { break; } try { consumer.tryConsume( context ); } catch ( TaskConsumeException e ) { ConsumeCompromisedPolice police = consumer.compromisedPolice(); switch ( police ) { case EvictionIgnore: { // Discard the task without re-queueing, 丢弃任务,不重新入队 --nConsume; continue; } case EvictionException: { e.setEvictionTask( context ); throw e; } case BreakException: default: { // Restore the task to its original position (queue head), 恢复任务到原队列位置(队头) this.mWaitingQueue.addFirst( context ); throw e; } } } Identification id = context.getTaskInstance().getId(); this.mRunningInstances.put( id, context ); ++this.mnUsedCapacity; result.add( context ); --nConsume; } return result; } finally { this.mLock.unlock(); } } @Override public Collection<TaskLaunchContext> consume( int n, TaskInstanceConsumer consumer ) throws TaskConsumeException { return this.consume( n, false, consumer ); } @Override public Collection<TaskLaunchContext> consume( TaskInstanceConsumer consumer ) throws TaskConsumeException { return this.consume( this.mnRuntimeInstanceCapacity, false, consumer ); } protected void addRemain( Collection<TaskLaunchContext> products, TaskLaunchContext context ) { boolean bSkipCurrent = true; for ( TaskLaunchContext remain : products ) { if ( bSkipCurrent ) { if ( remain == context ) { bSkipCurrent = false; } continue; } this.mWaitingQueue.addLast( remain ); } } @Override public Collection<TaskLaunchContext> pipeConsume( Collection<TaskLaunchContext> products, TaskInstanceConsumer consumer ) throws TaskDispatchException, TaskConsumeException { this.mLock.lock(); try { if ( products == null || products.isEmpty() ) { return Collections.emptyList(); } this.assertOfferCapacityLocked( products.size() ); int nFreeCapacity = this.pendingCapacity(); if ( nFreeCapacity <= 0 ) { for ( TaskLaunchContext context : products ) { this.mWaitingQueue.addLast( context ); } return Collections.emptyList(); } int nAllowedByRuntime = this.mnRuntimeInstanceCapacity > 0 ? this.mnRuntimeInstanceCapacity : products.size(); int nConsume = Math.min( Math.min( products.size(), nAllowedByRuntime ), nFreeCapacity ); if ( nConsume <= 0 ) { for ( TaskLaunchContext context : products ) { this.mWaitingQueue.addLast( context ); } return Collections.emptyList(); } List<TaskLaunchContext> consumed = new ArrayList<>( nConsume ); int nIndex = 0; for ( TaskLaunchContext context : products ) { if ( nIndex < nConsume ) { try { consumer.tryConsume( context ); } catch ( TaskConsumeException e ) { ConsumeCompromisedPolice police = consumer.compromisedPolice(); switch ( police ) { case EvictionIgnore: { --nConsume; continue; } case EvictionException: { this.addRemain( products, context ); e.setEvictionTask( context ); throw e; } case BreakException: default: { // Current task was not consumed, re-append it to the waiting tail, 当前任务未消费,重新入 waiting 队尾 this.mWaitingQueue.addLast( context ); // Re-enqueue all remaining untraversed products, 剩余未遍历的 products 全部入队 this.addRemain( products, context ); throw e; } } } Identification id = context.getTaskInstance().getId(); this.mRunningInstances.put( id, context ); ++this.mnUsedCapacity; consumed.add( context ); ++nIndex; } else { this.mWaitingQueue.addLast( context ); } } return consumed; } finally { this.mLock.unlock(); } } @Override public Collection<TaskLaunchContext> runningInstances() { this.mLock.lock(); try { return Collections.unmodifiableCollection( this.mRunningInstances.values() ); } finally { this.mLock.unlock(); } } @Override public void markTerminated( Identification id ) { this.mLock.lock(); try { TaskLaunchContext context = this.mRunningInstances.remove( id ); if ( context != null ) { this.mnUsedCapacity--; } } finally { this.mLock.unlock(); } } @Override public Collection<TaskLaunchContext> recycleTerminated( Collection<Identification> terminatedIds ) { this.mLock.lock(); try { if ( terminatedIds == null || terminatedIds.isEmpty() ) { return Collections.emptyList(); } List<TaskLaunchContext> recycled = new ArrayList<>( terminatedIds.size() ); for ( Identification id : terminatedIds ) { TaskLaunchContext context = this.mRunningInstances.remove( id ); if ( context != null ) { --this.mnUsedCapacity; recycled.add( context ); } } if ( recycled.isEmpty() ) { return Collections.emptyList(); } return recycled; } finally { this.mLock.unlock(); } } @Override public Collection<TaskLaunchContext> consumePending( TaskInstanceConsumer consumer ) throws TaskConsumeException { return this.consume( this.mnRuntimeInstanceCapacity, false, consumer ); } @Override public Collection<TaskLaunchContext> shiftPipeline( Collection<Identification> terminatedIds, TaskInstanceConsumer consumer ) throws TaskConsumeException { // Recycle terminated instances first to release capacity. // The returned collection only represents newly consumed contexts. // Recycled instances are intentionally not part of the return value, // since this method models a "release-then-refill" pipeline step. // Callers must not rely on the return value to infer recycle results. this.recycleTerminated( terminatedIds ); return this.consumePending( consumer ); } @Override public int pendingCapacity() { return this.mnMaxCapacity - this.mnUsedCapacity; } @Override public TaskLaunchContext getRunningContextById( Identification id ) { this.mLock.lock(); try { return this.mRunningInstances.get( id ); } finally { this.mLock.unlock(); } } @Override public int waitingSize() { this.mLock.lock(); try { return this.mWaitingQueue.size(); } finally { this.mLock.unlock(); } } @Override public int runningSize() { this.mLock.lock(); try { return this.mRunningInstances.size(); } finally { this.mLock.unlock(); } } @Override public boolean isFull() { this.mLock.lock(); try { return this.mnUsedCapacity >= this.mnMaxCapacity; } finally { this.mLock.unlock(); } } @Override public boolean isIdle() { this.mLock.lock(); try { return this.mnUsedCapacity == 0 && this.mWaitingQueue.isEmpty(); } finally { this.mLock.unlock(); } } @Override public Collection<TaskLaunchContext> drainAllWaiting() { this.mLock.lock(); try { if ( this.mWaitingQueue.isEmpty() ) { return Collections.emptyList(); } List<TaskLaunchContext> drained = new ArrayList<>( this.mWaitingQueue.size() ); while ( !this.mWaitingQueue.isEmpty() ) { TaskLaunchContext context = this.mWaitingQueue.pollFirst(); if ( context == null ) { break; } drained.add( context ); } return drained; } finally { this.mLock.unlock(); } } @Override public boolean isUsageCriticalCapacity() { this.mLock.lock(); try { return this.mnUsedCapacity >= this.mnMinCapacity; } finally { this.mLock.unlock(); } } @Override public void applyMaxCapacity( int nMaxCapacity ) { this.mLock.lock(); try { this.mnMaxCapacity = nMaxCapacity; } finally { this.mLock.unlock(); } } @Override public void applyMinCapacity( int nMinCapacity ) { this.mLock.lock(); try { this.mnMinCapacity = nMinCapacity; } finally { this.mLock.unlock(); } } @Override public void applyRuntimeInstanceCapacity( int nCapacity ) { this.mLock.lock(); try { this.mnRuntimeInstanceCapacity = nCapacity; } finally { this.mLock.unlock(); } } @Override public String getName() { return this.mszName; } @Override public int getMaxCapacity() { return this.mnMaxCapacity; } @Override public int getMinCapacity() { return this.mnMinCapacity; } @Override public int getUsedCapacity() { return this.mnUsedCapacity; } @Override public int getRuntimeInstanceCapacity() { return this.mnRuntimeInstanceCapacity; } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/ConsumeCompromisedPolice.java ================================================ package com.walnut.odin.dispatch; import com.pinecone.framework.system.prototype.Pinenut; public enum ConsumeCompromisedPolice implements Pinenut { EvictionIgnore("EvictionIgnore"), EvictionException("EvictionException"), BreakException("BreakException"), ; private final String value; ConsumeCompromisedPolice( String value ){ this.value = value; } public String getName(){ return this.value; } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/DispatchStrategy.java ================================================ package com.walnut.odin.dispatch; import java.util.Collection; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; public interface DispatchStrategy extends Pinenut { Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> dispatch( Collection<TaskExecutionProcessor> processors, Collection<TaskLaunchContext> contexts, TaskDispatcher dispatcher ) throws TaskDispatchException; }
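A hedged sketch of how a TaskInstanceConsumer plugs into the queue above, illustrating the ConsumeCompromisedPolice contract; the local-launch call and the batch size of 16 are illustrative assumptions, not taken from the repository:

    static Collection<TaskLaunchContext> sketchConsume( TaskExecutionQueue queue ) throws TaskConsumeException {
        TaskInstanceConsumer consumer = new TaskInstanceConsumer() {
            @Override public void tryConsume( TaskLaunchContext context ) throws TaskConsumeException {
                try { context.getTaskInstance().startLocalProcess(); }  // hypothetical launch path
                catch ( RuntimeException e ) { throw new TaskConsumeException( "launch failed", e, context ); }
            }
            @Override public ConsumeCompromisedPolice compromisedPolice() {
                // EvictionIgnore: a failed context is dropped and the batch continues;
                // BreakException would instead restore it to the queue head and rethrow.
                return ConsumeCompromisedPolice.EvictionIgnore;
            }
        };
        return queue.consume( 16, consumer );  // batch size is arbitrary here
    }
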
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/PipelineLaunchReport.java ================================================ package com.walnut.odin.dispatch; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.proc.UProcess; public interface PipelineLaunchReport extends Pinenut { Collection<UProcess> launchedProcesses(); Collection<TaskLaunchContext> launchedContext(); Collection<TaskLaunchContext> waitingContext(); boolean isPreparing(); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/QueueBadAllocatedException.java ================================================ package com.walnut.odin.dispatch; public class QueueBadAllocatedException extends TaskDispatchException { public QueueBadAllocatedException() { super(); } public QueueBadAllocatedException( String message ) { super(message); } public QueueBadAllocatedException( String message, Throwable cause ) { super(message, cause); } public QueueBadAllocatedException( Throwable cause ) { super(cause); } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskConsumeException.java ================================================ package com.walnut.odin.dispatch; public class TaskConsumeException extends TaskDispatchException { protected TaskLaunchContext evictionTask; public TaskConsumeException() { super(); } public TaskConsumeException( String message ) { super(message); } public TaskConsumeException( String message, Throwable cause, TaskLaunchContext context ) { super(message, cause); this.evictionTask = context; } public TaskConsumeException( Throwable cause, TaskLaunchContext context ) { super(cause); this.evictionTask = context; } public TaskConsumeException( String message, Throwable cause ) { super(message, cause); } public TaskConsumeException( Throwable cause ) { super(cause); } public TaskConsumeException( String message, TaskLaunchContext context ) { super(message); this.evictionTask = context; } public TaskLaunchContext getEvictionTask() { return this.evictionTask; } public void setEvictionTask( TaskLaunchContext evictionTask ) { this.evictionTask = evictionTask; } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskDispatchException.java ================================================ package com.walnut.odin.dispatch; public class TaskDispatchException extends Exception { public TaskDispatchException() { super(); } public TaskDispatchException( String message ) { super(message); } public TaskDispatchException( String message, Throwable cause ) { super(message, cause); } public TaskDispatchException( Throwable cause ) { super(cause); } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskDispatcher.java ================================================ package com.walnut.odin.dispatch; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.proc.UProcess; import com.walnut.odin.dispatch.entity.TaskProcessorEntity; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.troll.InstanceLaunchException; import com.walnut.odin.task.troll.LaunchFeature; import com.walnut.odin.task.troll.TaskExecutionLauncher; public interface TaskDispatcher extends Pinenut { TaskExecutionLauncher taskExecutionLauncher(); void registerProcessor( TaskExecutionProcessor processor ); TaskProcessorEntity registerProcessor( String szProcessorName, long nClientId ) throws IllegalArgumentException; void unregisterProcessor( String szProcessorName ); void unregisterProcessor( long nClientId ); Collection<TaskExecutionProcessor> fetchProcessors(); void setProcessorAffinity( String szProcessorName, TaskLaunchContext launchContext ); TaskExecutionProcessor getAffinityTasks( Identification taskId ); Collection<TaskLaunchContext> queryAffinityTasks( String szProcessorName ); PipelineLaunchReport pipeCreate( Collection<TaskLaunchContext> contexts ) throws InstanceLaunchException, TaskDispatchException; PipelineLaunchReport pipeLaunch( Collection<TaskLaunchContext> contexts ) throws InstanceLaunchException, TaskDispatchException; UProcess create( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException; UProcess launch( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException; }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskExecutionProcessor.java ================================================ package com.walnut.odin.dispatch; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.deploy.Server; import com.pinecone.hydra.proc.UProcess; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.troll.InstanceLaunchException; import com.walnut.odin.task.troll.LaunchFeature; public interface TaskExecutionProcessor extends Pinenut { String getName(); Server getDeployClusterServer(); String getClusterPath(); String getClusterName(); long getControlClientId(); TaskExecutionQueue getTaskExecutionQueue(); boolean isLocal(); int getPriority(); boolean isExclusive(); TaskLaunchContext getTaskLaunchContextByPID( GUID pid ); int getRunningSize(); int getWaitingSize(); UProcess directlyCreate( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException; UProcess directlyLaunch( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException; PipelineLaunchReport recycleTerminated( Collection<Identification> terminatedIds ); PipelineLaunchReport launchsPending() throws TaskDispatchException; PipelineLaunchReport shiftLaunchsPipeline( Collection<Identification> terminatedIds ) throws TaskDispatchException; PipelineLaunchReport prepare( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException; PipelineLaunchReport pipeCreate( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException; PipelineLaunchReport pipeLaunch( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException; }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskExecutionQueue.java ================================================ package com.walnut.odin.dispatch; import java.util.Collection; import com.pinecone.framework.util.id.Identification; public interface TaskExecutionQueue extends TaskQueueMeta { void applyMaxCapacity( int maxCapacity ); void applyMinCapacity( int minCapacity ); void applyRuntimeInstanceCapacity( int capacity ); boolean isUsageCriticalCapacity(); void offer( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException; void offer( TaskLaunchContext context ) throws TaskDispatchException; Collection<TaskLaunchContext> consume( int n, boolean bForce, TaskInstanceConsumer consumer ) throws TaskConsumeException; Collection<TaskLaunchContext> consume( int n, TaskInstanceConsumer consumer ) throws TaskConsumeException; Collection<TaskLaunchContext> consume( TaskInstanceConsumer consumer ) throws TaskConsumeException; Collection<TaskLaunchContext> pipeConsume( Collection<TaskLaunchContext> products, TaskInstanceConsumer consumer ) throws TaskDispatchException, TaskConsumeException; Collection<TaskLaunchContext> runningInstances(); void markTerminated( Identification id ); Collection<TaskLaunchContext> recycleTerminated( Collection<Identification> terminatedIds ); Collection<TaskLaunchContext> consumePending( TaskInstanceConsumer consumer ) throws TaskConsumeException; Collection<TaskLaunchContext> shiftPipeline( Collection<Identification> terminatedIds, TaskInstanceConsumer consumer ) throws TaskConsumeException; int pendingCapacity(); int waitingSize(); int runningSize(); boolean isFull(); boolean isIdle(); Collection<TaskLaunchContext> drainAllWaiting(); TaskLaunchContext getRunningContextById( Identification id ); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskInstanceConsumer.java ================================================ package com.walnut.odin.dispatch; import com.pinecone.framework.system.prototype.Pinenut; public interface TaskInstanceConsumer extends Pinenut { void tryConsume( TaskLaunchContext context ) throws TaskConsumeException; ConsumeCompromisedPolice compromisedPolice(); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskLaunchContext.java ================================================ package com.walnut.odin.dispatch; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.proc.UProcess; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.troll.LaunchFeature; public interface TaskLaunchContext extends Pinenut { LaunchFeature getLaunchFeature(); RavenTaskInstance getTaskInstance(); default Identification getTaskId() { return this.getTaskInstance().getOwnedTask().getId(); } default Identification getTaskInstanceId() { return this.getTaskInstance().getId(); } String getAffinityProcessorName(); void setAffinityProcessorName( String affinityProcessorName ); UProcess getLaunchedProcess(); void afterProcessLaunched( UProcess launchedProcess ); static TaskLaunchContext of( RavenTaskInstance taskInstance, LaunchFeature launchFeature ) { return new TaskLaunchContext() { private String affinityProcessorName; private UProcess launchedProcess; @Override public LaunchFeature getLaunchFeature() { return launchFeature; } @Override public RavenTaskInstance getTaskInstance() { return taskInstance; } @Override public String getAffinityProcessorName() { return this.affinityProcessorName; } @Override public void setAffinityProcessorName( String affinityProcessorName ) { this.affinityProcessorName = affinityProcessorName; } @Override public UProcess getLaunchedProcess() { return this.launchedProcess; } @Override public void afterProcessLaunched( UProcess launchedProcess ) { this.launchedProcess = launchedProcess; } }; } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskQueueMeta.java ================================================ package com.walnut.odin.dispatch; import com.pinecone.framework.system.prototype.Pinenut; public interface TaskQueueMeta extends Pinenut { String getName(); int getMaxCapacity(); int getMinCapacity(); int getUsedCapacity(); int getRuntimeInstanceCapacity(); }
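A short wiring sketch for the context factory above; `queue`, `instance`, and `feature` are assumed inputs, and the processor name is a hypothetical placeholder:

    static void sketchOffer( TaskExecutionQueue queue, RavenTaskInstance instance, LaunchFeature feature ) throws TaskDispatchException {
        // Wrap the instance into a launch context via the static factory on the interface.
        TaskLaunchContext context = TaskLaunchContext.of( instance, feature );
        context.setAffinityProcessorName( "local-processor-0" );  // hypothetical processor name
        queue.offer( context );                                   // may throw QueueBadAllocatedException if over capacity
    }
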
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/ArchTaskQueueMeta.java ================================================ package com.walnut.odin.dispatch.entity; import java.util.Map; import com.walnut.odin.dispatch.TaskQueueMeta; public abstract class ArchTaskQueueMeta implements TaskQueueMeta { protected String mszName; protected int mnMaxCapacity; protected int mnMinCapacity; protected int mnUsedCapacity; protected int mnRuntimeInstanceCapacity; protected ArchTaskQueueMeta() { } public ArchTaskQueueMeta( Map jo ) { if ( jo == null ) { return; } Object name = jo.get( "name" ); if ( name instanceof String ) { this.mszName = (String) name; } Object maxCapacity = jo.get( "maxCapacity" ); if ( maxCapacity instanceof Number ) { this.mnMaxCapacity = ( (Number) maxCapacity ).intValue(); } Object minCapacity = jo.get( "minCapacity" ); if ( minCapacity instanceof Number ) { this.mnMinCapacity = ( (Number) minCapacity ).intValue(); } Object runtimeCapacity = jo.get( "runtimeInstanceCapacity" ); if ( runtimeCapacity instanceof Number ) { this.mnRuntimeInstanceCapacity = ( (Number) runtimeCapacity ).intValue(); } } @Override public String getName() { return this.mszName; } @Override public int getMaxCapacity() { return this.mnMaxCapacity; } @Override public int getMinCapacity() { return this.mnMinCapacity; } @Override public int getUsedCapacity() { return this.mnUsedCapacity; } @Override public int getRuntimeInstanceCapacity() { return this.mnRuntimeInstanceCapacity; } protected void setName( String szName ) { this.mszName = szName; } protected void setMaxCapacity( int nMaxCapacity ) { this.mnMaxCapacity = nMaxCapacity; } protected void setMinCapacity( int nMinCapacity ) { this.mnMinCapacity = nMinCapacity; } protected void setUsedCapacity( int nUsedCapacity ) { this.mnUsedCapacity = nUsedCapacity; } protected void setRuntimeInstanceCapacity( int nRuntimeInstanceCapacity ) { this.mnRuntimeInstanceCapacity = nRuntimeInstanceCapacity; } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/GenericTaskProcessorEntity.java ================================================ package com.walnut.odin.dispatch.entity; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.Server; import com.walnut.odin.dispatch.TaskQueueMeta; public class GenericTaskProcessorEntity implements TaskProcessorEntity { protected GUID mGuid; protected String mszName; protected Server mDeployClusterServer; protected String mszClusterPath; protected String mszClusterName; protected long mnControlClientId; protected boolean mbLocal; protected boolean mbExclusive; protected int mnPriority; protected TaskQueueMeta mTaskQueueMeta; protected boolean mbEnable; public GenericTaskProcessorEntity() { this.mTaskQueueMeta = new GenericTaskQueueEntity(); } @SuppressWarnings( "unchecked" ) public GenericTaskProcessorEntity( Map jo ) { this(); if ( jo == null ) { return; } Object name = jo.get( "name" ); if ( name instanceof String ) { this.mszName = (String) name; } Object clusterPath = jo.get( "clusterPath" ); if ( clusterPath instanceof String ) { this.mszClusterPath = (String) clusterPath; } Object clusterName = jo.get( "clusterName" ); if ( clusterName instanceof String ) { this.mszClusterName = (String) clusterName; } Object controlClientId = jo.get( "controlClientId" ); if ( controlClientId instanceof Number ) { this.mnControlClientId = ( (Number) controlClientId ).longValue(); } Object local = jo.get(
"local" ); if ( local instanceof Boolean ) { this.mbLocal = (Boolean) local; } Object priority = jo.get( "priority" ); if ( priority instanceof Number ) { this.mnPriority = ( (Number) priority ).intValue(); } Object queueMeta = jo.get( "queueMeta" ); if ( queueMeta instanceof Map ) { this.mTaskQueueMeta = new GenericTaskQueueEntity( (Map)queueMeta ); } } @Override public GUID getGuid() { return this.mGuid; } @Override public String getName() { return this.mszName; } @Override public Server getDeployClusterServer() { return this.mDeployClusterServer; } @Override public String getClusterPath() { return this.mszClusterPath; } @Override public String getClusterName() { return this.mszClusterName; } @Override public long getControlClientId() { return this.mnControlClientId; } @Override public boolean isLocal() { return this.mbLocal; } @Override public boolean isExclusive() { return this.mbExclusive; } @Override public int getPriority() { return this.mnPriority; } @Override public TaskQueueMeta getTaskQueueMeta() { return this.mTaskQueueMeta; } @Override public boolean isEnable() { return this.mbEnable; } public void setEnable( boolean enable ) { this.mbEnable = enable; } public void setExclusive( boolean exclusive ) { this.mbExclusive = exclusive; } public void setGuid( GUID guid ) { this.mGuid = guid; } public void setName(String name ) { this.mszName = name; } public String getQueueName() { return this.mTaskQueueMeta != null ? this.asTaskQueueMeta().getName() : null; } public Integer getQueueMaxCapacity() { return this.mTaskQueueMeta != null ? this.asTaskQueueMeta().getMaxCapacity() : null; } public Integer getQueueMinCapacity() { return this.mTaskQueueMeta != null ? this.asTaskQueueMeta().getMinCapacity() : null; } public Integer getQueueRuntimeInstanceCapacity() { return this.mTaskQueueMeta != null ? 
this.asTaskQueueMeta().getRuntimeInstanceCapacity() : null; } public void setDeployClusterServer( Server server ) { this.mDeployClusterServer = server; } public void setClusterPath( String clusterPath ) { this.mszClusterPath = clusterPath; } public void setClusterName( String clusterName ) { this.mszClusterName = clusterName; } @Override public void setControlClientId( long controlClientId ) { this.mnControlClientId = controlClientId; } public void setLocal( boolean bLocal ) { this.mbLocal = bLocal; } public void setPriority( int priority ) { this.mnPriority = priority; } public void setTaskQueueMeta( TaskQueueMeta queueMeta ) { this.mTaskQueueMeta = queueMeta; } protected ArchTaskQueueMeta asTaskQueueMeta() { return (ArchTaskQueueMeta) this.mTaskQueueMeta; } public void setQueueName( String queueName ) { this.asTaskQueueMeta().setName( queueName ); } public void setQueueMaxCapacity( int nMaxCapacity ) { this.asTaskQueueMeta().setMaxCapacity( nMaxCapacity ); } public void setQueueMinCapacity( int nMinCapacity ) { this.asTaskQueueMeta().setMinCapacity( nMinCapacity ); } public void setQueueUsedCapacity( int nUsedCapacity ) { this.asTaskQueueMeta().setUsedCapacity( nUsedCapacity ); } public void setQueueRuntimeInstanceCapacity( int nRuntimeInstanceCapacity ) { this.asTaskQueueMeta().setRuntimeInstanceCapacity( nRuntimeInstanceCapacity ); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/GenericTaskQueueEntity.java ================================================ package com.walnut.odin.dispatch.entity; import java.util.Map; public class GenericTaskQueueEntity extends ArchTaskQueueMeta { public GenericTaskQueueEntity() { } public GenericTaskQueueEntity( Map jo ) { super( jo ); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/TaskProcessorEntity.java ================================================ package com.walnut.odin.dispatch.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.deploy.Server; import com.walnut.odin.dispatch.TaskQueueMeta; public interface TaskProcessorEntity extends Pinenut { GUID getGuid(); String getName(); Server getDeployClusterServer(); String getClusterPath(); String getClusterName(); long getControlClientId(); void setControlClientId( long controlClientId ); boolean isLocal(); boolean isExclusive(); int getPriority(); boolean isEnable(); TaskQueueMeta getTaskQueueMeta(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/ProcessLifecycleExaminer.java ================================================ package com.walnut.odin.proc; import com.pinecone.framework.system.regime.Examiner; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.image.ImageModifier; /** * ProcessLifecycleExaminer * Process Lifecycle Manager and Runtime Examiner * Manages process lifecycles, action auditing, and runtime inspection. 
* 进程生命周期管理与运行检察器,管理进程生命周期与行为审计、检查 */ public interface ProcessLifecycleExaminer extends Examiner { void startProcess( UProcess process ); ImageModifier imageModifier(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/ProcessRemoteEventHandler.java ================================================ package com.walnut.odin.proc; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.proc.event.ProcessEvent; public interface ProcessRemoteEventHandler extends Pinenut { void fired( long pmClientId, ProcessEvent event, Object caused ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcess.java ================================================ package com.walnut.odin.proc; import com.pinecone.hydra.proc.RemoteUProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; import java.time.LocalDateTime; public interface RemoteProcess extends RemoteUProcess { long getControlClientId(); LocalDateTime remoteGetEndTime(); LocalDateTime remoteGetLastUpdateTime(); UProcessRuntimeMeta retrieveRemoteRuntimeMeta() throws RemoteProcessLifecycleException; void addRemoteEventHandler( ProcessRemoteEventHandler handler ) ; void removeRemoteEventHandler( ProcessRemoteEventHandler handler ) ; int remoteEventHandlerSize( ) ; void notifyRemoteEvent( long pmClientId, ProcessEvent event, Object caused ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessLifecycleException.java ================================================ package com.walnut.odin.proc; public class RemoteProcessLifecycleException extends RemoteProcessServiceException { public RemoteProcessLifecycleException() { super(); } public RemoteProcessLifecycleException( String message ) { super(message); } public RemoteProcessLifecycleException( String message, Throwable cause ) { super(message, cause); } public RemoteProcessLifecycleException( Throwable cause ) { super(cause); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessManagerNode.java ================================================ package com.walnut.odin.proc; import java.net.URI; import java.util.Collection; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.event.ProcessLifecycleHandler; import com.pinecone.hydra.proc.image.EntryPointRunnable; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.image.URLImageLoader; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; public interface RemoteProcessManagerNode extends Slf4jTraceable { void startService () throws RemoteProcessServiceRPCException; void terminateService () throws IllegalStateException; GuidAllocator getGuidAllocator(); ProcessManager localProcessManager(); URLImageLoader imageLoader(); ExecutionImage queryExecutionImage( String path ); ExecutionImage queryExecutionImage( URI uri ); RuntimeSystem superiorSystem(); void registerLocalScopeExecutionImage ( String 
dirPath, ExecutionImage image ); void register( UProcess that ); void erase( UProcess that ); UProcess getProcess( GUID pid ); /** * Checks only whether the current node directly owns the specified process, without involving any child nodes or proxy mirrors. * 仅检查当前节点自身是否直接持有该进程,不涉及任何下级节点或代理镜像。 */ boolean hasOwnProcess( GUID pid ); /** * Determines whether the specified process exists in the current node or any of its child nodes. * 判断当前节点或其下级节点中是否存在指定进程。 */ boolean containProcess( GUID pid ); UProcessRuntimeMeta queryProcessRuntimeMeta( GUID pid ) throws RemoteProcessLifecycleException ; Collection searchProcessesByName( String procName ) ; Collection searchProcessesByNameNoCase( String procName ); RemoteProcessManagerNode addProcessLifecycleHandler( ProcessLifecycleHandler handler ); RemoteProcessManagerNode removeProcessLifecycleHandler( ProcessLifecycleHandler handler ); int getProcessLifecycleHandlersSize(); @Unsafe void notifyProcessLifecycleHandlers( String imageAddress, EntryPointRunnable runnable, ProcessEvent event ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessServiceException.java ================================================ package com.walnut.odin.proc; import com.pinecone.framework.system.prototype.Pinenut; public class RemoteProcessServiceException extends Exception implements Pinenut { public RemoteProcessServiceException() { super(); } public RemoteProcessServiceException( String message ) { super(message); } public RemoteProcessServiceException( String message, Throwable cause ) { super(message, cause); } public RemoteProcessServiceException( Throwable cause ) { super(cause); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessServiceRPCException.java ================================================ package com.walnut.odin.proc; public class RemoteProcessServiceRPCException extends RemoteProcessServiceException { public RemoteProcessServiceRPCException() { super(); } public RemoteProcessServiceRPCException( String message ) { super(message); } public RemoteProcessServiceRPCException( String message, Throwable cause ) { super(message, cause); } public RemoteProcessServiceRPCException( Throwable cause ) { super(cause); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteTerminationStatus.java ================================================ package com.walnut.odin.proc; public enum RemoteTerminationStatus { // === Normal === Expected (0x00), // Expected termination, 计划内正常结束 Error (0x01), // Unexpected termination, 程序内部异常、错误 HostKilled (0x02), // Affiliated otter-host-process termination, 由Daemon执行宿主进程死刑(宿主JVM进程终止) InitFailure (0x03), // Initialization Failure, 初始化构造失败(尚未真正开始执行) // === Signal === SignalInterrupted (0x20), // Voluntary Interrupted (SIGINT), 收到信号程序主动中断 (走线程信号中断) SignalApoptosis (0x21), // Voluntary Apoptosis (SIGAPOP), 收到信号程序主动凋亡 (走程序性死亡协议) SignalElimination (0x22), // Destination killed by kernel (SIGELIM), 中央内核终末强制终止 (内部进程强制杀死) // === Procedure === Restarted (0x50), // Expected restart, 计划内重启(如热更新、配置变更) Transferred (0x51), // Expected transfer, 计划内进程置换(如负载均衡、资源调度) Preempted (0x52), // Expected preempt, 计划内被调度器抢占式终止(后期自动恢复) // === Internal Failure === AuthFailure (0x60), // Authority failure, 鉴权失败,安全策略拒绝 HeathyCheckFailed (0x61), // Heathy check failed, 健康检查失败被终止 ResourceExhausted (0x62), // Resource exhausted, 资源耗尽 
MigrationAborted (0x63), // Migration aborted, 进程迁移过程中失败或终止 // === Network Error === NetUnreachableTimeout (0x80), // Net RPC unreachable timeout, 失联超时状态 // === Checkpoint / Tombstone === Suspended (0xC001), // Suspended, 可恢复的挂起状态 SuspendedAborted (0xC002), // Suspended Aborted, 挂起状态被取消 FrozenSeal (0xC003), // Frozen and seal, 封印状态(进程终止并持久化挂起到磁盘) CheckpointCorrupted (0xC004); // Checkpoint corrupted, 检查点不可用,恢复失败 ; private final int code; RemoteTerminationStatus( int code ) { this.code = code; } public int getCode() { return this.code; } public static RemoteTerminationStatus getByCode( int code ) { for ( RemoteTerminationStatus type : RemoteTerminationStatus.values() ) { if ( type.code == code ) { return type; } } return null; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteVitalizationStatus.java ================================================ package com.walnut.odin.proc; public enum RemoteVitalizationStatus { New ( 0x00 ), Vitalized ( 0x01 ), NoImage ( 0x02 ), Error ( 0x03 ), AuthorityDenial ( 0x04 ), ; private final int code; RemoteVitalizationStatus( int code ) { this.code = code; } public int getCode() { return this.code; } public static RemoteVitalizationStatus getByCode( int code ) { for ( RemoteVitalizationStatus type : RemoteVitalizationStatus.values() ) { if ( type.code == code ) { return type; } } return null; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/client/RemoteProcessManagerClient.java ================================================ package com.walnut.odin.proc.client; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.uma.DuplexAppointClient; import com.walnut.odin.proc.RemoteProcessLifecycleException; import com.walnut.odin.proc.RemoteProcessManagerNode; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; import java.util.Map; public interface RemoteProcessManagerClient extends RemoteProcessManagerNode { /** * createLocalUProcess * Proactively creating local-UProcess. 
*/ UProcess createLocalUProcess( ExecutionImage image, UProcess parent, Map startupArgs, Map contextEnvironmentVars ); void startLocalUProcess( GUID pid ); long getClientId(); RemoteVitalizationResponse createLocalUProcess( UProcessMirrorDTO handlerDTO, UProcess[] lpProcess ) throws RemoteProcessLifecycleException; RemoteVitalizationResponse vitalizeLocalUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException; DuplexAppointClient duplexAppointClient(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/RemoteTerminationReport.java ================================================ package com.walnut.odin.proc.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.walnut.odin.proc.RemoteTerminationStatus; public class RemoteTerminationReport implements Pinenut { protected long mnLocalPID; protected GUID mPID; protected String mszPID; protected int mnTerminationStatus; protected int mnExitCode; protected String mszErrorMsg; public RemoteTerminationReport() { this.mnTerminationStatus = RemoteTerminationStatus.Expected.getCode(); } public long getLocalPID() { return this.mnLocalPID; } public void setLocalPID( long nLocalPID ) { this.mnLocalPID = nLocalPID; } public String getPID() { return this.mszPID; } public void setPID( String szPID ) { this.mszPID = szPID; } public void setTerminationStatus( int nStatus ) { this.mnTerminationStatus = nStatus; } public int getTerminationStatus() { return this.mnTerminationStatus; } public String getErrorMsg() { return this.mszErrorMsg; } public void setErrorMsg( String szErrorMsg ) { this.mszErrorMsg = szErrorMsg; } public void setProcessID( GUID pid ) { this.setPID( pid.toString() ); this.mPID = pid; } public GUID optProcessID() { return this.mPID; } public int getExitCode() { return this.mnExitCode; } public void setExitCode( int nExitCode ) { this.mnExitCode = nExitCode; } public void setRemoteTerminationStatus( RemoteTerminationStatus status ) { this.setTerminationStatus( status.getCode() ); } public RemoteTerminationStatus optStatus() { return RemoteTerminationStatus.getByCode( this.getTerminationStatus() ); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/RemoteVitalizationResponse.java ================================================ package com.walnut.odin.proc.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.walnut.odin.proc.RemoteVitalizationStatus; public class RemoteVitalizationResponse implements Pinenut { protected long mnLocalPID; protected String mszName; protected GUID mPID; protected String mszPID; protected int mnStatus; protected String mszErrorMsg; private String mStartupArguments; private String mEnvironmentVariables; private String mszImageAddress; private boolean mbImageAddressURI; public RemoteVitalizationResponse() { this.mnStatus = RemoteVitalizationStatus.Vitalized.getCode(); } public String getImageAddress() { return this.mszImageAddress; } public void setImageAddress( String szImageAddress ) { this.mszImageAddress = szImageAddress; } public void setImageAddressURI( boolean bImageAddressURI ) { this.mbImageAddressURI = bImageAddressURI; } public boolean isImageAddressURI() { return this.mbImageAddressURI; } public String getName() { return this.mszName; } public void setName( String szName ) { this.mszName = szName; } public long 
getLocalPID() { return this.mnLocalPID; } public void setLocalPID( long nLocalPID ) { this.mnLocalPID = nLocalPID; } public String getPID() { return this.mszPID; } public void setPID( String szPID ) { this.mszPID = szPID; } public void setStatus( int nStatus ) { this.mnStatus = nStatus; } public int getStatus() { return this.mnStatus; } public String getErrorMsg() { return this.mszErrorMsg; } public void setErrorMsg( String szErrorMsg ) { this.mszErrorMsg = szErrorMsg; } public void setProcessID( GUID pid ) { this.setPID( pid.toString() ); this.mPID = pid; } public GUID optProcessID() { return this.mPID; } public void setRemoteVitalizationStatus( RemoteVitalizationStatus status ) { this.setStatus( status.getCode() ); } public RemoteVitalizationStatus optStatus() { return RemoteVitalizationStatus.getByCode( this.getStatus() ); } public String getStartupArguments() { return mStartupArguments; } public void setStartupArguments( String startupArguments ) { this.mStartupArguments = startupArguments; } public String getEnvironmentVariables() { return mEnvironmentVariables; } public void setEnvironmentVariables( String environmentVariables ) { this.mEnvironmentVariables = environmentVariables; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/UProcessMirrorDTO.java ================================================ package com.walnut.odin.proc.entity; import com.pinecone.framework.system.prototype.Pinenut; public class UProcessMirrorDTO implements Pinenut { private String mszName; private long mnLocalPID; private String mszParentPID; private String mszProcessId; private String mStartupArguments; private String mEnvironmentVariables; private String mszImageAddress; private boolean mbImageAddressURI; public UProcessMirrorDTO( String name, long localPID, String processId, String startupArguments, String environmentVariables ) { this.mszName = name; this.mnLocalPID = localPID; this.mszProcessId = processId; this.mStartupArguments = startupArguments; this.mEnvironmentVariables = environmentVariables; } public UProcessMirrorDTO( String name, long localPID, String processId ) { this( name, localPID, processId, null, null ); } public UProcessMirrorDTO(){} public String getImageAddress() { return this.mszImageAddress; } public void setImageAddress( String szImageAddress ) { this.mszImageAddress = szImageAddress; } public void setImageAddressURI( boolean bImageAddressURI ) { this.mbImageAddressURI = bImageAddressURI; } public boolean isImageAddressURI() { return this.mbImageAddressURI; } public String getName() { return mszName; } public void setName( String name ) { this.mszName = name; } public String getParentPID() { return this.mszParentPID; } public void setParentPID( String szParentPID ) { this.mszParentPID = szParentPID; } public long getLocalPID() { return mnLocalPID; } public void setLocalPID( long pid ) { this.mnLocalPID = pid; } public String getPID() { return mszProcessId; } public void setPID( String pid ) { this.mszProcessId = pid; } public String getStartupArguments() { return mStartupArguments; } public void setStartupArguments( String startupArguments ) { this.mStartupArguments = startupArguments; } public String getEnvironmentVariables() { return mEnvironmentVariables; } public void setEnvironmentVariables( String environmentVariables ) { this.mEnvironmentVariables = environmentVariables; } } ================================================ FILE: 
Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/UProcessRuntimeMeta.java ================================================ package com.walnut.odin.proc.entity; import com.pinecone.framework.system.prototype.Pinenut; public class UProcessRuntimeMeta implements Pinenut { private String mszName; private long mnLocalPID; private String mszParentPID; private String mszProcessId; private String mszCreateTime; private String mszStartTime; private String mszEndTime; private String mszLastUpdateTime; private String mszMainThreadStatus; private boolean mbTerminated; public UProcessRuntimeMeta() { } public String getCreateTime() { return this.mszCreateTime; } public void setCreateTime( String createTime ) { this.mszCreateTime = createTime; } public String getStartTime() { return this.mszStartTime; } public void setStartTime( String startTime ) { this.mszStartTime = startTime; } public String getEndTime() { return this.mszEndTime; } public void setEndTime( String endTime ) { this.mszEndTime = endTime; } public String getLastUpdateTime() { return this.mszLastUpdateTime; } public void setLastUpdateTime( String lastUpdateTime ) { this.mszLastUpdateTime = lastUpdateTime; } public String getMainThreadStatus() { return this.mszMainThreadStatus; } public void setMainThreadStatus( String mainThreadStatus ) { this.mszMainThreadStatus = mainThreadStatus; } public boolean isTerminated() { return this.mbTerminated; } public void setTerminated( boolean terminated ) { this.mbTerminated = terminated; } public String getName() { return this.mszName; } public void setName( String name ) { this.mszName = name; } public String getParentPID() { return this.mszParentPID; } public void setParentPID( String szParentPID ) { this.mszParentPID = szParentPID; } public long getLocalPID() { return mnLocalPID; } public void setLocalPID( long pid ) { this.mnLocalPID = pid; } public String getPID() { return mszProcessId; } public void setPID( String pid ) { this.mszProcessId = pid; } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/proc/server/RemoteProcessManagerServer.java ================================================ package com.walnut.odin.proc.server; import java.net.URI; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.uma.DuplexAppointServer; import com.walnut.odin.proc.RemoteProcess; import com.walnut.odin.proc.RemoteProcessLifecycleException; import com.walnut.odin.proc.RemoteProcessManagerNode; import com.walnut.odin.proc.RemoteProcessServiceRPCException; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; public interface RemoteProcessManagerServer extends RemoteProcessManagerNode { DuplexAppointServer duplexAppointServer(); void registerProcess( long clientId, UProcessMirrorDTO processDTO ); void startRemoteUProcess( GUID pid ) throws RemoteProcessServiceRPCException; RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException; RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String imagePath, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException; RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws 
RemoteProcessLifecycleException; RemoteCreationResult createRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException; RemoteCreationResult createRemoteUProcess( long clientId, String imagePath, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException; RemoteCreationResult createRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException; @Override void register( UProcess that ); @Override void erase( UProcess that ); Long queryClientIdByPID( GUID pid ); RemoteProcess createMediatedRemoteProcess( long clientId, RemoteVitalizationResponse response ); RemoteProcess createMediatedRemoteProcess( long clientId, UProcessMirrorDTO processDTO ); class RemoteCreationResult { RemoteVitalizationResponse response; RemoteProcess process; public RemoteProcess getProcess() { return this.process; } public RemoteVitalizationResponse getResponse() { return this.response; } } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/system/RavenException.java ================================================ package com.walnut.odin.system; import com.pinecone.framework.system.prototype.Pinenut; public class RavenException extends Exception implements Pinenut { public RavenException() { super(); } public RavenException(String message ) { super(message); } public RavenException(String message, Throwable cause ) { super(message, cause); } public RavenException(Throwable cause ) { super(cause); } protected RavenException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/system/RavenRuntimeException.java ================================================ package com.walnut.odin.system; import com.pinecone.framework.system.PineRuntimeException; public class RavenRuntimeException extends PineRuntimeException { public RavenRuntimeException() { super(); } public RavenRuntimeException( String message ) { super(message); } public RavenRuntimeException( String message, Throwable cause ) { super(message, cause); } public RavenRuntimeException( Throwable cause ) { super(cause); } protected RavenRuntimeException( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/system/TaskCentralControl.java ================================================ package com.walnut.odin.system; import com.pinecone.framework.system.SynergicSystem; import com.pinecone.hydra.system.centrum.CentralControlSubsystem; public interface TaskCentralControl extends SynergicSystem, CentralControlSubsystem { } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/CentralizedTaskInstrument.java ================================================ package com.walnut.odin.task; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.task.kom.TaskInstrument; import 
com.pinecone.hydra.task.kom.UniformTaskInstrument; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.walnut.odin.task.service.CategoryService; import com.walnut.odin.task.source.RavenTaskMasterManipulator; import com.walnut.odin.task.system.TaskPathInvalidException; public interface CentralizedTaskInstrument extends TaskInstrument { RavenTaskConfig RAVEN_TASK_CONFIG = new GenericRavenTaskConfig(); UniformTaskInstrument getUniformTaskInstrument(); RavenTaskMasterManipulator getRavenTaskMasterManipulator(); CategoryService getCategoryService(); GUID assertGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException; GUID assertTaskGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException, IllegalArgumentException; RavenTask constructTask( TaskElement taskElement ); RavenTask constructTask( TaskElement taskElement, @Nullable Identification serviceId ); RavenTask createTask( TaskElement taskElement, @Nullable Identification serviceId ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/GenericRavenTaskConfig.java ================================================ package com.walnut.odin.task; import java.util.Map; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.system.ko.ArchKernelObjectConfig; public class GenericRavenTaskConfig extends ArchKernelObjectConfig implements RavenTaskConfig { protected String mszInstanceTitleTimeFormat = RavenTaskConstants.InstanceTitleTimeFormat; protected String mszDefaultDateTimeFormat = RavenTaskConstants.DefaultDateTimeFormat; protected int mnScheduleScanThreadCount = RavenTaskConstants.ScheduleScanThreadCount; protected long mnScheduleScanIdWindow = RavenTaskConstants.ScheduleScanIdWindow; protected String mszSchedulePartitionName = "__DEFAULT__"; protected JSONObject mScheduleGlobalAllocatorConfig; public GenericRavenTaskConfig() { super(); } public GenericRavenTaskConfig( JSONObject main ) { super( main.optJSONObject( "kernelConfig" ) ); JSONObject config = main.optJSONObject( "kernelConfig" ); this.mszInstanceTitleTimeFormat = (String) config.getOrDefault("instanceTitleTimeFormat", RavenTaskConstants.InstanceTitleTimeFormat); this.mszDefaultDateTimeFormat = (String) config.getOrDefault("defaultDateTimeFormat", RavenTaskConstants.DefaultDateTimeFormat); this.mnScheduleScanThreadCount = ( (Number) config.getOrDefault("scheduleScanThreadCount", RavenTaskConstants.ScheduleScanThreadCount) ).intValue(); this.mnScheduleScanIdWindow = ( (Number) config.getOrDefault("scheduleScanIdWindow", RavenTaskConstants.ScheduleScanIdWindow) ).longValue(); this.mszSchedulePartitionName = main.optJSONObject( "scheduler" ).optString( "partitionName", "__DEFAULT__" ); this.mScheduleGlobalAllocatorConfig = main.optJSONObject( "scheduler" ).optJSONObject( "globalAllocator" ); } @Override public String getInstanceTitleTimeFormat() { return this.mszInstanceTitleTimeFormat; } @Override public String getDefaultDateTimeFormat() { return this.mszDefaultDateTimeFormat; } @Override public int getScheduleScanThreadCount() { return this.mnScheduleScanThreadCount; } @Override public long getScheduleScanIdWindow() { return this.mnScheduleScanIdWindow; } @Override public JSONObject getScheduleGlobalAllocatorConfig() { return this.mScheduleGlobalAllocatorConfig; } @Override public String getSchedulePartitionName() { return this.mszSchedulePartitionName; } } 
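The JSON layout GenericRavenTaskConfig parses can be read off its constructor; the sketch below reconstructs that shape with illustrative values (the defaults mirror RavenTaskConstants in the next file, and the pattern behind DatePattern.NORM_DATETIME_PATTERN is assumed to be the usual "yyyy-MM-dd HH:mm:ss"). Note that the constructor dereferences both top-level objects directly, so a document missing either key appears to fail at construction time.

    class RavenTaskConfigShapeSketch {
        // Field names are the ones the constructor reads; values are illustrative only.
        static final String SAMPLE = """
            {
              "kernelConfig": {
                "instanceTitleTimeFormat": "yyyy_MM_dd_HH_mm_ss",
                "defaultDateTimeFormat":   "yyyy-MM-dd HH:mm:ss",
                "scheduleScanThreadCount": 8,
                "scheduleScanIdWindow":    1000
              },
              "scheduler": {
                "partitionName":   "__DEFAULT__",
                "globalAllocator": { }
              }
            }
            """;
    }
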
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTask.java ================================================ package com.walnut.odin.task; import com.pinecone.hydra.system.ups.UniformPyramidTask; import com.pinecone.hydra.task.Task; import com.pinecone.hydra.task.kom.entity.TaskElement; public interface RavenTask extends Task, UniformPyramidTask { RavenTaskInstance createInstance(); TaskElement getTaskElement(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTaskConfig.java ================================================ package com.walnut.odin.task; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.system.ko.KernelObjectConfig; public interface RavenTaskConfig extends KernelObjectConfig { String getInstanceTitleTimeFormat(); String getDefaultDateTimeFormat(); int getScheduleScanThreadCount(); long getScheduleScanIdWindow(); JSONObject getScheduleGlobalAllocatorConfig(); String getSchedulePartitionName(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTaskConstants.java ================================================ package com.walnut.odin.task; import com.pinecone.framework.util.datetime.DatePattern; public final class RavenTaskConstants { public static final String InstanceTitleTimeFormat = "yyyy_MM_dd_HH_mm_ss"; public static final String DefaultDateTimeFormat = DatePattern.NORM_DATETIME_PATTERN; public static final int ScheduleScanThreadCount = 8; public static final long ScheduleScanIdWindow = 1000L; } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTaskInstance.java ================================================ package com.walnut.odin.task; import java.net.URI; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.system.ko.MetaPersistenceException; import com.pinecone.hydra.task.TaskInstance; import com.pinecone.hydra.task.kom.instance.InstanceInstrument; public interface RavenTaskInstance extends TaskInstance { URI getProcessImageURI(); UProcess affinityProcess(); void startLocalProcess(); void startRemoteProcess(); void startRemoteProcess( boolean bDirectlyVitalize ); void startRemoteProcess( boolean bDirectlyVitalize, long processClientId ); void update() throws MetaPersistenceException; void persist() throws MetaPersistenceException; InstanceInstrument instanceInstrument(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/dto/CategoryTag.java ================================================ package com.walnut.odin.task.dto; import com.pinecone.framework.util.id.GUID; import com.pinecone.slime.entity.EnumIndexableEntity; public interface CategoryTag extends EnumIndexableEntity { void setEnumId( long id ); void setTaskGuid( GUID taskGuid ); GUID getTaskGuid(); void setCategoryName( String categoryName ); String getCategoryName(); void setCategoryType( String categoryType ); String getCategoryType(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/entity/pyramid/Category.java ================================================ package com.walnut.odin.task.entity.pyramid; import com.pinecone.slime.entity.EnumIndexableEntity; public interface Category extends EnumIndexableEntity { void setEnumId( long id ); void setName( String 
name ) ; String getName() ; void setAlias( String alias ) ; String getAlias() ; void setDescription( String description ) ; String getDescription() ; } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/entity/pyramid/CategoryType.java ================================================ package com.walnut.odin.task.entity.pyramid; public interface CategoryType extends Category { } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/entity/pyramid/TaskCategory.java ================================================ package com.walnut.odin.task.entity.pyramid; public interface TaskCategory extends Category { } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceAtlasAdjacentMapper.java ================================================ package com.walnut.odin.task.mapper; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.walnut.odin.conduct.entity.InstanceAtlasAdjacent; @IbatisDataAccessObject public interface InstanceAtlasAdjacentMapper { void insert( InstanceAtlasAdjacent instanceAtlasAdjacent ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceAtlasNodeMapper.java ================================================ package com.walnut.odin.task.mapper; import com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.walnut.odin.conduct.entity.InstanceAtlasNode; @IbatisDataAccessObject public interface InstanceAtlasNodeMapper extends InstanceNodeManipulator { void insert( InstanceAtlasNode instanceAtlasNode); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceEventMapper.java ================================================ package com.walnut.odin.task.mapper; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.walnut.odin.conduct.entity.InstanceEvent; @IbatisDataAccessObject public interface InstanceEventMapper { void insert( InstanceEvent instanceEvent ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceExecMapper.java ================================================ package com.walnut.odin.task.mapper; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.walnut.odin.conduct.entity.InstanceExec; @IbatisDataAccessObject public interface InstanceExecMapper { void insert( InstanceExec instanceExec ); void updateStateByInstanceGuid( InstanceExec execUpdate ); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/OdinTaskMappingDriver.java ================================================ package com.walnut.odin.task.mapper; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; public interface OdinTaskMappingDriver extends KOIMappingDriver { KOIMappingDriver getParentDriver(); } ================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/service/CategoryService.java ================================================ package com.walnut.odin.task.service; import java.util.List; import com.pinecone.framework.system.NonNull; import com.pinecone.framework.system.Nullable; 
import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.walnut.odin.task.dto.CategoryTag; import com.walnut.odin.task.system.TaskPathInvalidException;
public interface CategoryService extends Pinenut {
    void addCategoryTag ( CategoryTag categoryTag );
    void addCategoryTag ( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException;
    CategoryTag setCategoryTag ( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException;
    void updateCategoryTag ( CategoryTag categoryTag );
    CategoryTag queryOwnedTag( GUID taskGuid, String type, String name );
    List queryCategoryTag ( GUID taskGuid );
    List queryCategoryTag ( String taskTreePath );
    long countCategoryTag( String type, String name );
    List queryCategoryTag ( String type, String name, long offset, long pageSize );
    long countCategoryTagsByName( String name );
    List fetchCategoryTagByName ( String name, long offset, long pageSize );
    void purgeCategoryTag( @Nullable GUID taskGuid, @Nullable String type, @Nullable String name );
    void purgeCategoryTag( @NonNull String name );
    void purgeCategoryTag( @NonNull GUID taskGuid );
    void removeCategoryTag( @NonNull GUID taskGuid, @NonNull String type, @NonNull String name );
    void eraseCategoryTag( @NonNull String taskTreePath, @Nullable String type, @Nullable String name ) throws TaskPathInvalidException, IllegalArgumentException;
}
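A hedged usage sketch of the CategoryService interface above. The path, type, and tag values are hypothetical, and a concrete CategoryTag implementation is assumed, since CategoryTag itself is an interface:

// Hypothetical illustration only.
categoryService.addCategoryTag( "/reports/daily", tag );            // resolve the tree path, then attach the tag
List tags = categoryService.queryCategoryTag( "/reports/daily" );   // all tags owned by that task
categoryService.removeCategoryTag( taskGuid, "group", "etl" );      // remove one exact (guid, type, name) tag
categoryService.purgeCategoryTag( null, "group", null );            // @Nullable arguments act as wildcards, mirroring CategoryMappingManipulator.purge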
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/CategoryMappingManipulator.java ================================================
package com.walnut.odin.task.source; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.walnut.odin.task.dto.CategoryTag;
public interface CategoryMappingManipulator extends Pinenut { void insert( CategoryTag categoryTag ); List queryByTaskGuid ( GUID taskGuid ); CategoryTag queryOwnedTag( GUID taskGuid, String type, String name ); long countTag( String type, String name ); List queryTag ( String type, String name, long offset, long pageSize ); long countTagsByName( String name ); List fetchByName ( String name, long offset, long pageSize ); void update( CategoryTag categoryTag ); void purge( GUID taskGuid, String type, String name ); default void remove( GUID taskGuid, String type, String name ) { this.purge( taskGuid, type, name ); } default void purgeByName( String name ) { this.purge( null, null, name ); } default void purgeByTaskGuid( GUID taskGuid ) { this.purge( taskGuid, null, null ); } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/CategoryTypeManipulator.java ================================================
package com.walnut.odin.task.source; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.walnut.odin.task.entity.pyramid.CategoryType;
public interface CategoryTypeManipulator extends Pinenut { void insert( CategoryType categoryType ); CategoryType queryType( String name ); long countTypes(); List fetchType( long offset, long pageSize ); default List fetchType() { return this.fetchType( 0, this.countTypes() ); } void remove( String name ); void update( CategoryType categoryType ); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/RavenTaskMasterManipulator.java ================================================
package com.walnut.odin.task.source; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
public interface RavenTaskMasterManipulator extends KOIMasterManipulator { TaskMasterManipulator getTaskMasterManipulator(); KOIMappingDriver getTaskMappingDriver(); CategoryTypeManipulator getCategoryTypeManipulator(); TaskCategoryManipulator getTaskCategoryManipulator(); CategoryMappingManipulator getCategoryMappingManipulator(); TaskProcessorManipulator getTaskProcessorManipulator(); ScheduleManipulator getScheduleManipulator(); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/ScheduleManipulator.java ================================================
package com.walnut.odin.task.source; import com.walnut.odin.task.mapper.InstanceAtlasAdjacentMapper; import com.walnut.odin.task.mapper.InstanceAtlasNodeMapper; import com.walnut.odin.task.mapper.InstanceEventMapper; import com.walnut.odin.task.mapper.InstanceExecMapper;
public interface ScheduleManipulator { InstanceEventMapper getInstanceEventMapper(); InstanceAtlasAdjacentMapper getInstanceAtlasAdjacentMapper(); InstanceAtlasNodeMapper getInstanceAtlasNodeMapper(); InstanceExecMapper getInstanceExecMapper(); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/TaskCategoryManipulator.java ================================================
package com.walnut.odin.task.source; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.walnut.odin.task.entity.pyramid.TaskCategory;
public interface TaskCategoryManipulator extends Pinenut { void insert( TaskCategory taskCategory ); TaskCategory queryTaskCategory( String name ); long countCategories(); List fetchCategory( long offset, long pageSize ); default List fetchCategory() { return this.fetchCategory( 0, this.countCategories() ); } void remove( String name ); void update( TaskCategory kernelCategory ); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/TaskProcessorManipulator.java ================================================
package com.walnut.odin.task.source; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.walnut.odin.dispatch.entity.TaskProcessorEntity;
public interface TaskProcessorManipulator extends Pinenut { TaskProcessorEntity selectByProcessorName( String szProcessorName ); TaskProcessorEntity selectByGuid( GUID guid ); List selectByClusterName( String clusterName ); List selectAll(); int insert( TaskProcessorEntity entity ); int updateByGuid( TaskProcessorEntity entity ); int updateQueueCapacity( GUID guid, int maxCapacity, int minCapacity, int runtimeCapacity ); int deleteByGuid( GUID guid ); int disable( GUID guid ); int enable( GUID guid ); }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/system/TaskPathInvalidException.java ================================================
package com.walnut.odin.task.system; import com.walnut.odin.system.RavenRuntimeException;
public class TaskPathInvalidException extends RavenRuntimeException { public TaskPathInvalidException() { super(); } public TaskPathInvalidException( 
String path ) { super( "Path `" + path + "` is invalid." ); } public TaskPathInvalidException( String message, Throwable cause ) { super(message, cause); } public TaskPathInvalidException( Throwable cause ) { super(cause); } protected TaskPathInvalidException( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/InstanceLaunchException.java ================================================
package com.walnut.odin.task.troll;
public class InstanceLaunchException extends LaunchException { public InstanceLaunchException() { super(); } public InstanceLaunchException( String message ) { super(message); } public InstanceLaunchException( String message, Throwable cause ) { super(message, cause); } public InstanceLaunchException( Throwable cause ) { super(cause); } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/LaunchException.java ================================================
package com.walnut.odin.task.troll; import com.pinecone.framework.system.prototype.Pinenut;
public class LaunchException extends Exception implements Pinenut { public LaunchException() { super(); } public LaunchException( String message ) { super(message); } public LaunchException( String message, Throwable cause ) { super(message, cause); } public LaunchException( Throwable cause ) { super(cause); } }
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/LaunchFeature.java ================================================
package com.walnut.odin.task.troll; import java.net.URI; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEventHandler;
public class LaunchFeature implements Pinenut {
    private boolean retry;
    private URI designatedImageURI;
    private UProcess parentProcess;
    private String processorDesignated;
    private GUID parentPid;
    private Map startupArgs;
    private Map contextEnvironmentVars;
    private LocalDateTime bizTimeEpoch;
    private List<ProcessEventHandler> sysProcEventHandlers;
    public LaunchFeature() {
        this.bizTimeEpoch = LocalDateTime.now().minusDays( 1 ); // default business-time epoch: one day before now
    }
    public boolean isRetry() { return this.retry; }
    public URI getDesignatedImageURI() { return this.designatedImageURI; }
    public UProcess getParentProcess() { return this.parentProcess; }
    public Map getStartupArgs() { return this.startupArgs; }
    public Map getContextEnvironmentVars() { return this.contextEnvironmentVars; }
    public GUID getParentPid() { return this.parentPid; }
    public List<ProcessEventHandler> getSysProcEventHandlers() { return this.sysProcEventHandlers; }
    public String getProcessorDesignated() { return this.processorDesignated; }
    public LaunchFeature withProcessorDesignated( String processorName ) { this.processorDesignated = processorName; return this; }
    public LaunchFeature withParentPid( GUID pid ) { this.parentPid = pid; return this; }
    public LocalDateTime getBizTimeEpoch() { return this.bizTimeEpoch; }
    public void setBizTimeEpoch( LocalDateTime bizTimeEpoch ) { this.bizTimeEpoch = bizTimeEpoch; }
    public LaunchFeature withRetry( boolean retry ) { this.retry = retry; return this; }
    public LaunchFeature withDesignatedImageURI( URI designatedImageURI ) { this.designatedImageURI = designatedImageURI; return this; }
    public LaunchFeature withParentProcess( UProcess parent ) { this.parentProcess = parent; this.parentPid = parent.getPID(); return this; }
    public LaunchFeature withStartupArgs( Map startupArgs ) { this.startupArgs = startupArgs; return this; }
    public LaunchFeature withContextEnvironmentVars( Map contextEnvironmentVars ) { this.contextEnvironmentVars = contextEnvironmentVars; return this; }
    public LaunchFeature withSysProcEventHandlers( List<ProcessEventHandler> sysProcEventHandlers ) { this.sysProcEventHandlers = sysProcEventHandlers; return this; }
    public LaunchFeature withSysProcEventHandlers( ProcessEventHandler handler ) { if ( this.sysProcEventHandlers == null ) { this.sysProcEventHandlers = new ArrayList<>(); } this.sysProcEventHandlers.add( handler ); return this; }
}
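A hedged usage sketch of the LaunchFeature builder above; the image URI, processor name, and the parentProcess/exampleHandler variables are hypothetical placeholders:

// Hypothetical illustration only: compose launch parameters for a task instance.
LaunchFeature feature = new LaunchFeature()
        .withRetry( true )
        .withDesignatedImageURI( URI.create( "file:///opt/odin/images/example-task" ) )
        .withProcessorDesignated( "example-processor" )
        .withParentProcess( parentProcess )            // also records parentProcess.getPID() as parentPid
        .withSysProcEventHandlers( exampleHandler );   // appends to the handler list, creating it on first use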
================================================ FILE: Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/TaskExecutionLauncher.java ================================================
package com.walnut.odin.task.troll; import java.time.LocalDateTime; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.walnut.odin.task.RavenTaskInstance;
public interface TaskExecutionLauncher extends Manager {
    ProcessManager processManager();
    LocalDateTime evalBusinessTime( RavenTaskInstance instance, LocalDateTime biz );
    LocalDateTime evalBusinessTime( RavenTaskInstance instance );
    String evalBusinessTimeLabel( RavenTaskInstance instance, LocalDateTime biz );
    String evalBusinessTimeLabel( RavenTaskInstance instance );
    String evalInstanceName( RavenTaskInstance instance, LocalDateTime now, LocalDateTime bizTimeEpoch );
    String evalInstanceName( RavenTaskInstance instance, LocalDateTime bizTimeEpoch );
    void initializeInstance( RavenTaskInstance instance, LaunchFeature feature );
    UProcess createLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException;
    UProcess createRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException;
    UProcess launchLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException;
    UProcess launchRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException;
}
================================================ FILE: Odin/odin-framework-atlas/pom.xml ================================================
odin com.walnut.odin 2.5.1 odin-framework-atlas 2.5.1 4.0.0 com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.pinecone.slime.jelly jelly 2.1.0 compile com.pinecone.hydra.kom.driver.default hydra-kom-default-driver 2.1.0 compile com.pinecone.tritium hydra-system-tritium 2.1.0 compile com.walnut.odin odin-architecture 2.5.1 compile
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/GenericGraphStratumTape.java ================================================
package com.walnut.odin.atlas.advance; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue; import
com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue; import com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;
public class GenericGraphStratumTape implements GraphStratumTape {
    protected RuntimeAtlasInstrument mRuntimeAtlasInstrument;
    // StratumId => RuntimePriority => MegaDeflectPriorityQueue
    protected List<Map<Short, DeflectPriorityQueue>> mMegaDeflectPriorityQueues;
    protected DeflectPriorityQueue mExecutionPriorityQueue;
    protected VectorDAG mVectorDAG;
    public GenericGraphStratumTape( RuntimeAtlasInstrument runtimeAtlasInstrument, VectorDAG vectorDAG, KOIMappingDriver queueDrive ) { this.mRuntimeAtlasInstrument = runtimeAtlasInstrument; this.mVectorDAG = vectorDAG; ArrayList<Map<Short, DeflectPriorityQueue>> list = new ArrayList<>(); int stratumNum = this.mRuntimeAtlasInstrument.countStratum(vectorDAG.getAffiliateLayerGuid()); for( int i = 0; i < stratumNum; i++ ) { HashMap<Short, DeflectPriorityQueue> map = new HashMap<>(); int priorityNum = this.mRuntimeAtlasInstrument.countPriority(vectorDAG.getAffiliateLayerGuid(), (short) i); for( int j = 0; j < priorityNum; j++ ) { String segmentName = this.mRuntimeAtlasInstrument.querySegmentName(vectorDAG.getAffiliateLayerGuid(), (short) i, (short) j); MegaDeflectPriorityQueueMeta meta = new ConfigurableMegaDeflectPriorityQueueMeta(); meta.setQueueTableName("hydra_queue_nodes"); MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(queueDrive, 0, "segment_name", segmentName, meta ); map.put((short) j, magnitudeDPQueue ); } list.add( map ); } this.mMegaDeflectPriorityQueues = list; }
    @Override public GraphNode queryNodeByIndex( long index ) { long currentNum = 0; for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); i++ ) { for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) { currentNum += queue.size(); if( currentNum >= index ) { QueueElement queueElement = queue.getByIndex(index); return this.mVectorDAG.get( queueElement.getObjectGuid() ); } } } return null; }
    @Override public GUID queryNodeGuidByIndex( long index ) { long currentNum = 0; for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); i++ ) { for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) { currentNum += queue.size(); if( currentNum >= index ) { return queue.getByIndex( index ).getObjectGuid(); } } } return null; }
    @Override public List<GraphNode> fetchNodes( List<GUID> guids ) { ArrayList<GraphNode> nodes = new ArrayList<>(); for( GUID guid : guids ) { nodes.add( this.mVectorDAG.get( guid ) ); } return nodes; }
    @Override public List<GraphNode> fetchNodes( long offset, long limit ) { long currentNum = 0; long maxIndex = offset + limit; ArrayList<GraphNode> graphNodes = new ArrayList<>(); for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) { for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) { currentNum += queue.size(); if( currentNum >= offset ) { List<QueueElement> queueElements = queue.fetchElements(offset, limit); ArrayList<GraphNode> nodes = new ArrayList<>(); for( QueueElement element : queueElements ) { nodes.add( this.mVectorDAG.get( element.getObjectGuid() ) ); } graphNodes.addAll( nodes ); } if( currentNum > maxIndex ) { return graphNodes; } } } return null; }
    @Override public List<GraphNode> fetchNodes( long queuePriority, long offset, long limit ) { long currentNum = 0; long maxIndex = offset + limit; ArrayList<GraphNode> graphNodes = new ArrayList<>(); for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) { for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) { currentNum += queue.size(); if( currentNum >= offset ) { List<QueueElement> queueElements = queue.fetchElementByPriority( queuePriority, offset, limit ); ArrayList<GraphNode> nodes = new ArrayList<>(); for( QueueElement element : queueElements ) { nodes.add( this.mVectorDAG.get( element.getObjectGuid() ) ); } graphNodes.addAll( nodes ); } if( currentNum > maxIndex ) { return graphNodes; } } } return null; }
    @Override public List<GUID> fetchGuids( long offset, long limit ) { long currentNum = 0; long maxIndex = offset + limit; ArrayList<GUID> graphNodes = new ArrayList<>(); for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) { for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) { currentNum += queue.size(); if( currentNum >= offset ) { List<QueueElement> queueElements = queue.fetchElements(offset, limit); ArrayList<GUID> nodes = new ArrayList<>(); for( QueueElement element : queueElements ) { nodes.add( element.getObjectGuid() ); } graphNodes.addAll( nodes ); } if( currentNum > maxIndex ) { return graphNodes; } } } return null; }
    @Override public List<GUID> fetchGuids( long queuePriority, long offset, long limit ) { long currentNum = 0; long maxIndex = offset + limit; ArrayList<GUID> graphNodes = new ArrayList<>(); for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) { for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) { currentNum += queue.size(); if( currentNum >= offset ) { List<QueueElement> queueElements = queue.fetchElementByPriority( queuePriority, offset, limit ); ArrayList<GUID> nodes = new ArrayList<>(); for( QueueElement element : queueElements ) { nodes.add( element.getObjectGuid() ); } graphNodes.addAll( nodes ); } if( currentNum > maxIndex ) { return graphNodes; } } } return null; }
    @Override public int countStratum() { return this.mRuntimeAtlasInstrument.countStratum( this.mVectorDAG.getAffiliateLayerGuid() ); }
    @Override public DeflectPriorityQueue query( int stratumId, short runtimePriority ) { Map<Short, DeflectPriorityQueue> queueMap = this.mMegaDeflectPriorityQueues.get( stratumId ); if ( queueMap != null ) { return queueMap.get( runtimePriority ); } return null; }
    @Override public DeflectPriorityQueue getExecutionPriorityQueue() { return this.mExecutionPriorityQueue; }
}
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/GenericQueueEntity.java ================================================
package com.walnut.odin.atlas.advance; import com.pinecone.framework.util.id.GUID;
public class GenericQueueEntity implements QueueEntity { private GUID mGuid; private int mnStratum; public GenericQueueEntity(){} public GenericQueueEntity( GUID guid, int stratum ) { this.mGuid = guid; this.mnStratum = stratum; } @Override public void setGuid(GUID guid) { this.mGuid = guid; } @Override public GUID getGuid() { return this.mGuid; } @Override public void setStratum(int stratum) { this.mnStratum = stratum; } @Override public int getStratum() { return this.mnStratum; } }
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/GenericTapedBFSGraphAdvancer.java ================================================
package com.walnut.odin.atlas.advance; import com.walnut.odin.atlas.advance.strategy.PriorityProcessStrategy; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument; import com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue; import
com.pinecone.hydra.unit.iqueue.QueueExistManipulator; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import com.pinecone.hydra.unit.vgraph.VectorDAG; import java.util.List; public class GenericTapedBFSGraphAdvancer implements TapedBFSGraphStratumAdvancer { private RuntimeAtlasInstrument mRuntimeAtlasInstrument; private QueueExistManipulator mQueueExistManipulator; private DeflectPriorityQueue mDeflectPriorityQueue; private PriorityProcessStrategy mStrategy; public GenericTapedBFSGraphAdvancer( RuntimeAtlasInstrument runtimeAtlasInstrument, DeflectPriorityQueue deflectPriorityQueue, PriorityProcessStrategy strategy ) { this.mRuntimeAtlasInstrument = runtimeAtlasInstrument; this.mDeflectPriorityQueue = deflectPriorityQueue; this.mQueueExistManipulator = deflectPriorityQueue.getMasterManipulator().getQueueExistManipulator(); this.mStrategy = strategy; } public void traverse( VectorDAG vectorDAG ) { if( !this.mQueueExistManipulator.isExist( vectorDAG.getAffiliateLayerGuid() ) ) { this.mQueueExistManipulator.setQueueExist( vectorDAG.getAffiliateLayerGuid() ); this.mStrategy.process( vectorDAG ); } } @Override public List fetchExecuteNode( long offset, long limit ) { return this.mDeflectPriorityQueue.fetchElements( offset, limit ); } } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/QueueEntity.java ================================================ package com.walnut.odin.atlas.advance; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface QueueEntity extends Pinenut { void setGuid( GUID guid ); GUID getGuid(); void setStratum( int stratum ); int getStratum(); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/TapedBFSGraphStratumAdvancer.java ================================================ package com.walnut.odin.atlas.advance; public interface TapedBFSGraphStratumAdvancer extends GraphStratumAdvancer { } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/AtlasPriorityProcessStrategy.java ================================================ package com.walnut.odin.atlas.advance.strategy; import com.pinecone.hydra.unit.vgraph.VectorDAG; import java.util.ArrayList; import java.util.List; public class AtlasPriorityProcessStrategy implements PriorityProcessStrategy { protected List mStrategyPipeline; public AtlasPriorityProcessStrategy() { this.mStrategyPipeline = new ArrayList<>(); } @Override public void process( VectorDAG vectorDAG ) { for ( GraphPriorityProcessStrategy strategy : this.mStrategyPipeline ) { strategy.process( vectorDAG ); } } @Override public void addStrategy( GraphPriorityProcessStrategy strategy ) { this.mStrategyPipeline.add( strategy ); } } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/GraphPriorityProcessStrategy.java ================================================ package com.walnut.odin.atlas.advance.strategy; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.unit.vgraph.VectorDAG; public interface GraphPriorityProcessStrategy extends Pinenut { void process( VectorDAG vectorDAG ); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/MegaInDegreeFirstStrategy.java 
================================================
package com.walnut.odin.atlas.advance.strategy; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue; import com.pinecone.hydra.unit.iqueue.MegaStratumQueue; import com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement; import com.pinecone.hydra.unit.iqueue.entity.GenericStratumQueueElement; import com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;
public class MegaInDegreeFirstStrategy implements GraphPriorityProcessStrategy {
    private RuntimeAtlasInstrument mRuntimeAtlasInstrument;
    private DeflectPriorityQueue mDeflectPriorityQueue;
    private MegaStratumQueue mTempMegaStratumQueue;
    private LayerInstrument mLayerInstrument;
    private int mnPriority = 0;
    public MegaInDegreeFirstStrategy( RuntimeAtlasInstrument runtimeAtlasInstrument, DeflectPriorityQueue deflectPriorityQueue, MegaStratumQueue megaStratumQueue, LayerInstrument layerInstrument ) { this.mRuntimeAtlasInstrument = runtimeAtlasInstrument; this.mDeflectPriorityQueue = deflectPriorityQueue; this.mTempMegaStratumQueue = megaStratumQueue; this.mLayerInstrument = layerInstrument; }
    @Override public void process( VectorDAG vectorDAG ) {
        Layer layer = (Layer) this.mLayerInstrument.get(vectorDAG.getAffiliateLayerGuid());
        long handNodeNums = this.mLayerInstrument.countSourceNode( vectorDAG.getAffiliateLayerGuid() );
        long offset = 0;
        // TODO: make the per-sweep node batch size (hard-coded to 1000 below) configurable
        for ( long i = 0; i < handNodeNums; i += 1000 ) {
            List<GUID> handleGuids = this.mLayerInstrument.fetchSourceGuidsByTaskPriority(vectorDAG.getAffiliateLayerGuid(), offset, 1000);
            for (GUID handleGuid : handleGuids) {
                TaskElement taskElement = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(handleGuid);
                if (taskElement.getPriority() > this.mnPriority) { this.bfsGraph(vectorDAG, this.mnPriority, layer.getSinkGuids()); ++this.mnPriority; }
                GenericStratumQueueElement element = new GenericStratumQueueElement();
                element.setObjectGuid(handleGuid);
                element.setStratum((short) 0);
                this.mTempMegaStratumQueue.pushBack(element);
                ++offset;
            }
        }
        // After the sweep exits, enqueue all remaining nodes, demoting them directly to the lowest priority.
        this.bfsGraph( vectorDAG, 10, layer.getSinkGuids() );
    }
    protected void bfsGraph( VectorDAG vectorDAG, int priority, List<GUID> sinkNodes ) {
        while ( !this.mTempMegaStratumQueue.isEmpty() ) {
            QueueStratumElement pop = this.mTempMegaStratumQueue.popFront();
            GUID currentNodeGuid = pop.getObjectGuid();
            TaskElement taskElement = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(currentNodeGuid);
            GenericQueueElement element = new GenericQueueElement();
            element.setObjectGuid(currentNodeGuid);
            element.setPriority(taskElement.getPriority());
            this.mDeflectPriorityQueue.pushBack(element);
            this.mRuntimeAtlasInstrument.putStratumMeta( vectorDAG.getAffiliateLayerGuid(), (short) pop.getStratum(), (short) element.getPriority(), this.mDeflectPriorityQueue.getSegmentName() );
            if (sinkNodes != null && sinkNodes.contains(currentNodeGuid)) { continue; }
            long childNodeNum = vectorDAG.countChildNodeNum(currentNodeGuid);
            long childOffset = 0;
            for ( int i = 0; i < childNodeNum; i += 1000 ) {
                List<GUID> guids = vectorDAG.fetchChildNodeGuids(childOffset, 1000, currentNodeGuid);
                for (GUID guid : guids) {
                    TaskElement childtaskElement = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(guid);
                    if (childtaskElement.getPriority() <= priority) { GenericStratumQueueElement stratumQueueElement = new GenericStratumQueueElement(); stratumQueueElement.setObjectGuid(guid); stratumQueueElement.setStratum((short) (pop.getStratum() + 1)); this.mTempMegaStratumQueue.pushBack(stratumQueueElement); }
                }
            }
        }
    }
}
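A hedged wiring sketch for the strategy above; the atlasInstrument, deflectQueue, stratumQueue, layerInstrument, and vectorDAG variables are hypothetical placeholders, and the composition goes through the AtlasPriorityProcessStrategy pipeline shown earlier:

// Hypothetical illustration only: register the in-degree-first strategy and run it over a DAG.
AtlasPriorityProcessStrategy pipeline = new AtlasPriorityProcessStrategy();
pipeline.addStrategy( new MegaInDegreeFirstStrategy( atlasInstrument, deflectQueue, stratumQueue, layerInstrument ) );
pipeline.process( vectorDAG );   // each registered GraphPriorityProcessStrategy runs in order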
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/PriorityProcessStrategy.java ================================================
package com.walnut.odin.atlas.advance.strategy; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.unit.vgraph.VectorDAG;
public interface PriorityProcessStrategy extends Pinenut { void process( VectorDAG vectorDAG ); void addStrategy( GraphPriorityProcessStrategy strategy ); }
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/AnalyzeStage.java ================================================
package com.walnut.odin.atlas.flow;
public interface AnalyzeStage extends ConductFlow { }
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/ConductFlow.java ================================================
package com.walnut.odin.atlas.flow; import com.pinecone.hydra.system.flow.Flow;
public interface ConductFlow extends Flow { }
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/ConductStage.java ================================================
package com.walnut.odin.atlas.flow; import com.pinecone.hydra.system.flow.Stage;
public interface ConductStage extends Stage { }
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/MarshallingStage.java ================================================
package com.walnut.odin.atlas.flow;
public interface MarshallingStage extends ConductFlow { }
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/OptimizationStage.java ================================================
package com.walnut.odin.atlas.flow;
public interface OptimizationStage extends ConductStage { }
================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/graph/UniformRuntimeAtlas.java ================================================
package com.walnut.odin.atlas.graph; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.entity.ElementNode; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.kom.entity.TaskTreeNode; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.hydra.unit.vgraph.ArchAtlasInstrument; import com.pinecone.hydra.unit.vgraph.MagnitudeVectorDAG; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.hydra.unit.vgraph.VectorGraphConfig; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import
com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver; import com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator; import com.pinecone.slime.meta.TableIndexMeta; import com.walnut.odin.atlas.advance.GenericGraphStratumTape; import com.walnut.odin.atlas.advance.GraphStratumTape; import com.walnut.odin.atlas.graph.entity.TaskGraphNode; import com.walnut.odin.atlas.mapper.QueueStratumManipulator; import com.walnut.odin.atlas.mapper.RunAtlasMasterManipulator; import com.walnut.odin.atlas.mapper.TaskGraphManipulator; import java.util.List; public class UniformRuntimeAtlas extends ArchAtlasInstrument implements RuntimeAtlasInstrument { private TaskInstrument mTaskInstrument; private RunAtlasMasterManipulator mRuntimeMasterManipulator; private VectorGraphMasterManipulator mVectorGraphMasterManipulator; private TaskGraphManipulator mTaskGraphManipulator; private QueueStratumManipulator mQueueStratumManipulator; protected void init( TaskInstrument taskInstrument ) { this.mTaskInstrument = taskInstrument; this.mRuntimeMasterManipulator = (RunAtlasMasterManipulator) this.mAtlasMasterManipulator; this.mQueueStratumManipulator = this.mRuntimeMasterManipulator.getQueueStratumManipulator(); this.mVectorGraphMasterManipulator = this.mRuntimeMasterManipulator.getVectorGraphMasterManipulator(); this.mTaskGraphManipulator = (TaskGraphManipulator) this.mVectorGraphMasterManipulator.getVectorGraphManipulator(); } public UniformRuntimeAtlas( TaskInstrument taskInstrument, LayerInstrument layerInstrument, AtlasMappingDriver driver, VectorGraphConfig config ) { super( driver, config, layerInstrument ); this.init( taskInstrument ); } public UniformRuntimeAtlas( AtlasMappingDriver driver, TaskInstrument taskInstrument, LayerInstrument layerInstrument ) { super( driver, layerInstrument ); this.init( taskInstrument ); } @Override public TaskInstrument taskInstrument() { return this.mTaskInstrument; } @Override public GUID put( GraphNode graphNode ) { return super.put(graphNode); } @Override public void remove( GUID guid ) { super.remove(guid); } public TaskGraphNode query( GUID guid ) { return (TaskGraphNode) super.get(guid); } @Override public GraphNode queryGraphNodeByTaskGuid( GUID taskGuid ) { TaskGraphNode taskGraphNode = this.mTaskGraphManipulator.getNodeByTaskGuid( taskGuid ); GUID guid = taskGraphNode.getId(); return this.query(guid); } @Override public TaskElement queryTaskElementByGuid( GUID graphNodeGuid ) { GUID guid = this.mTaskGraphManipulator.queryTaskGuidByNodeId( graphNodeGuid ); TaskTreeNode taskTreeNode = (TaskTreeNode) this.mTaskInstrument.get( guid ); ElementNode elementNode = taskTreeNode.evinceElementNode(); if ( elementNode != null ) { return elementNode.evinceTaskElement(); } return null; } @Override public GraphStratumTape tapedGraphStratumAdvancer( VectorDAG vectorDAG, KOIMappingDriver driver ) { return new GenericGraphStratumTape( this, vectorDAG, driver ); } @Override public String querySegmentName( GUID vgraphGuid, short stratumId, short runtimePriority ) { return this.mQueueStratumManipulator.querySegmentName( vgraphGuid, stratumId, runtimePriority ); } @Override public int countStratum( GUID vgraphGuid ) { Integer i = this.mQueueStratumManipulator.countStratum( vgraphGuid ); Assert.notNull( i ); return i; } @Override public int countPriority( GUID vgraphGuid, short stratumId ) { Integer i = this.mQueueStratumManipulator.countPriority( vgraphGuid, stratumId ); Assert.notNull( i ); return i; } @Override public void putStratumMeta( GUID vgraphGuid, short stratumId, short 
runtimePriority, String segmentName ) { this.mQueueStratumManipulator.put( vgraphGuid, stratumId, runtimePriority, segmentName ); } @Override public VectorDAG toVectorDAG( Layer layer ) { return new MagnitudeVectorDAG( layer, this.mVectorGraphMasterManipulator, this.mVectorGraphConfig ); } @Override public VectorDAG getByLayerGuid( GUID layerGuid ) { TreeNode treeNode = this.mLayerInstrument.get( layerGuid ); if ( !( treeNode instanceof Layer ) ) { return null; } Layer layer = (Layer) treeNode; return this.toVectorDAG( layer ); } @Override public VectorDAG queryByPath( String path ) { EntityNode entityNode = this.mLayerInstrument.queryNode( path ); if ( !( entityNode instanceof Layer ) ) { return null; } Layer layer = (Layer) entityNode; return this.toVectorDAG( layer ); } @Override public List fetchParentIds(GUID graphNodeGuid) { return this.mTaskGraphManipulator.fetchParentIds( graphNodeGuid ); } @Override public void addChild( GUID parentGuid, GUID childGuid ) { this.mVectorGraphManipulator.addChild( parentGuid,childGuid ); } @Unsafe( "TestOnly" ) @Override public List fetchIsolatedNodesAll() { TableIndexMeta meta = this.getIsolatedNodeIndexMeta(); return this.fetchIsolatedNodesById( meta.getMinId(), meta.getMaxId() ); } @Override public List fetchIsolatedNodes( long offset, long limit ) { return this.mTaskGraphManipulator.fetchIsolatedNodes( offset, limit ); } @Override public List fetchIsolatedNodesById( long idStart, long idEnd ) { return this.mTaskGraphManipulator.fetchIsolatedNodesById( idStart, idEnd ); } @Override public TableIndexMeta getIsolatedNodeIndexMeta() { return this.mTaskGraphManipulator.selectIsolatedNodeIndexMeta(); } @Override public long queryMaxIsolatedNodePage( long limit ) { if ( limit <= 0 ) { throw new IllegalArgumentException( "Limit must be greater than zero." 
); } long nTotal = this.mTaskGraphManipulator.countIsolatedNodes(); if ( nTotal == 0 ) { return 0; } long nPage = nTotal / limit; if ( nTotal % limit != 0 ) { nPage++; } return nPage; } } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/graph/entity/TaskAtlasNode.java ================================================ package com.walnut.odin.atlas.graph.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import java.util.List; public class TaskAtlasNode implements TaskGraphNode { private long enumId; private GUID guid; private GUID taskGuid; private String name; private List parentIds; private String description; private boolean isolated; public TaskAtlasNode(){ } @Override public long getEnumId() { return this.enumId; } @Override public void setEnumId(long enumId) { this.enumId = enumId; } @Override public String getName() { return this.name; } @Override public GUID getId() { return this.guid; } @Override public void setId(GUID guid) { this.guid = guid; } @Override public GUID getTaskGuid() { return this.taskGuid; } @Override public void setTaskGuid( GUID taskGuid ) { this.taskGuid = taskGuid; } @Override public List getParentIds() { return this.parentIds; } @Override public void setParentIds(List parentIds) { this.parentIds = parentIds; } @Override public void setName(String name) { this.name = name; } @Override public String getDescription() { return this.description; } @Override public void setDescription(String description) { this.description = description; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } public boolean isIsolated() { return this.isolated; } public void setIsolated(boolean isolated ) { this.isolated = isolated; } } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/graph/entity/TaskGraphNode.java ================================================ package com.walnut.odin.atlas.graph.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; public interface TaskGraphNode extends GraphNode { void setName( String name ); GUID getTaskGuid(); void setTaskGuid( GUID taskGuid ); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/QueueStratumManipulator.java ================================================ package com.walnut.odin.atlas.mapper; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; public interface QueueStratumManipulator extends Pinenut { String querySegmentName( GUID vgraphGuid, short stratumId, short runtimePriority ); Integer countStratum( GUID vgraphGuid ); Integer countPriority( GUID vgraphGuid, short stratumId ); void put( GUID vgraphGuid, short stratumId, short runtimePriority, String segmentName ); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/QueueStratumMapper.java ================================================ package com.walnut.odin.atlas.mapper; import com.pinecone.framework.util.id.GUID; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; @Mapper @IbatisDataAccessObject public interface 
QueueStratumMapper extends QueueStratumManipulator { @Override String querySegmentName(@Param("vgraphGuid") GUID vgraphGuid, @Param("stratumId") short stratumId, @Param("runtimePriority") short runtimePriority); @Override Integer countStratum( GUID vgraphGuid ); @Override Integer countPriority( @Param("vgraphGuid") GUID vgraphGuid, @Param("stratumId") short stratumId ); @Override void put(GUID vgraphGuid, short stratumId, short runtimePriority, String segmentName); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/RunAtlasMasterManipulator.java ================================================ package com.walnut.odin.atlas.mapper; import com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator; public interface RunAtlasMasterManipulator extends AtlasMasterManipulator { QueueStratumManipulator getQueueStratumManipulator(); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/RuntimeVGraphMapper.java ================================================ package com.walnut.odin.atlas.mapper; import java.util.List; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.pinecone.slime.meta.TableIndex64Meta; import com.walnut.odin.atlas.graph.entity.TaskAtlasNode; import com.walnut.odin.atlas.graph.entity.TaskGraphNode; @SuppressWarnings("unchecked") @IbatisDataAccessObject public interface RuntimeVGraphMapper extends TaskGraphManipulator { @Override default void insertHandleNode( GraphNode graphNode ){ this.insertGraphNode(graphNode); } @Override void insertGraphNode( @Param("graphNode") GraphNode graphNode ); void insertNodeAdjacent( @Param("parentGuid") GUID parentGuid, @Param("childGuid") GUID childGuid ); @Override default void insertNodeByEdge(GUID parentGuid, GraphNode graphNode){ this.insertGraphNode(graphNode); this.insertNodeAdjacent(parentGuid,graphNode.getId()); } default void addChild(GUID parentGuid, GraphNode graphNode) { this.insertNodeAdjacent(parentGuid,graphNode.getId()); } @Override default void removeNode( GUID guid ) { this.removeGraphNode(guid); this.removeGraphAdjacent(guid); } void removeGraphNode( @Param("guid") GUID guid ); void removeGraphAdjacent( @Param("guid") GUID guid ); @Override TaskAtlasNode queryNode( @Param("guid") GUID guid ); @Override TaskGraphNode getNodeByTaskGuid( @Param("taskGuid") GUID taskGuid ); @Override GUID queryTaskGuidByNodeId( GUID nodeId ); @Override List fetchParentIds( @Param("guid") GUID guid ); List fetchChildNodes0( @Param("guid") GUID guid ); @Override default List fetchChildNodes( GUID guid ) { return (List) this.fetchChildNodes0( guid ); } @Override List fetchChildNodeGuids(GUID guid); @Override default List fetchRootNodes() { return (List) this.fetchRootNodes0(); } List fetchRootNodes0(); @Override List fetchChildNodeIds( @Param("guid") GUID guid ); List fetchNodesByName0( @Param("name") String name ); @Override default List fetchNodesByName( String name ) { return (List) this.fetchNodesByName0( name ); } @Override @Update("UPDATE `hydra_atlas_vgraph_nodes` " + "SET " + " `task_guid` = #{graphNode.taskGuid}, " + " `node_name` = #{graphNode.nodeName}, " + " `node_description` = #{graphNode.nodeDescription} " + "WHERE `guid` = 
#{graphNode.guid}") void updateNode( @Param("graphNode") GraphNode graphNode ); @Override List fetchHandleGuids( @Param("offset") long offset, @Param("limit") long limit); @Override @Select("SELECT havn.guid " + "FROM hydra_atlas_vgraph_nodes havn " + "JOIN hydra_atlas_vgraph_task_mapping vatm ON havn.guid = vatm.vgraph_node_guid " + "JOIN hydra_task_task_node httn ON vatm.task_guid = httn.guid " + "WHERE NOT EXISTS (" + "SELECT id FROM hydra_atlas_vgraph_adjacent hava WHERE hava.guid = havn.guid) " + "ORDER BY httn.priority " + "LIMIT #{limit} OFFSET #{offset}") List fetchHandleGuidsByTaskPriority(long offset, long limit); @Override @Select("SELECT COUNT(havn.guid) " + "FROM `hydra_atlas_vgraph_nodes` havn " + "WHERE NOT EXISTS (SELECT `id` FROM `hydra_atlas_vgraph_adjacent` `hava` WHERE `hava`.guid = `havn`.guid)") long countSourceNodes(); @Override List fetchDownstreamNodeGuid( @Param("nodeGuid") GUID nodeGuid, @Param("offset") long offset, @Param("limit") long limit); @Override List fetchUpstreamNodeGuid( @Param("nodeGuid") GUID nodeGuid, @Param("offset") long offset, @Param("limit") long limit); @Override long queryInDegree( @Param("nodeGuid") GUID nodeGuid); @Override long queryOutDegree( @Param("nodeGuid") GUID nodeGuid); @Override long getPriorityByInDegree(@Param("guid") GUID guid); @Override List limitFetchChildNodeGuids(@Param("offset") long offset, @Param("limit") long limit, @Param("guid") GUID guid); @Override long countChildNodeNums(GUID guid); @Override void addChild(GUID parentGuid, GUID childGuid); List fetchIsolatedNodes0( @Param("offset") long offset, @Param("limit") long limit ); @Override default List fetchIsolatedNodes( long offset, long limit ) { return ( List ) this.fetchIsolatedNodes0( offset, limit ); } List fetchIsolatedNodesById0( @Param("idStart") long idStart, @Param("idEnd") long idEnd ); @Override default List fetchIsolatedNodesById( long idStart, long idEnd ) { return ( List ) this.fetchIsolatedNodesById0( idStart, idEnd ); } @Override long countIsolatedNodes(); @Override TableIndex64Meta selectIsolatedNodeIndexMeta(); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/RuntimeVectorGraphPathCacheMapper.java ================================================ package com.walnut.odin.atlas.mapper; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import java.util.List; @IbatisDataAccessObject public interface RuntimeVectorGraphPathCacheMapper extends VectorGraphPathCacheManipulator { @Override @Insert("INSERT INTO hydra_atlas_vgraph_cache_path (`guid`, `path`) VALUES (#{path},#{guid})") void insert(@Param("path") String path, @Param("guid") GUID guid); @Override @Insert("INSERT INTO hydra_atlas_vgraph_cache_path (guid, path, long_path) VALUES (#{guid},#{path},#{longPath})") void insertLongPath( GUID guid, String path, String longPath ); @Override @Delete("DELETE FROM `hydra_atlas_vgraph_cache_path` WHERE `guid` = #{guid}") void remove ( GUID guid ); @Override @Select("SELECT `path` FROM `hydra_atlas_vgraph_cache_path` WHERE `guid` = #{guid}") List getPath (GUID guid ); @Override @Select("SELECT `guid` FROM `hydra_atlas_vgraph_cache_path` WHERE `path` = #{path}") GUID 
getNode ( String path ); @Override @Select("SELECT `guid` FROM `hydra_atlas_vgraph_cache_path` WHERE `path` = #{path}") GUID queryGUIDByPath( String path ); } ================================================ FILE: Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/TaskGraphManipulator.java ================================================ package com.walnut.odin.atlas.mapper; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator; import com.walnut.odin.atlas.graph.entity.TaskGraphNode; public interface TaskGraphManipulator extends VectorGraphManipulator { @Override TaskGraphNode queryNode( GUID guid ); TaskGraphNode getNodeByTaskGuid( GUID taskGuid ); GUID queryTaskGuidByNodeId( GUID nodeId ); } ================================================ FILE: Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceAtlasAdjacentMapper.xml ================================================ INSERT INTO odin_taks_ins_atlas_adjacent ( guid, parent_guid ) VALUES ( #{guid}, #{parentGuid} ) ================================================ FILE: Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceAtlasNodeMapper.xml ================================================ INSERT INTO odin_task_instance_atlas_nodes ( guid, instance_guid, node_name, is_isolated ) VALUES ( #{guid}, #{instanceGuid}, #{nodeName}, #{isIsolated} ) ================================================ FILE: Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceEventMapper.xml ================================================ INSERT INTO odin_task_instance_event ( guid, task_guid, instance_guid, instance_name, retry_times, current_retry_number, event_type, state, event_context, exec_time ) VALUES ( #{guid}, #{taskGuid}, #{instanceGuid}, #{instanceName}, #{retryTimes}, #{currentRetryNumber}, #{eventType}, #{state}, #{eventContext}, #{execTime} ) ================================================ FILE: Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceManipulator.xml ================================================ INSERT INTO odin_task_instance_exec ( task_guid, instance_guid, task_name, instance_name, processor_queue, cluster_name, exec_state, current_retry_number, retry_times, start_time, run_time, finish_time ) VALUES ( #{taskGuid}, #{instanceGuid}, #{taskName}, #{instanceName}, #{processorQueue}, #{clusterName}, #{execState}, #{currentRetryNumber}, #{retryTimes}, #{startTime}, #{runTime}, #{finishTime} ) UPDATE odin_task_instance_exec exec_state = #{execState}, start_time = #{startTime}, run_time = #{runTime}, finish_time = #{finishTime}, current_retry_number = #{currentRetryNumber}, WHERE instance_guid = #{instanceGuid} ================================================ FILE: Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/QueueStratumMapper.xml ================================================ INSERT INTO `hydra_atlas_queue_stratum` ( `runtime_priority`, `stratum_id`, `segment_name`, `vgraph_guid` ) VALUES ( #{runtimePriority}, #{stratumId}, #{segmentName}, #{vgraphGuid} ) ================================================ FILE: Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/RuntimeVGraphMapper.xml ================================================ INSERT INTO `hydra_atlas_vgraph_nodes` ( `guid`, `task_guid`, `node_name`, `node_description` ) VALUES ( #{graphNode.guid}, #{graphNode.taskGuid}, #{graphNode.name}, #{graphNode.description} ) INSERT INTO 
`hydra_atlas_vgraph_adjacent` ( `guid`, `linked_type`, `parent_guid` ) VALUES ( #{childGuid}, 'owned', #{parentGuid} ) DELETE FROM `hydra_atlas_vgraph_nodes` WHERE `guid` = #{guid} DELETE FROM `hydra_atlas_vgraph_adjacent` WHERE `guid` = #{guid} INSERT INTO `hydra_atlas_vgraph_adjacent` ( `guid`, `linked_type`, `parent_guid` ) VALUES ( #{childGuid}, 'owned', #{parentGuid} ) ================================================ FILE: Odin/odin-framework-conduct/pom.xml ================================================ odin com.walnut.odin 2.5.1 odin-framework-conduct 2.5.1 4.0.0 org.quartz-scheduler quartz 2.3.2 com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.pinecone.slime.jelly jelly 2.1.0 compile com.pinecone.hydra.kom.driver.default hydra-kom-default-driver 2.1.0 compile com.pinecone.tritium hydra-system-tritium 2.1.0 compile com.walnut.odin odin-architecture 2.5.1 compile com.walnut.odin odin-framework-runtime 2.5.1 compile com.walnut.odin odin-framework-atlas 2.5.1 compile ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/ProcessorLifecycleController.java ================================================ package com.walnut.odin.conduct; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.odin.conduct.entity.RegimentJoinRequest; import com.walnut.odin.conduct.entity.RegimentJoinResponse; @Controller @AddressMapping( "com.walnut.odin.conduct.ProcessorLifecycleIface." ) public class ProcessorLifecycleController implements Pinenut { private CollectiveTaskRegiment collectiveTaskRegiment; public ProcessorLifecycleController( CollectiveTaskRegiment collectiveTaskRegiment ) { this.collectiveTaskRegiment = collectiveTaskRegiment; } @AddressMapping( "joinRegiment" ) public RegimentJoinResponse joinRegiment( RegimentJoinRequest request ) { return this.collectiveTaskRegiment.invokeJoinRegiment( request ); } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/ProcessorLifecycleIface.java ================================================ package com.walnut.odin.conduct; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.stereotype.Iface; import com.walnut.odin.conduct.entity.RegimentJoinRequest; import com.walnut.odin.conduct.entity.RegimentJoinResponse; @Iface public interface ProcessorLifecycleIface extends Pinenut { RegimentJoinResponse joinRegiment( RegimentJoinRequest request ); } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/RavenCollectiveTaskLegionary.java ================================================ package com.walnut.odin.conduct; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.system.construction.Postpone; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.StringUtils; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UniformProcessManager; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.umc.wolf.client.UlfClient; import com.walnut.odin.conduct.entity.RegimentJoinRequest; import com.walnut.odin.conduct.entity.RegimentJoinResponse; import com.walnut.odin.proc.RemoteProcessServiceRPCException; import com.walnut.odin.proc.client.RavenRemoteProcessManagerClient; import 
com.walnut.odin.proc.client.RemoteProcessManagerClient; public class RavenCollectiveTaskLegionary implements CollectiveTaskLegionary { protected String mszNodeName; protected RemoteProcessManagerClient mRemoteProcessManagerClient; protected ProcessManager mLocalProcessManager; protected ProcessorLifecycleIface mProcessLifecycleIface; protected Logger mLogger; protected RavenCollectiveTaskLegionary( ProcessManager processManager, @Postpone RemoteProcessManagerClient pmClient, String szNodeName ) { this.mszNodeName = szNodeName; this.mLocalProcessManager = processManager; this.mRemoteProcessManagerClient = pmClient; this.mLogger = LoggerFactory.getLogger( this.getClass() ); } public RavenCollectiveTaskLegionary( String szNodeName, ProcessManager processManager, RemoteProcessManagerClient pmClient ) { this( processManager, pmClient, szNodeName ); } public RavenCollectiveTaskLegionary( String szNodeName, Processum superiorProcess, UlfClient rpcClient ) { this( new UniformProcessManager( superiorProcess, null, ( szNodeName + "-process-manager" ).toLowerCase(), "", null ), null, szNodeName ); this.mRemoteProcessManagerClient = new RavenRemoteProcessManagerClient( this.mLocalProcessManager, rpcClient ); } @Override public String getName() { return this.mszNodeName; } @Override public long getClientId() { return this.mRemoteProcessManagerClient.getClientId(); } @Override public ProcessManager processManager() { return this.mLocalProcessManager; } @Override public RemoteProcessManagerClient remoteProcessManagerClient() { return this.mRemoteProcessManagerClient; } @Override public void startService () throws RemoteProcessServiceRPCException { this.mRemoteProcessManagerClient.startService(); DuplexAppointClient duplexAppointClient = this.mRemoteProcessManagerClient.duplexAppointClient(); duplexAppointClient.compile( ProcessorLifecycleIface.class,false ); this.mProcessLifecycleIface = duplexAppointClient.getIface( ProcessorLifecycleIface.class ); } @Override public RegimentJoinResponse joinRegiment() throws RegimentException { RegimentJoinRequest request = new RegimentJoinRequest(); request.setClientId( this.mRemoteProcessManagerClient.getClientId() ); request.setNodeName( this.mszNodeName ); RegimentJoinResponse response = this.mProcessLifecycleIface.joinRegiment( request ); if ( response == null ) { throw new RegimentException( "response is null" ); } else if ( StringUtils.isNoneEmpty( response.getErrorMsg() ) ) { throw new RegimentException( response.getErrorMsg() ); } this.mLogger.info( "[NewProcessorRegister] " + "( name:`{}`, clientId:`{}`, clusterPath:`{}`, priority:`{}`, queueMaxCapacity:`{}`, runtimeCapacity:`{}` ) " + "", response.getName(), response.getControlClientId(), response.getClusterPath(), response.getPriority(), response.getQueueMaxCapacity(), response.getQueueRuntimeInstanceCapacity() ); return response; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/RavenCollectiveTaskRegiment.java ================================================ package com.walnut.odin.conduct; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.io.Tracer; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.ProcessManagerSystema; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.system.Hydrogen; import 
com.pinecone.hydra.system.component.LogStatuses; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.uma.DuplexAppointServer; import com.pinecone.hydra.umc.wolf.server.UlfServer; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.walnut.odin.conduct.entity.LaunchedContext; import com.walnut.odin.conduct.entity.RegimentJoinRequest; import com.walnut.odin.conduct.entity.RegimentJoinResponse; import com.walnut.odin.dispatch.RavenTaskDispatcher; import com.walnut.odin.dispatch.TaskDispatchException; import com.walnut.odin.dispatch.TaskDispatcher; import com.walnut.odin.dispatch.TaskQueueMeta; import com.walnut.odin.dispatch.entity.TaskProcessorEntity; import com.walnut.odin.proc.RemoteProcessServiceRPCException; import com.walnut.odin.proc.server.RavenRemoteProcessManagerServer; import com.walnut.odin.proc.server.RemoteProcessManagerServer; import com.walnut.odin.task.CentralizedTaskInstrument; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.troll.GenericRavenTask; import com.walnut.odin.task.RavenTask; import com.walnut.odin.task.troll.InstanceLaunchException; import com.walnut.odin.task.troll.LaunchFeature; import com.walnut.odin.task.troll.TaskExecutionLauncher; import com.walnut.odin.task.troll.TrollTaskExecutionLauncher; public class RavenCollectiveTaskRegiment implements CollectiveTaskRegiment { protected Hydrogen mSystem; protected Logger mLogger; protected CentralizedTaskInstrument mTaskInstrument; protected ProcessManager mProcessManager; protected RemoteProcessManagerServer mRemoteProcessManagerServer; protected TaskExecutionLauncher mTaskExecutionLauncher; protected TaskDispatcher mTaskDispatcher; protected void traceWelcomeInfo() { Tracer console = this.mSystem.console(); console.getOut().print( "---------------------------------------------------------------\n" ); console.getOut().print( "\u001B[34mRaven Odin Collective Task Regiment\u001B[0m\n" ); console.getOut().print( "\u001B[34mCentralized task lifecycle management and deployment system.\u001B[0m\n" ); console.getOut().print( "---------------------------------------------------------------\n" ); this.infoLifecycle( "Welcome to the Odin task orchestration system.", LogStatuses.StatusReady ); } protected void prepare_odin_collective_regiment_subsystem() { this.infoLifecycle( "Preparing Odin's army, constructing task-regiment.", LogStatuses.StatusStart ); this.mTaskExecutionLauncher = new TrollTaskExecutionLauncher( this ); this.infoLifecycle( "TaskExecutionLauncher: `" + this.mTaskExecutionLauncher.getClass().getName() + "` .", LogStatuses.StatusDone ); this.mTaskDispatcher = new RavenTaskDispatcher( this ); this.infoLifecycle( "TaskDispatcher: `" + this.mTaskDispatcher.getClass().getName() + "` .", LogStatuses.StatusDone ); this.traceWelcomeInfo(); } public RavenCollectiveTaskRegiment( Hydrogen system, CentralizedTaskInstrument taskInstrument, ProcessManager processManager, RemoteProcessManagerServer remoteProcessManagerServer ) { this.mSystem = system; this.mTaskInstrument = taskInstrument; this.mProcessManager = processManager; this.mRemoteProcessManagerServer = remoteProcessManagerServer; this.mLogger = LoggerFactory.getLogger( "OdinCollectiveTaskRegiment" ); this.prepare_odin_collective_regiment_subsystem(); } public RavenCollectiveTaskRegiment( ProcessManagerSystema system, CentralizedTaskInstrument taskInstrument, RemoteProcessManagerServer remoteProcessManagerServer ) { this( system, taskInstrument, system.processManager(),
remoteProcessManagerServer ); } public RavenCollectiveTaskRegiment( ProcessManagerSystema system, CentralizedTaskInstrument taskInstrument, UlfServer rpcServer ) { this( system, taskInstrument, system.processManager(), new RavenRemoteProcessManagerServer( system.processManager(), rpcServer ) ); } @Override public Logger getLogger() { return this.mLogger; } @Override public RemoteProcessManagerServer remoteProcessManagerServer() { return this.mRemoteProcessManagerServer; } @Override public void startRemoteProcessServer() throws RemoteProcessServiceRPCException { this.mRemoteProcessManagerServer.startService(); ProcessorLifecycleController controller = new ProcessorLifecycleController( this ); DuplexAppointServer duplexAppointServer = this.mRemoteProcessManagerServer.duplexAppointServer(); duplexAppointServer.registerController( controller ); duplexAppointServer.compile( ProcessorLifecycleIface.class, false ); } @Override public ProcessManager processManager() { return this.mProcessManager; } @Override public CentralizedTaskInstrument taskInstrument() { return this.mTaskInstrument; } @Override public TaskExecutionLauncher taskExecutionLauncher() { return this.mTaskExecutionLauncher; } @Override public TaskDispatcher taskDispatcher() { return this.mTaskDispatcher; } @Override public RavenTask queryTaskByPath( String path ) { GUID objGuid = this.mTaskInstrument.queryGUIDByPath( path ); if ( objGuid == null ) { return null; } return this.getTaskByGuid( objGuid ); } @Override public RavenTask getTaskByGuid( GUID taskGuid ) { TreeNode treeNode = this.mTaskInstrument.get( taskGuid ); if ( !(treeNode instanceof TaskElement) ) { throw new IllegalArgumentException( "Object node `" + taskGuid + "` is not a task." ); } TaskElement taskElement = (TaskElement) treeNode; return this.mTaskInstrument.constructTask( taskElement ); } @Override public RavenTask createTask( TaskElement taskElement, Identification serviceId ) { RavenTask task = this.mTaskInstrument.createTask( taskElement, serviceId ); return task; } @Override public RavenTask affirmTask( String path, Identification serviceId, TaskElement metaInfos ) { TaskElement taskElement = this.mTaskInstrument.affirmTask( path, metaInfos ); Debug.trace( taskElement ); /* taskElement.setActuallyPriority( metaInfos.getActuallyPriority() ); taskElement.setDeploymentMethod( metaInfos.getDeploymentMethod() ); taskElement.setEnable( metaInfos.isEnable()); taskElement.setDryRun( metaInfos.isDryRun() ); taskElement.setPriority( metaInfos.getPriority() ); taskElement.setResourceType( metaInfos.getResourceType() ); taskElement.setScheduleCycle( metaInfos.getScheduleCycle() ); taskElement.setScheduleType( metaInfos.getScheduleType() ); taskElement.setScheduleTypeCode( metaInfos.getScheduleTypeCode() ); taskElement.setScheduleCycleCode( metaInfos.getScheduleCycleCode() ); taskElement.setType( metaInfos.getType() ); taskElement.setImagePath( metaInfos.getImagePath() ); taskElement.setName( metaInfos.getName() ); taskElement.setGuid( metaInfos.getGuid() );*/ /*this.mTaskInstrument.get( taskElement.getGuid());*/ /*this.mTaskInstrument.query( taskElement.getGuid() );*/ /* String newPath = this.mTaskInstrument.getPath( taskElement.getGuid()); Debug.trace(newPath);*/ this.updateTaskMeta( taskElement ); String newPath = this.mTaskInstrument.getPath( taskElement.getGuid() ); Debug.trace( newPath ); return this.mTaskInstrument.constructTask( taskElement, serviceId ); } @Override public void purgeTask( GUID guid ) { GenericRavenTask task = (GenericRavenTask) this.getTaskByGuid( guid ); task.removeInstance( guid ); this.mTaskInstrument.remove( guid ); } public void updateTaskMeta( RavenTask task ) { this.updateTaskMeta( task.getTaskElement() ); } public void updateTaskMeta( TaskElement taskElement ) { this.mTaskInstrument.update( taskElement ); } protected LaunchedContext launch0( GUID taskGuid, LaunchFeature feature, boolean launch ) throws InstanceLaunchException, TaskDispatchException { RavenTask task = this.getTaskByGuid( taskGuid ); RavenTaskInstance instance = task.createInstance(); UProcess process; if ( launch ) { process = this.mTaskDispatcher.launch( instance, feature ); } else { process = this.mTaskDispatcher.create( instance, feature ); } LaunchedContext context = new LaunchedContext( process, instance ); return context; } @Override public LaunchedContext create( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException { return this.launch0( taskGuid, feature, false ); } @Override public LaunchedContext launch( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException { return this.launch0( taskGuid, feature, true ); } @Override public LaunchedContext create( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException { GUID taskGuid = this.mTaskInstrument.queryGUIDByPath( path ); if ( taskGuid == null ) { throw new IllegalArgumentException( "Task `" + path + "` does not exist." ); } return this.create( taskGuid, feature ); } @Override public LaunchedContext launch( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException { GUID taskGuid = this.mTaskInstrument.queryGUIDByPath( path ); if ( taskGuid == null ) { throw new IllegalArgumentException( "Task `" + path + "` does not exist." ); } return this.launch( taskGuid, feature ); } @Override public RegimentJoinResponse invokeJoinRegiment( RegimentJoinRequest request ) { RegimentJoinResponse response = new RegimentJoinResponse(); try { TaskProcessorEntity entity = this.mTaskDispatcher.registerProcessor( request.getNodeName(), request.getClientId() ); response.setGuid( entity.getGuid().toString() ); response.setName( entity.getName() ); response.setClusterPath( entity.getClusterPath() ); response.setClusterName( entity.getClusterName() ); response.setControlClientId( entity.getControlClientId() ); response.setPriority( entity.getPriority() ); TaskQueueMeta queueMeta = entity.getTaskQueueMeta(); response.setQueueName( queueMeta.getName() ); response.setQueueMaxCapacity( queueMeta.getMaxCapacity() ); response.setQueueMinCapacity( queueMeta.getMinCapacity() ); response.setQueueRuntimeInstanceCapacity( queueMeta.getRuntimeInstanceCapacity() ); this.mLogger.info( "[NewProcessorRegister] " + "( name:`{}`, clientId:`{}`, clusterPath:`{}`, priority:`{}`, queueMaxCapacity:`{}`, runtimeCapacity:`{}` ) " + "", entity.getName(), entity.getControlClientId(), entity.getClusterPath(), entity.getPriority(), queueMeta.getMaxCapacity(), queueMeta.getRuntimeInstanceCapacity() ); } catch ( IllegalArgumentException e ) { response.setErrorMsg( e.getMessage() ); } return response; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/RavenProcessorDeployManager.java ================================================ package com.walnut.odin.conduct; import com.pinecone.hydra.uma.DuplexAppointServer; public class RavenProcessorDeployManager implements ProcessorDeployManager { protected CollectiveTaskRegiment mCollectiveTaskRegiment;
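// A rough usage sketch of the regiment/legionary handshake implemented by the classes
// above (illustrative only; the variable names and construction arguments here are
// assumptions, not taken from real deployment code):
//
//   RavenCollectiveTaskRegiment regiment =
//           new RavenCollectiveTaskRegiment( system, taskInstrument, rpcServer );
//   regiment.startRemoteProcessServer();                      // registers ProcessorLifecycleController
//
//   RavenCollectiveTaskLegionary legionary =
//           new RavenCollectiveTaskLegionary( "worker-01", superiorProcess, rpcClient );
//   legionary.startService();                                 // compiles the ProcessorLifecycleIface stub
//   RegimentJoinResponse response = legionary.joinRegiment(); // RPC into invokeJoinRegiment(...)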
protected DuplexAppointServer mDuplexAppointServer; public RavenProcessorDeployManager( CollectiveTaskRegiment regiment ) { this.mCollectiveTaskRegiment = regiment; this.mDuplexAppointServer = this.mCollectiveTaskRegiment.remoteProcessManagerServer().duplexAppointServer(); } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/ConfigurableTaskGraphOrchestratorConfig.java ================================================ package com.walnut.odin.conduct.dag; public class ConfigurableTaskGraphOrchestratorConfig implements TaskGraphOrchestratorConfig { private String queueNodesTableName; private String temporaryQueueNodesTableName; public ConfigurableTaskGraphOrchestratorConfig() { this.queueNodesTableName = TaskGraphOrchestratorConstants.STANDARD_GLOBAL_QUEUE_NODES_TABLE; this.temporaryQueueNodesTableName = TaskGraphOrchestratorConstants.STANDARD_GLOBAL_TEMPORARY_QUEUE_NODES_TABLE; } @Override public String getQueueNodesTableName() { return this.queueNodesTableName; } @Override public String getTemporaryQueueNodesTableName() { return this.temporaryQueueNodesTableName; } public void setQueueNodesTableName( String queueNodesTableName ) { this.queueNodesTableName = queueNodesTableName; } public void setTemporaryQueueNodesTableName( String temporaryQueueNodesTableName ) { this.temporaryQueueNodesTableName = temporaryQueueNodesTableName; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/ExecuteCallBack.java ================================================ package com.walnut.odin.conduct.dag; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.kom.entity.TaskElement; import java.util.List; public interface ExecuteCallBack extends Pinenut { List introduceTask(); void nextTask(); } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/RavenTaskGraphOrchestrator.java ================================================ package com.walnut.odin.conduct.dag; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.List; import com.pinecone.hydra.orchestration.SequentialAction; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.ConfigurableMegaStratumQueueMeta; import com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue; import com.pinecone.hydra.unit.iqueue.MegaDPStratumQueue; import com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.MegaStratumQueueMeta; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.pinecone.hydra.unit.vgraph.layer.Layer; import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument; import com.walnut.odin.atlas.advance.GenericTapedBFSGraphAdvancer; import com.walnut.odin.atlas.advance.strategy.AtlasPriorityProcessStrategy; import com.walnut.odin.atlas.advance.strategy.MegaInDegreeFirstStrategy; public class RavenTaskGraphOrchestrator implements TaskGraphOrchestrator { protected VectorDAG mVectorDAG; protected LayerInstrument mLayerInstrument; protected RuntimeAtlasInstrument mRuntimeAtlasInstrument; protected KOIMappingDriver mQueueDriver; protected long mnCurrentPos; protected int mnTaskBatchSize; protected int mnExecuteBatchSize; protected 
TaskGraphOrchestratorConfig mConfig; protected volatile Deque<VectorDAG> mExecuteGraph; public RavenTaskGraphOrchestrator( VectorDAG vectorDAG, LayerInstrument layerInstrument, int taskBatchSize, int executeBatchSize, RuntimeAtlasInstrument runtimeAtlasInstrument, KOIMappingDriver queueDriver ) { this.mVectorDAG = vectorDAG; this.mLayerInstrument = layerInstrument; this.mRuntimeAtlasInstrument = runtimeAtlasInstrument; this.mnCurrentPos = 0; this.mnTaskBatchSize = taskBatchSize; this.mnExecuteBatchSize = executeBatchSize; this.mQueueDriver = queueDriver; this.mExecuteGraph = new ArrayDeque<>(); this.mConfig = new ConfigurableTaskGraphOrchestratorConfig(); } @Override public void execute() { // Decompose the graph into executable sub-graphs. this.createExecuteGraph(); // Generate the final execution queue for each sub-graph. for( VectorDAG vectorDAG : this.mExecuteGraph ) { this.createExecuteQueue( vectorDAG ); } // Convert the queued nodes into executable tasks and submit them to the executor. this.enqueueTasksForExecution(); } private void enqueueTasksForExecution() { for(int i = 0; i < this.mnExecuteBatchSize; i++ ) { VectorDAG vectorDAG = this.mExecuteGraph.pop(); SequentialAction action = new SequentialAction(); MegaDeflectPriorityQueueMeta meta = new ConfigurableMegaDeflectPriorityQueueMeta(); meta.setQueueTableName( this.mConfig.getQueueNodesTableName() ); MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(this.mQueueDriver, 0, "segment_name", vectorDAG.getAffiliateLayerGuid().toString(), meta); TaskExecuteCallBack callBack = new TaskExecuteCallBack( magnitudeDPQueue, this.mRuntimeAtlasInstrument, this.mConfig, this.mQueueDriver, this.mExecuteGraph, this.mnTaskBatchSize ); TaskExertium taskExertium = new TaskExertium( callBack ); action.add( taskExertium ); action.start(); } } private void createExecuteGraph() { List<Layer> layers = this.mLayerInstrument.splitGraphLayer(this.mVectorDAG); ArrayList<VectorDAG> vectorDAGS = new ArrayList<>(); for( Layer layer : layers ) { VectorDAG vectorDAG = this.mRuntimeAtlasInstrument.toVectorDAG(layer); vectorDAGS.add( vectorDAG ); } this.mExecuteGraph.addAll( vectorDAGS ); } private void createExecuteQueue( VectorDAG vectorDAG ) { MegaDeflectPriorityQueueMeta meta1 = new ConfigurableMegaDeflectPriorityQueueMeta(); meta1.setQueueTableName( this.mConfig.getQueueNodesTableName() ); MegaStratumQueueMeta meta2 = new ConfigurableMegaStratumQueueMeta(); meta2.setQueueTableName( this.mConfig.getTemporaryQueueNodesTableName() ); MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(this.mQueueDriver, 0, "segment_name", vectorDAG.getAffiliateLayerGuid().toString(), meta1); MegaDPStratumQueue megaDPStratumQueue = new MegaDPStratumQueue(this.mQueueDriver, "segment_name", vectorDAG.getAffiliateLayerGuid().toString(), meta2); AtlasPriorityProcessStrategy strategy = new AtlasPriorityProcessStrategy(); strategy.addStrategy( new MegaInDegreeFirstStrategy( this.mRuntimeAtlasInstrument, magnitudeDPQueue, megaDPStratumQueue, this.mLayerInstrument ) ); GenericTapedBFSGraphAdvancer advancer = new GenericTapedBFSGraphAdvancer( this.mRuntimeAtlasInstrument, magnitudeDPQueue, strategy ); advancer.traverse( vectorDAG ); } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskExecuteCallBack.java ================================================ package com.walnut.odin.conduct.dag; import java.util.ArrayList; import java.util.Deque; import java.util.List; import com.pinecone.hydra.orchestration.SequentialAction; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.task.kom.entity.TaskElement; import
com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue; import com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue; import com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.entity.QueueElement; import com.pinecone.hydra.unit.vgraph.VectorDAG; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument; public class TaskExecuteCallBack implements ExecuteCallBack { private RuntimeAtlasInstrument mRuntimeAtlasInstrument; protected KOIMappingDriver mQueueDriver; private TaskGraphOrchestratorConfig mConfig; private DeflectPriorityQueue mDeflectPriorityQueue; private Deque<VectorDAG> mExecuteVectorDAG; private int mnTaskBatchSize; private int mnCurrentPos; public TaskExecuteCallBack(DeflectPriorityQueue deflectPriorityQueue, RuntimeAtlasInstrument runtimeAtlasInstrument, TaskGraphOrchestratorConfig config, KOIMappingDriver driver, Deque<VectorDAG> vectorDAGDeque, int taskBatchSize ) { this.mDeflectPriorityQueue = deflectPriorityQueue; this.mRuntimeAtlasInstrument = runtimeAtlasInstrument; this.mQueueDriver = driver; this.mConfig = config; this.mExecuteVectorDAG = vectorDAGDeque; this.mnTaskBatchSize = taskBatchSize; } @Override public List introduceTask() { List<QueueElement> queueElements = this.mDeflectPriorityQueue.fetchElements(mnCurrentPos, mnTaskBatchSize); mnCurrentPos += queueElements.size(); // TODO: the downstream logic is not settled yet; implemented as multiple IO round-trips for now. ArrayList<TaskElement> taskElements = new ArrayList<>(); for( QueueElement queueElement : queueElements ) { TaskElement node = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(queueElement.getObjectGuid()); taskElements.add( node ); } return taskElements; } @Override public synchronized void nextTask() { VectorDAG pop = this.mExecuteVectorDAG.pop(); if( pop != null ) { SequentialAction action = new SequentialAction(); MegaDeflectPriorityQueueMeta meta = new ConfigurableMegaDeflectPriorityQueueMeta(); meta.setQueueTableName( this.mConfig.getQueueNodesTableName() ); MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(this.mQueueDriver, 0, "segment_name", pop.getAffiliateLayerGuid().toString(), meta); TaskExecuteCallBack callBack = new TaskExecuteCallBack( magnitudeDPQueue, this.mRuntimeAtlasInstrument, this.mConfig, this.mQueueDriver, this.mExecuteVectorDAG, this.mnTaskBatchSize ); TaskExertium taskExertium = new TaskExertium( callBack ); action.add( taskExertium ); action.start(); } } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskExertium.java ================================================ package com.walnut.odin.conduct.dag; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.orchestration.Exertium; import com.pinecone.hydra.task.kom.entity.TaskElement; import java.util.ArrayDeque; import java.util.Deque; import java.util.List; public class TaskExertium extends Exertium { private Deque<TaskElement> mDeque; private ExecuteCallBack mExecuteCallBack; private int mRemainingNums; public TaskExertium( ExecuteCallBack callBack ) { this.mExecuteCallBack = callBack; this.mRemainingNums = 0; this.mDeque = new ArrayDeque<>(); } @Override protected void doStart() { boolean flag = true; while( flag ) { while( !this.mDeque.isEmpty() ) { TaskElement node = this.mDeque.pop(); Debug.trace( "Executing node " + node.getId() ); this.mRemainingNums--; } if( this.mRemainingNums == 0 ) { List<TaskElement> taskElements = this.mExecuteCallBack.introduceTask(); this.mRemainingNums = taskElements.size(); this.mDeque.addAll( taskElements ); } if(
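/* Second emptiness check of the drain-and-refill loop: introduceTask() returned an
   empty batch, so this sub-graph's queue is exhausted; stop this exertium and let the
   callback chain execution to the next sub-DAG via nextTask(). */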
this.mRemainingNums == 0 ) { flag = false; this.mExecuteCallBack.nextTask(); } } } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskGraphOrchestrator.java ================================================ package com.walnut.odin.conduct.dag; import com.pinecone.framework.system.regime.Orchestrator; public interface TaskGraphOrchestrator extends Orchestrator { void execute(); } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskGraphOrchestratorConfig.java ================================================ package com.walnut.odin.conduct.dag; import com.pinecone.framework.system.prototype.Pinenut; public interface TaskGraphOrchestratorConfig extends Pinenut { String getQueueNodesTableName(); String getTemporaryQueueNodesTableName(); } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskGraphOrchestratorConstants.java ================================================ package com.walnut.odin.conduct.dag; public final class TaskGraphOrchestratorConstants { public static final String STANDARD_GLOBAL_QUEUE_NODES_TABLE = "hydra_global_queue_nodes"; public static final String STANDARD_GLOBAL_TEMPORARY_QUEUE_NODES_TABLE = "hydra_global_temporary_queue_nodes"; } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceAtlasAdjacent.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import java.util.Map; public class GenericInstanceAtlasAdjacent implements InstanceAtlasAdjacent { protected GUID guid; protected GUID parentGuid; public GenericInstanceAtlasAdjacent() { } public GenericInstanceAtlasAdjacent(Map joEntity) { BeanMapDecoder.BasicDecoder.decode(this, joEntity); } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public GUID getParentGuid() { return this.parentGuid; } @Override public void setParentGuid(GUID parentGuid) { this.parentGuid = parentGuid; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceAtlasNode.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import java.util.Map; public class GenericInstanceAtlasNode implements InstanceAtlasNode { protected GUID guid; protected GUID instanceGuid; protected String nodeName; protected boolean isIsolated; public GenericInstanceAtlasNode() { } public GenericInstanceAtlasNode(Map joEntity) { BeanMapDecoder.BasicDecoder.decode(this, joEntity); } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public GUID getInstanceGuid() { return this.instanceGuid; } @Override public void setInstanceGuid(GUID instanceGuid) { this.instanceGuid = instanceGuid; } @Override public String getNodeName() { return this.nodeName; } @Override public void setNodeName(String nodeName) { this.nodeName = nodeName; } @Override public boolean isIsolated() { return this.isIsolated; } @Override public void 
setIsIsolated(boolean isIsolated) { this.isIsolated = isIsolated; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceEvent.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import java.time.LocalDateTime; import java.util.Map; public class GenericInstanceEvent implements InstanceEvent { protected GUID guid; protected GUID taskGuid; protected GUID instanceGuid; protected String instanceName; protected int retryTimes; protected int currentRetryNumber; protected String eventType; protected String state; protected String eventContext; protected LocalDateTime execTime; public GenericInstanceEvent() { } public GenericInstanceEvent(Map joEntity) { BeanMapDecoder.BasicDecoder.decode(this, joEntity); } @Override public GUID getGuid() { return this.guid; } @Override public void setGuid(GUID guid) { this.guid = guid; } @Override public GUID getTaskGuid() { return this.taskGuid; } @Override public void setTaskGuid(GUID taskGuid) { this.taskGuid = taskGuid; } @Override public GUID getInstanceGuid() { return this.instanceGuid; } @Override public void setInstanceGuid(GUID instanceGuid) { this.instanceGuid = instanceGuid; } @Override public String getInstanceName() { return this.instanceName; } @Override public void setInstanceName(String instanceName) { this.instanceName = instanceName; } @Override public int getRetryTimes() { return this.retryTimes; } @Override public void setRetryTimes(int retryTimes) { this.retryTimes = retryTimes; } @Override public int getCurrentRetryNumber() { return this.currentRetryNumber; } @Override public void setCurrentRetryNumber(int currentRetryNumber) { this.currentRetryNumber = currentRetryNumber; } @Override public String getEventType() { return this.eventType; } @Override public void setEventType(String eventType) { this.eventType = eventType; } @Override public String getState() { return this.state; } @Override public void setState(String state) { this.state = state; } @Override public String getEventContext() { return this.eventContext; } @Override public void setEventContext(String eventContext) { this.eventContext = eventContext; } @Override public LocalDateTime getExecTime() { return this.execTime; } @Override public void setExecTime(LocalDateTime execTime) { this.execTime = execTime; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceExec.java ================================================ package com.walnut.odin.conduct.entity; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanMapDecoder; import java.time.LocalDateTime; import java.util.Map; public class GenericInstanceExec implements InstanceExec { protected long id; protected GUID taskGuid; protected GUID instanceGuid; protected String taskName; protected String instanceName; protected String processorQueue; protected String clusterName; protected String execState; protected int currentRetryNumber; protected int retryTimes; protected LocalDateTime startTime; protected LocalDateTime runTime; protected LocalDateTime finishTime; public GenericInstanceExec() { } public GenericInstanceExec(Map joEntity) { BeanMapDecoder.BasicDecoder.decode(this, joEntity); } @Override public long getId() { return this.id; } @Override public void 
setId(long id) { this.id = id; } @Override public GUID getTaskGuid() { return this.taskGuid; } @Override public void setTaskGuid(GUID taskGuid) { this.taskGuid = taskGuid; } @Override public GUID getInstanceGuid() { return this.instanceGuid; } @Override public void setInstanceGuid(GUID instanceGuid) { this.instanceGuid = instanceGuid; } @Override public String getTaskName() { return this.taskName; } @Override public void setTaskName(String taskName) { this.taskName = taskName; } @Override public String getInstanceName() { return this.instanceName; } @Override public void setInstanceName(String instanceName) { this.instanceName = instanceName; } @Override public String getProcessorQueue() { return this.processorQueue; } @Override public void setProcessorQueue(String processorQueue) { this.processorQueue = processorQueue; } @Override public String getClusterName() { return this.clusterName; } @Override public void setClusterName(String clusterName) { this.clusterName = clusterName; } @Override public String getExecState() { return this.execState; } @Override public void setExecState(String execState) { this.execState = execState; } @Override public int getCurrentRetryNumber() { return this.currentRetryNumber; } @Override public void setCurrentRetryNumber(int currentRetryNumber) { this.currentRetryNumber = currentRetryNumber; } @Override public int getRetryTimes() { return this.retryTimes; } @Override public void setRetryTimes(int retryTimes) { this.retryTimes = retryTimes; } @Override public LocalDateTime getStartTime() { return this.startTime; } @Override public void setStartTime(LocalDateTime startTime) { this.startTime = startTime; } @Override public LocalDateTime getRunTime() { return this.runTime; } @Override public void setRunTime(LocalDateTime runTime) { this.runTime = runTime; } @Override public LocalDateTime getFinishTime() { return this.finishTime; } @Override public void setFinishTime(LocalDateTime finishTime) { this.finishTime = finishTime; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/InstanceAtlasNodeManipular.java ================================================ package com.walnut.odin.conduct.schedule; import com.pinecone.hydra.system.ko.dao.GUIDNameManipulator; import com.walnut.odin.conduct.entity.InstanceAtlasNode; public interface InstanceAtlasNodeManipular extends GUIDNameManipulator { void insert( InstanceAtlasNode instanceAtlasNode); } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenInstanceScheduleImpetus.java ================================================ package com.walnut.odin.conduct.schedule; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.util.StringUtils; import com.pinecone.hydra.system.ko.MetaPersistenceException; import com.pinecone.hydra.task.InstanceEventType; import com.pinecone.hydra.task.TaskInstanceExecState; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.UniformTaskInstrument; import com.pinecone.hydra.task.kom.instance.InstanceEntry; import com.pinecone.hydra.task.kom.instance.InstanceInstrument; import com.pinecone.hydra.task.kom.source.TaskNodeManipulator; import com.pinecone.slime.meta.TableIndexMeta; 
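// Overview of the scan-window scheduling loop implemented below (a summary of the code,
// not additional behavior): impelSchedulableInstances() asks the instance instrument for
// the schedulable id range, splits it into windows of scheduleScanIdWindow ids, and
// submits one fetch-fit-launch pass per window to the thread pool. For example, with
// idMin = 1, idMax = 250 and scheduleScanIdWindow = 100, the windows scanned would be
// [1, 100], [101, 200] and [201, 250].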
import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument; import com.walnut.odin.conduct.entity.GenericInstanceEvent; import com.walnut.odin.conduct.entity.GenericInstanceExec; import com.walnut.odin.conduct.entity.InstanceEvent; import com.walnut.odin.conduct.entity.InstanceExec; import com.walnut.odin.conduct.schedule.entity.DepartureChecklist; import com.walnut.odin.conduct.schedule.entity.ScheduleFittingContext; import com.walnut.odin.dispatch.PipelineLaunchReport; import com.walnut.odin.dispatch.TaskLaunchContext; import com.walnut.odin.task.CentralizedTaskInstrument; import com.walnut.odin.task.RavenTaskConfig; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.mapper.InstanceAtlasNodeMapper; import com.walnut.odin.task.source.RavenTaskMasterManipulator; import com.walnut.odin.task.source.ScheduleManipulator; import com.walnut.odin.task.troll.GenericRavenTaskInstance; import com.walnut.odin.task.troll.LaunchFeature; import com.walnut.odin.task.troll.TaskExecutionLauncher; public class RavenInstanceScheduleImpetus implements InstanceScheduleImpetus { private Logger log = LoggerFactory.getLogger( this.getClass() ); private RavenTaskConfig mRavenTaskConfig; private int mnScanThreadCount; private long mnScanIdWindow; private UniformTaskScheduler mTaskScheduler; private TaskExecutionLauncher mTaskExecutionLauncher; private UniformTaskInstrument mUniformTaskInstrument; private RuntimeAtlasInstrument mRuntimeAtlasInstrument; private InstanceInstrument mInstanceInstrument; private CentralizedTaskInstrument mCentralizedTaskInstrument; private RavenTaskMasterManipulator mRavenTaskMasterManipulator; private TaskNodeManipulator mTaskNodeManipulator; private ScheduleManipulator mScheduleManipulator; private InstanceAtlasNodeMapper mInstanceAtlasNodeMapper; private InstanceScheduleAllocator mInstanceScheduleAllocator; private ExecutorService mExecutorService; public RavenInstanceScheduleImpetus( UniformTaskScheduler taskScheduler ) { this.mTaskScheduler = taskScheduler; this.mRavenTaskConfig = taskScheduler.ravenTaskConfig(); this.mnScanThreadCount = this.mRavenTaskConfig.getScheduleScanThreadCount(); this.mnScanIdWindow = this.mRavenTaskConfig.getScheduleScanIdWindow(); this.mRuntimeAtlasInstrument = taskScheduler.atlasInstrument(); this.mTaskExecutionLauncher = taskScheduler.taskExecutionLauncher(); this.mCentralizedTaskInstrument = taskScheduler.taskInstrument(); this.mUniformTaskInstrument = this.mCentralizedTaskInstrument.getUniformTaskInstrument(); this.mInstanceInstrument = taskScheduler.instanceInstrument(); this.mRavenTaskMasterManipulator = this.mCentralizedTaskInstrument.getRavenTaskMasterManipulator(); this.mTaskNodeManipulator = this.mRavenTaskMasterManipulator.getTaskMasterManipulator().getTaskNodeManipulator(); this.mScheduleManipulator = this.mRavenTaskMasterManipulator.getScheduleManipulator(); this.mInstanceAtlasNodeMapper = this.mScheduleManipulator.getInstanceAtlasNodeMapper(); this.mInstanceScheduleAllocator = taskScheduler.instanceScheduleAllocator(); this.mExecutorService = Executors.newFixedThreadPool( this.mnScanThreadCount * 2 ); log.info( "[Odin] [CrucialSchedulerComponentLifecycle] (RavenInstanceScheduleImpetus Construction) " ); } // TODO: review the mapper dependencies here. // protected DepartureChecklist prelaunch_check_instance( InstanceEntry that ) { // // // } // [Prelaunch-Stage2] Parallel scheduling quota allocation is complete; start the launch preparation sequence. protected Collection<TaskLaunchContext> initializePrelaunchSequence( Collection<InstanceEntry> fittedInstances ) { Collection<TaskLaunchContext> li = new ArrayList<>(); for ( InstanceEntry fittedInstance : fittedInstances )
{ RavenTaskInstance instance = new GenericRavenTaskInstance( fittedInstance, this.mCentralizedTaskInstrument ); LaunchFeature launchFeature = new LaunchFeature(); String szProcessor = fittedInstance.getProcessorName(); if ( StringUtils.isNoneEmpty(szProcessor) ) { // Not affinity (best-effort), but designated (compulsory): a hard processor binding rather than an advisory allocation. launchFeature.withProcessorDesignated( szProcessor ); } TaskLaunchContext launchContext = TaskLaunchContext.of( instance, launchFeature ); li.add( launchContext ); } return li; } protected Collection<TaskLaunchContext> prepareLaunchContexts( ScheduleFittingContext context ) { Collection<InstanceEntry> fittedInstances = context.getFittedInstances(); Collection<InstanceEntry> discardedInstances = context.getDiscardedInstances(); Collection<TaskLaunchContext> li = this.initializePrelaunchSequence( fittedInstances ); for ( InstanceEntry discardedInstance : discardedInstances ) { log.info( "[DiscardInstance] ( Task `{}`, Instance `{}` ) has been discarded.", discardedInstance.getTaskName(), discardedInstance.getInstanceName() ); // TODO: more sophisticated upgrade handling. } return li; } @Override public void impelSchedulableInstances( Collection<TaskInstanceStatus> statuses, LocalDateTime targetTime ) { if ( targetTime == null ) { targetTime = LocalDateTime.now(); } TableIndexMeta range = this.mInstanceInstrument.querySchedulableIdRange( statuses, targetTime ); if ( range == null ) { return; } long idMin = range.getMinId(); long idMax = range.getMaxId(); if ( idMin <= 0 || idMax <= 0 || idMax < idMin ) { return; } long cursor = idMin; while ( cursor <= idMax ) { long windowStart = cursor; long windowEnd = cursor + this.mnScanIdWindow - 1; if ( windowEnd > idMax ) { windowEnd = idMax; } final long finalStart = windowStart; final long finalEnd = windowEnd; LocalDateTime finalTargetTime = targetTime; this.mExecutorService.submit( () -> { try { log.info( "[TaskSchedulerLifecycle] Impelling schedulable instances (Start: {}, End: {}) ", finalStart, finalEnd ); Collection<InstanceEntry> entries = this.mInstanceInstrument.fetchSchedulableInstances( finalStart, finalEnd, statuses, finalTargetTime ); ScheduleFittingContext context = this.mInstanceScheduleAllocator.pipeFitting( entries ); Collection<TaskLaunchContext> launchContexts = this.prepareLaunchContexts( context ); PipelineLaunchReport report = this.mTaskScheduler.taskDispatcher().pipeLaunch( launchContexts ); //elements = this.prepareScheduleTasks( elements, finalTargetTime ); log.info( "[TaskSchedulerLifecycle] Impelling schedulable instances (Start: {}, End: {}, Size: {}) ", finalStart, finalEnd, entries.size() ); } catch ( Exception e ) { log.error( "[TaskSchedulerLifecycle] Impelling schedulable instances (Start: {}, End: {}) ", finalStart, finalEnd, e ); } } ); cursor = windowEnd + 1; } } @Override public void impelPrelaunchInstances( LocalDateTime targetTime ) { this.impelSchedulableInstances( List.of( TaskInstanceStatus.New, TaskInstanceStatus.DependencyWait, TaskInstanceStatus.ResourceWait, TaskInstanceStatus.DepartureStandby ), targetTime ); } /*protected void processAndFireInstances( List instances ) throws MetaPersistenceException { for ( InstanceEntry instance : instances ) { try { log.info( "GUID: {}, Name: {}", instance.getGuid(), instance.getInstanceName() ); instance.setInstanceStatus( TaskInstanceStatus.ResourceWait ); instance.setRunStatus(TaskInstanceStatus.ResourceWait.getName()); instance.setStartTime( LocalDateTime.now() ); this.mInstanceInstrument.updateInstance( instance ); //log.info(this.mInstanceInstrument.getInstanceEntry(instance.getGuid()).getRunStatus()); InstanceExec execUpdate = new GenericInstanceExec();
execUpdate.setInstanceGuid( instance.getGuid() ); execUpdate.setExecState( TaskInstanceExecState.Submitted.getName() ); this.mScheduleManipulator.getInstanceExecMapper().updateStateByInstanceGuid( execUpdate ); InstanceEvent event = new GenericInstanceEvent(); event.setGuid( this.mCentralizedTaskInstrument.getGuidAllocator().nextGUID() ); event.setTaskGuid( instance.getTaskGuid() ); event.setInstanceGuid( instance.getGuid() ); event.setInstanceName( instance.getInstanceName() ); event.setEventType( instance.getTaskType() ); event.setState( InstanceEventType.CheckDependencyReady.getName() ); event.setExecTime( LocalDateTime.now() ); event.setEventContext( "{}" ); // this.mScheduleManipulator.getInstanceEventMapper().insert( event ); //LaunchFeature feature = new LaunchFeature(); // this.mTaskExecutionLauncher.launchLocally( instance, feature ); } catch ( MetaPersistenceException e ) { instance.setInstanceStatus( TaskInstanceStatus.Error ); this.mInstanceInstrument.updateInstance( instance ); } } }*/ @Override public UniformTaskScheduler taskScheduler() { return this.mTaskScheduler; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenScheduleAllocator.java ================================================ package com.walnut.odin.conduct.schedule; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.util.CollectionUtils; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.task.kom.instance.InstanceEntry; import com.pinecone.hydra.task.marshal.TaskPriority; import com.walnut.odin.conduct.schedule.entity.ConcurrentQuota; import com.walnut.odin.conduct.schedule.entity.ScheduleFittingContext; public class RavenScheduleAllocator implements InstanceScheduleAllocator { private Logger log = LoggerFactory.getLogger( this.getClass() ); private String mszPartitionName; private Map<String, ConcurrentQuota> mQuotaConfig; private ConcurrentMap<Integer, ConcurrentQuota> mPriorityQuota; private AtomicLong mGlobalConcurrentInstance; private ConcurrentMap<Integer, Lock> mPrioritySegLocks; private ConcurrentMap<Integer, Map<GUID, InstanceEntry>> mPriorityInstances; private Lock mGlobalInstanceLock; protected void from_config( JSONObject config ) { this.mszPartitionName = config.optString( "name" ); JSONObject joQuotaConfig = config.getJSONObject( "quota" ); this.mQuotaConfig = ConcurrentQuota.fromThose( joQuotaConfig ); this.mGlobalConcurrentInstance = new AtomicLong( config.optLong( "globalConcurrentInstance" ) ); for ( Map.Entry<String, ConcurrentQuota> entry : this.mQuotaConfig.entrySet() ) { if ( entry == null ) { continue; } String szKey = entry.getKey(); ConcurrentQuota value = entry.getValue(); if ( szKey == null || value == null ) { continue; } if ( "default".equalsIgnoreCase( szKey ) ) { continue; } this.refreshQuotaCount( value, this.mGlobalConcurrentInstance.get() ); this.mPriorityQuota.put( (int) value.getPriority(), value ); } } protected void trace_dispatcher_config() { JSONObject jo = new JSONMaptron(); jo.put( "PartitionName", this.mszPartitionName ); jo.put( "ConcurrentInstance", this.mGlobalConcurrentInstance.get() ); jo.put( "QuotaConfig", new JSONMaptron( CollectionUtils.genericConvert( this.mQuotaConfig ), true ) ); log.info( "[ScheduleAllocator] Allocator configured with the following configs: {}", jo.toJSONStringI( 2 ) ); } public RavenScheduleAllocator( JSONObject config ) { this.mPriorityQuota = new ConcurrentHashMap<>(); this.mPrioritySegLocks = new ConcurrentHashMap<>(); this.mPriorityInstances = new ConcurrentHashMap<>(); this.mGlobalInstanceLock = new ReentrantLock(); this.from_config( config ); this.trace_dispatcher_config(); } public RavenScheduleAllocator( UniformTaskScheduler taskScheduler ) { this( taskScheduler.ravenTaskConfig().getScheduleGlobalAllocatorConfig().optJSONObject( taskScheduler.ravenTaskConfig().getSchedulePartitionName() ) ); } protected ConcurrentQuota resolveQuotaTemplate( short nPriority ) { if ( this.mQuotaConfig == null || this.mQuotaConfig.isEmpty() ) { return null; } if ( isQuotaBypassedPriority( nPriority ) ) { ConcurrentQuota unlimitedQuota = this.mQuotaConfig.get( "unlimited" ); if ( unlimitedQuota != null ) { return unlimitedQuota.reproduce( nPriority ); } } ConcurrentQuota directQuota = this.mPriorityQuota.get( (int) nPriority ); if ( directQuota != null ) { return directQuota.reproduce( nPriority ); } ConcurrentQuota defaultQuota = this.mQuotaConfig.get( "default" ); if ( defaultQuota != null ) { return defaultQuota.reproduce( nPriority ); } return null; } protected static Map<Integer, List<InstanceEntry>> groupInstancesByPriority( Collection<InstanceEntry> instances ) { Map<Integer, List<InstanceEntry>> grouped = new HashMap<>(); for ( InstanceEntry instance : instances ) { if ( instance == null ) { continue; } int nPriority = instance.getActuallyPriority(); List<InstanceEntry> list = grouped.computeIfAbsent( nPriority, k -> new ArrayList<>() ); list.add( instance ); } return grouped; } protected Lock affirmPrioritySegLock( Integer nPriority ) { return this.mPrioritySegLocks.computeIfAbsent( nPriority, k -> new ReentrantLock() ); } protected ConcurrentQuota affirmQuota( short nPriority ) { ConcurrentQuota quota = this.mPriorityQuota.computeIfAbsent( (int) nPriority, k -> { ConcurrentQuota template = this.resolveQuotaTemplate( nPriority ); if ( template != null ) { return template; } return new ConcurrentQuota( nPriority ); } ); this.refreshQuotaCount( quota, this.mGlobalConcurrentInstance.get() ); return quota; } protected void refreshQuotaCount( ConcurrentQuota quota, long nGlobalConcurrentInstance ) { if ( quota == null ) { return; } if ( quota.isMaximumRatioMode() ) { long nMaximumCnt = (long) Math.floor( nGlobalConcurrentInstance * quota.getMaximumRatio() ); if ( nMaximumCnt < 0 ) { nMaximumCnt = 0; } quota.setMaximumCnt( nMaximumCnt ); } else { Long nMaximumCnt = quota.getMaximumCnt(); if ( nMaximumCnt == null ) { quota.setMaximumCnt( 0L ); } else if ( nMaximumCnt < 0 ) { quota.setMaximumCnt( Long.MAX_VALUE ); } } if ( quota.isMinimumRatioMode() ) { long nMinimumCnt = (long) Math.floor( nGlobalConcurrentInstance * quota.getMinimumRatio() ); if ( nMinimumCnt < 0 ) { nMinimumCnt = 0; } quota.setMinimumCnt( nMinimumCnt ); } else { Long nMinimumCnt = quota.getMinimumCnt(); if ( nMinimumCnt == null ) { quota.setMinimumCnt( 0L ); } else if ( nMinimumCnt < 0 ) { quota.setMinimumCnt( Long.MAX_VALUE ); } } } protected Map<GUID, InstanceEntry> affirmPriorityInstances( int nPriority ) { return this.mPriorityInstances.computeIfAbsent( nPriority, k -> new HashMap<>() ); } public static boolean isQuotaBypassedPriority( int nPriority ) { return nPriority > TaskPriority.UNLIMITED.getValue(); } public long getGlobalConcurrentInstance() { return this.mGlobalConcurrentInstance.get(); } public void setGlobalConcurrentInstance( long nGlobalConcurrentInstance ) { this.mGlobalInstanceLock.lock(); try { this.mGlobalConcurrentInstance.set( nGlobalConcurrentInstance ); for ( ConcurrentQuota quota : this.mPriorityQuota.values() ) { if ( quota == null ) { continue; } this.refreshQuotaCount( quota, nGlobalConcurrentInstance ); } } finally { this.mGlobalInstanceLock.unlock(); } } public Collection<Integer> queryFulledPriority() { Collection<Integer> fulledPriorities = new ArrayList<>(); for ( Map.Entry<Integer, ConcurrentQuota> kv : this.mPriorityQuota.entrySet() ) { Integer nPriority = kv.getKey(); ConcurrentQuota quota = kv.getValue(); if ( nPriority == null || quota == null ) { continue; } if ( isQuotaBypassedPriority( nPriority ) ) { continue; } Lock segLock = this.affirmPrioritySegLock( nPriority ); segLock.lock(); try { long nMaximumCnt = quota.getMaximumCnt(); if ( nMaximumCnt == Long.MAX_VALUE ) { continue; } Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority ); long nCurrentCnt = 0; if ( instanceMap != null ) { nCurrentCnt = instanceMap.size(); } if ( nCurrentCnt >= nMaximumCnt ) { fulledPriorities.add( nPriority ); } } finally { segLock.unlock(); } } return fulledPriorities; } public Collection<InstanceEntry> queryPriorityInstances( int nPriority ) { Lock segLock = this.affirmPrioritySegLock( nPriority ); segLock.lock(); try { Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority ); if ( instanceMap == null || instanceMap.isEmpty() ) { return new ArrayList<>(); } return new ArrayList<>( instanceMap.values() ); } finally { segLock.unlock(); } } @Override public String getPartitionName() { return this.mszPartitionName; } protected void pipeFittingByPriority( int nPriority, Collection<InstanceEntry> instances, ScheduleFittingContext context ) { Lock segLock = this.affirmPrioritySegLock( nPriority ); segLock.lock(); try { if ( instances == null || instances.isEmpty() ) { return; } Map<GUID, InstanceEntry> instanceMap = this.affirmPriorityInstances( nPriority ); Collection<InstanceEntry> launchedInstances = context.getFittedInstances(); Collection<InstanceEntry> discardedInstances = context.getDiscardedInstances(); if ( isQuotaBypassedPriority( nPriority ) ) { for ( InstanceEntry instance : instances ) { if ( instance == null || instance.getGuid() == null ) { continue; } instanceMap.put( instance.getGuid(), instance ); launchedInstances.add( instance ); } return; } ConcurrentQuota quota = this.affirmQuota( (short) nPriority ); long nMaximumCnt = quota.getMaximumCnt(); long nRemaining = nMaximumCnt - instanceMap.size(); if ( nRemaining <= 0 ) { discardedInstances.addAll( instances ); return; } for ( InstanceEntry instance : instances ) { if ( instance == null ) { continue; } GUID instanceGuid = instance.getGuid(); if ( instanceGuid == null ) { discardedInstances.add( instance ); continue; } if ( instanceMap.containsKey( instanceGuid ) ) { continue; } if ( nRemaining <= 0 ) { discardedInstances.add( instance ); continue; } instanceMap.put( instanceGuid, instance ); launchedInstances.add( instance ); --nRemaining; } } finally { segLock.unlock(); } } @Override public ScheduleFittingContext pipeFitting( Collection<InstanceEntry> instances ) { ScheduleFittingContext context = new ScheduleFittingContext(); if ( instances == null || instances.isEmpty() ) { return context; } Map<Integer, List<InstanceEntry>> groupedInstances = groupInstancesByPriority( instances ); for ( Map.Entry<Integer, List<InstanceEntry>> kv : groupedInstances.entrySet() ) { Integer priority = kv.getKey(); List<InstanceEntry> instanceList = kv.getValue(); if ( priority == null || instanceList == null || instanceList.isEmpty() ) { continue; } this.pipeFittingByPriority( priority, instanceList, context ); } return
context; } public InstanceEntry reclaimInstance( int nPriority, GUID instanceGuid ) { if ( instanceGuid == null ) { return null; } Lock segLock = this.affirmPrioritySegLock( nPriority ); segLock.lock(); try { Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority ); if ( instanceMap == null ) { return null; } return instanceMap.remove( instanceGuid ); } finally { segLock.unlock(); } } public InstanceEntry reclaimInstance( GUID instanceGuid ) { if ( instanceGuid == null ) { return null; } for ( Integer nPriority : this.mPriorityInstances.keySet() ) { if ( nPriority == null ) { continue; } Lock segLock = this.affirmPrioritySegLock( nPriority ); segLock.lock(); try { Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority ); if ( instanceMap == null ) { continue; } InstanceEntry removed = instanceMap.remove( instanceGuid ); if ( removed != null ) { return removed; } } finally { segLock.unlock(); } } return null; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenTaskSchedulePreparator.java ================================================ package com.walnut.odin.conduct.schedule; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.task.InstanceEventType; import com.pinecone.hydra.task.TaskInstanceExecState; import com.pinecone.hydra.task.kom.UniformTaskInstrument; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.kom.instance.InstanceEntry; import com.pinecone.hydra.task.kom.source.TaskNodeManipulator; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; import com.pinecone.hydra.unit.vgraph.entity.GraphNode; import com.pinecone.slime.meta.TableIndex64Meta; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument; import com.walnut.odin.conduct.entity.GenericInstanceAtlasAdjacent; import com.walnut.odin.conduct.entity.GenericInstanceAtlasNode; import com.walnut.odin.conduct.entity.GenericInstanceEvent; import com.walnut.odin.conduct.entity.GenericInstanceExec; import com.walnut.odin.conduct.entity.InstanceAtlasAdjacent; import com.walnut.odin.conduct.entity.InstanceAtlasNode; import com.walnut.odin.conduct.entity.InstanceEvent; import com.walnut.odin.conduct.entity.InstanceExec; import com.walnut.odin.task.CentralizedTaskInstrument; import com.walnut.odin.task.RavenTask; import com.walnut.odin.task.RavenTaskConfig; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.mapper.InstanceAtlasAdjacentMapper; import com.walnut.odin.task.mapper.InstanceAtlasNodeMapper; import com.walnut.odin.task.mapper.InstanceEventMapper; import com.walnut.odin.task.mapper.InstanceExecMapper; import com.walnut.odin.task.source.RavenTaskMasterManipulator; import com.walnut.odin.task.source.ScheduleManipulator; import com.walnut.odin.task.troll.LaunchFeature; import com.walnut.odin.task.troll.TaskExecutionLauncher; public class RavenTaskSchedulePreparator implements TaskSchedulePreparator { // Generate daily batches in advance (24h/Cycle) public static final Collection<TaskScheduleCycle> DailyTaskScheduleCycles = List.of( TaskScheduleCycle.Month, TaskScheduleCycle.Week, TaskScheduleCycle.Day,
TaskScheduleCycle.Hour ); private Logger log = LoggerFactory.getLogger( this.getClass() ); private GuidAllocator mGuidAllocator; private RavenTaskConfig mRavenTaskConfig; private int mnScanThreadCount; private long mnScanIdWindow; private UniformTaskScheduler mTaskScheduler; private TaskExecutionLauncher mTaskExecutionLauncher; private UniformTaskInstrument mUniformTaskInstrument; private RuntimeAtlasInstrument mRuntimeAtlasInstrument; private CentralizedTaskInstrument mCentralizedTaskInstrument; private RavenTaskMasterManipulator mRavenTaskMasterManipulator; private TaskNodeManipulator mTaskNodeManipulator; private ScheduleManipulator mScheduleManipulator; private InstanceAtlasNodeMapper mInstanceAtlasNodeMapper; private InstanceAtlasAdjacentMapper mInstanceAtlasAdjacentMapper; private InstanceExecMapper mInstanceExecMapper; private InstanceEventMapper mInstanceEventMapper; private ExecutorService mExecutorService; public RavenTaskSchedulePreparator( UniformTaskScheduler taskScheduler ) { this.mTaskScheduler = taskScheduler; this.mRavenTaskConfig = taskScheduler.ravenTaskConfig(); this.mnScanThreadCount = this.mRavenTaskConfig.getScheduleScanThreadCount(); this.mnScanIdWindow = this.mRavenTaskConfig.getScheduleScanIdWindow(); this.mRuntimeAtlasInstrument = taskScheduler.atlasInstrument(); this.mTaskExecutionLauncher = taskScheduler.taskExecutionLauncher(); this.mCentralizedTaskInstrument = taskScheduler.taskInstrument(); this.mUniformTaskInstrument = this.mCentralizedTaskInstrument.getUniformTaskInstrument(); this.mGuidAllocator = this.mCentralizedTaskInstrument.getGuidAllocator(); this.mRavenTaskMasterManipulator = this.mCentralizedTaskInstrument.getRavenTaskMasterManipulator(); this.mTaskNodeManipulator = this.mRavenTaskMasterManipulator.getTaskMasterManipulator().getTaskNodeManipulator(); this.mScheduleManipulator = this.mRavenTaskMasterManipulator.getScheduleManipulator(); this.mInstanceAtlasNodeMapper = this.mScheduleManipulator.getInstanceAtlasNodeMapper(); this.mInstanceAtlasAdjacentMapper = this.mScheduleManipulator.getInstanceAtlasAdjacentMapper(); this.mInstanceExecMapper = this.mScheduleManipulator.getInstanceExecMapper(); this.mInstanceEventMapper = this.mScheduleManipulator.getInstanceEventMapper(); this.mExecutorService = Executors.newFixedThreadPool( this.mnScanThreadCount * 2 ); log.info( "[Odin] [CrucialSchedulerComponentLifecycle] (RavenTaskSchedulePreparator Construction) " ); } protected TaskScheduleContext prepareTaskScheduleTimeOffset( TaskElement element, LocalDateTime targetTime ) { TaskScheduleContext context = new TaskScheduleContext( element, targetTime ); TaskScheduleCycle cycle = element.getScheduleCycle(); String cron = element.getScheduleCron(); if ( cycle == null ) { return context; } if ( cron == null || cron.isBlank() ) { String defaultCron = ScheduleCronHelper.generateDefaultCron( cycle ); element.setScheduleCron( defaultCron ); cron = defaultCron; } LocalDateTime next = element.getNextScheduleTime(); context.setThisScheduleTime( next ); if ( next == null ) { LocalDateTime firstFireTime = ScheduleCronHelper.computeNextByCron( cron, targetTime.minusSeconds( 1 ) ); if ( firstFireTime == null ) { return context; } context.setThisScheduleTime( LocalDateTime.now() ); /* initialization uses the current time */ context.setNextScheduleTime( firstFireTime ); /* already advanced one fire */ element.setNextScheduleTime( firstFireTime ); this.mTaskNodeManipulator.update( element ); return context; /* next already points to the following fire */ } LocalDateTime advanced = ScheduleCronHelper.computeNextByCron( cron, next ); if ( advanced == null ) {
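/* A null result means the cron expression yields no further fire times; the element's stored schedule is left untouched. */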
return context; } if ( !advanced.equals( next ) ) { element.setNextScheduleTime( advanced ); context.setNextScheduleTime( advanced ); /* this.mTaskNodeManipulator.update( element ); */ } return context; } protected void prepareInstance( TaskScheduleContext context, RavenTaskInstance that ) { LaunchFeature feature = new LaunchFeature(); InstanceEntry it = that.getInstanceEntry(); it.setExpectTime( context.getThisScheduleTime() ); /* set the expected time first; the launcher call below persists it */ this.mTaskExecutionLauncher.initializeInstance( that, feature ); /* initializeInstance performs the actual instance insertion */ } protected void prepareInstanceLineage( TaskScheduleContext context, RavenTaskInstance instance ) { TaskElement element = context.getElement(); GUID instanceGuid = instance.getInstanceEntry().getGuid(); GraphNode graphNode = this.mRuntimeAtlasInstrument.queryGraphNodeByTaskGuid( element.getGuid() ); List<GUID> parentIds = new ArrayList<>(); InstanceAtlasNode instanceNode = new GenericInstanceAtlasNode(); instanceNode.setGuid( this.mGuidAllocator.nextGUID() ); instanceNode.setInstanceGuid( instanceGuid ); instanceNode.setNodeName( instance.getOwnedTask().getName() ); if ( graphNode != null ) { parentIds = this.mRuntimeAtlasInstrument.fetchParentIds( graphNode.getId() ); instanceNode.setIsIsolated( parentIds == null || parentIds.isEmpty() ); } else { instanceNode.setIsIsolated( true ); } this.mInstanceAtlasNodeMapper.insert( instanceNode ); if ( parentIds != null && !parentIds.isEmpty() ) { for ( GUID parentId : parentIds ) { InstanceAtlasAdjacent adjacent = new GenericInstanceAtlasAdjacent(); adjacent.setGuid( this.mGuidAllocator.nextGUID() ); adjacent.setParentGuid( parentId ); /* NOTE: only the parent side of the adjacency is populated here; the child (instance) linkage is presumably attached elsewhere. */ this.mInstanceAtlasAdjacentMapper.insert( adjacent ); } } } protected void persistTaskExec( TaskScheduleContext context, RavenTaskInstance instance ) { TaskElement element = context.getElement(); GUID instanceGuid = instance.getInstanceEntry().getGuid(); InstanceExec exec = new GenericInstanceExec(); exec.setTaskGuid( element.getGuid() ); exec.setInstanceGuid( instanceGuid ); exec.setTaskName( instance.getOwnedTask().getName() ); exec.setInstanceName( instance.getInstanceEntry().getInstanceName() ); exec.setProcessorQueue( "default" ); exec.setClusterName( "local_cluster" ); exec.setExecState( TaskInstanceExecState.Submitted.getName() ); exec.setCurrentRetryNumber( 0 ); exec.setRetryTimes( instance.getInstanceEntry().getRetryCnt() ); this.mInstanceExecMapper.insert( exec ); } protected void triggerTaskEventTimeReady( TaskScheduleContext context, RavenTaskInstance instance ) { TaskElement element = context.getElement(); GUID instanceGuid = instance.getInstanceEntry().getGuid(); InstanceEvent event = new GenericInstanceEvent(); event.setGuid( this.mGuidAllocator.nextGUID() ); event.setTaskGuid( element.getGuid() ); event.setInstanceGuid( instanceGuid ); event.setInstanceName( instance.getInstanceEntry().getInstanceName() ); event.setRetryTimes( instance.getInstanceEntry().getRetryCnt() ); event.setCurrentRetryNumber( 0 ); event.setEventType( instance.getTaskType() ); event.setState( InstanceEventType.TaskTimeReady.getName() ); event.setExecTime( LocalDateTime.now() ); event.setEventContext( "{}" ); this.mScheduleManipulator.getInstanceEventMapper().insert( event ); } protected void prepareTaskInstances( Collection<TaskScheduleContext> contexts, LocalDateTime targetTime ) { for ( TaskScheduleContext context : contexts ) { TaskElement element = context.getElement(); RavenTask task = this.mCentralizedTaskInstrument.constructTask( element ); RavenTaskInstance instance = task.createInstance(); this.prepareInstance( context, instance );
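/* Each schedulable element is materialized in four steps: prepareInstance persists the runnable instance, prepareInstanceLineage records its atlas lineage, persistTaskExec writes the execution record, and triggerTaskEventTimeReady fires the TaskTimeReady event that downstream scheduling reacts to. */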
this.prepareInstanceLineage( context, instance ); this.persistTaskExec( context, instance ); this.triggerTaskEventTimeReady( context, instance ); } } protected Collection prepareScheduleTasks( Collection elements, LocalDateTime targetTime ) { if ( elements == null || elements.isEmpty() ) { return elements; } Collection contexts = new ArrayList<>(); for ( TaskElement element : elements ) { TaskScheduleContext context = this.prepareTaskScheduleTimeOffset( element, targetTime ); contexts.add( context ); } this.prepareTaskInstances( contexts, targetTime ); Debug.traceSyn( elements ); return elements; } @Override public UniformTaskScheduler taskScheduler() { return this.mTaskScheduler; } @Override public void prepareSchedulableTasks( Collection cycles, LocalDateTime targetTime ) { if ( targetTime == null ) { targetTime = LocalDateTime.now(); } TableIndex64Meta range = this.mTaskNodeManipulator.selectSchedulableIdRange( cycles, targetTime ); if ( range == null ) { return; } long idMin = range.getMinId(); long idMax = range.getMaxId(); if ( idMin <= 0 || idMax <= 0 || idMax < idMin ) { return; } long cursor = idMin; while ( cursor <= idMax ) { long windowStart = cursor; long windowEnd = cursor + this.mnScanIdWindow - 1; if ( windowEnd > idMax ) { windowEnd = idMax; } final long finalStart = windowStart; final long finalEnd = windowEnd; LocalDateTime finalTargetTime = targetTime; this.mExecutorService.submit( () -> { try { log.info( "[TaskSchedulerLifecycle] Preparing schedulable tasks (Start: {}, End: {}) ", finalStart, finalEnd ); Collection elements = this.mTaskNodeManipulator.fetchSchedulableTasksInRange( finalStart, finalEnd, cycles, finalTargetTime ); elements = this.prepareScheduleTasks( elements, finalTargetTime ); log.info( "[TaskSchedulerLifecycle] Preparing schedulable tasks (Start: {}, End: {}, Size: {}) ", finalStart, finalEnd, elements.size() ); } catch ( Exception e ) { log.error( "[TaskSchedulerLifecycle] Preparing schedulable tasks (Start: {}, End: {}) ", finalStart, finalEnd, e ); } } ); cursor = windowEnd + 1; } } @Override public void prepareSchedulableTasksDaily( LocalDateTime targetTime ) { this.prepareSchedulableTasks( DailyTaskScheduleCycles, targetTime ); } @Override public List fetchSchedulableTasksInRange( long idMin, long idMax, Collection cycles, LocalDateTime targetTime ) { return this.mTaskNodeManipulator.fetchSchedulableTasksInRange( idMin, idMax, cycles, targetTime ); } @Override public List fetchSchedulableTasksDaily( long idMin, long idMax, LocalDateTime targetTime ) { return this.mTaskNodeManipulator.fetchSchedulableTasksInRange( idMin, idMax, DailyTaskScheduleCycles, targetTime ); } public static class TaskScheduleContext { protected TaskElement element; protected LocalDateTime targetTime; protected LocalDateTime nextScheduleTime; protected LocalDateTime thisScheduleTime; public TaskScheduleContext( TaskElement element, LocalDateTime targetTime ) { this.element = element; this.targetTime = targetTime; } public TaskElement getElement() { return this.element; } public void setElement( TaskElement element ) { this.element = element; } public LocalDateTime getTargetTime() { return this.targetTime; } public void setTargetTime( LocalDateTime targetTime ) { this.targetTime = targetTime; } public LocalDateTime getNextScheduleTime() { return this.nextScheduleTime; } public void setNextScheduleTime( LocalDateTime nextScheduleTime ) { this.nextScheduleTime = nextScheduleTime; } public LocalDateTime getThisScheduleTime() { return this.thisScheduleTime; } public void 
setThisScheduleTime( LocalDateTime thisScheduleTime ) { this.thisScheduleTime = thisScheduleTime; } } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenTaskScheduler.java ================================================ package com.walnut.odin.conduct.schedule; import java.time.LocalDateTime; import java.util.List; import com.pinecone.hydra.task.TaskInstanceStatus; import com.pinecone.hydra.task.kom.UniformTaskInstrument; import com.pinecone.hydra.task.kom.instance.InstanceInstrument; import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument; import com.walnut.odin.dispatch.TaskDispatcher; import com.walnut.odin.task.CentralizedTaskInstrument; import com.walnut.odin.task.RavenTaskConfig; import com.walnut.odin.task.troll.TaskExecutionLauncher; import lombok.extern.slf4j.Slf4j; @Slf4j public class RavenTaskScheduler implements UniformTaskScheduler { private RavenTaskConfig mRavenTaskConfig; private InstanceInstrument mInstanceInstrument; private UniformTaskInstrument mUniformTaskInstrument; private RuntimeAtlasInstrument mRuntimeAtlasInstrument; private CentralizedTaskInstrument mCentralizedTaskInstrument; private TaskExecutionLauncher mTaskExecutionLauncher; private TaskDispatcher mTaskDispatcher; private TaskSchedulePreparator mTaskSchedulePreparator; private InstanceScheduleImpetus mInstanceScheduleImpetus; private InstanceScheduleAllocator mInstanceScheduleAllocator; private String mszPartitionName; public RavenTaskScheduler( CentralizedTaskInstrument taskInstrument, RuntimeAtlasInstrument atlasInstrument, TaskDispatcher dispatcher ) { log.info( "[Odin] [CrucialSchedulerComponentLifecycle] (RavenTaskScheduler Construction) " ); this.mCentralizedTaskInstrument = taskInstrument; this.mUniformTaskInstrument = taskInstrument.getUniformTaskInstrument(); this.mInstanceInstrument = this.mUniformTaskInstrument.getInstanceInstrument(); this.mRuntimeAtlasInstrument = atlasInstrument; this.mTaskExecutionLauncher = dispatcher.taskExecutionLauncher(); this.mTaskDispatcher = dispatcher; this.mRavenTaskConfig = (RavenTaskConfig) taskInstrument.getConfig(); this.mszPartitionName = this.mRavenTaskConfig.getSchedulePartitionName(); this.mInstanceScheduleAllocator = new RavenScheduleAllocator( this ); // [1] this.mTaskSchedulePreparator = new RavenTaskSchedulePreparator( this ); // [2] this.mInstanceScheduleImpetus = new RavenInstanceScheduleImpetus( this ); // [3] log.info( "[Odin] [CrucialSchedulerComponentLifecycle] (RavenTaskScheduler Construction) " ); } @Override public TaskSchedulePreparator taskSchedulePreparator() { return this.mTaskSchedulePreparator; } @Override public InstanceScheduleImpetus instanceScheduleImpetus() { return this.mInstanceScheduleImpetus; } @Override public InstanceScheduleAllocator instanceScheduleAllocator() { return this.mInstanceScheduleAllocator; } @Override public RavenTaskConfig ravenTaskConfig() { return this.mRavenTaskConfig; } @Override public CentralizedTaskInstrument taskInstrument() { return this.mCentralizedTaskInstrument; } @Override public InstanceInstrument instanceInstrument() { return this.mInstanceInstrument; } @Override public RuntimeAtlasInstrument atlasInstrument() { return this.mRuntimeAtlasInstrument; } @Override public TaskExecutionLauncher taskExecutionLauncher() { return this.mTaskExecutionLauncher; } @Override public TaskDispatcher taskDispatcher() { return this.mTaskDispatcher; } @Override public String getPartitionName() { return this.mszPartitionName; } 
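/* A minimal wiring sketch (hypothetical; the taskInstrument, atlasInstrument and dispatcher instances are assumed to come from the host container): RavenTaskScheduler scheduler = new RavenTaskScheduler( taskInstrument, atlasInstrument, dispatcher ); scheduler.taskSchedulePreparator().prepareSchedulableTasksDaily( LocalDateTime.now() ); scheduler.instanceScheduleImpetus().impelPrelaunchInstances( LocalDateTime.now() ); */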
public void fetch() { //this.mTaskSchedulePreparator.prepareSchedulableTasksDaily( LocalDateTime.now() ); this.mInstanceScheduleImpetus.impelPrelaunchInstances( LocalDateTime.now() ); } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/ScheduleCronHelper.java ================================================ package com.walnut.odin.conduct.schedule; import java.text.ParseException; import java.time.DayOfWeek; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; import java.util.Date; import org.quartz.CronExpression; import com.pinecone.hydra.task.marshal.TaskScheduleCycle; public final class ScheduleCronHelper { private static final ZoneId DEFAULT_ZONE_ID = ZoneId.systemDefault(); private ScheduleCronHelper() { } public static String generateDefaultCron( TaskScheduleCycle cycle ) { if ( cycle == null ) { throw new IllegalArgumentException( "TaskScheduleCycle is null." ); } switch ( cycle ) { case Minute: { return "0 * * * * ?"; } case Hour: { return "0 0 * * * ?"; } case Day: { return "0 0 0 * * ?"; } case Week: { return "0 0 0 ? * MON"; } case Month: { return "0 0 0 1 * ?"; } default: { throw new IllegalStateException( "Unsupported cycle: " + cycle ); } } } public static LocalDateTime alignToCycleStart( TaskScheduleCycle cycle, LocalDateTime referenceTime ) { if ( cycle == null ) { throw new IllegalArgumentException( "TaskScheduleCycle is null." ); } if ( referenceTime == null ) { throw new IllegalArgumentException( "Reference time is null." ); } switch ( cycle ) { case Minute: { return referenceTime .withSecond( 0 ) .withNano( 0 ); } case Hour: { return referenceTime .withMinute( 0 ) .withSecond( 0 ) .withNano( 0 ); } case Day: { LocalDate date = referenceTime.toLocalDate(); return date.atStartOfDay(); } case Week: { LocalDate date = referenceTime .toLocalDate() .with( DayOfWeek.MONDAY ); return date.atStartOfDay(); } case Month: { LocalDate date = referenceTime .withDayOfMonth( 1 ) .toLocalDate(); return date.atStartOfDay(); } default: { throw new IllegalStateException( "Unsupported cycle: " + cycle ); } } } public static LocalDateTime advanceByCycle( TaskScheduleCycle cycle, LocalDateTime currentTime ) { if ( cycle == null ) { throw new IllegalArgumentException( "TaskScheduleCycle is null." ); } if ( currentTime == null ) { throw new IllegalArgumentException( "Current time is null." ); } switch ( cycle ) { case Minute: { return currentTime.plusMinutes( 1 ); } case Hour: { return currentTime.plusHours( 1 ); } case Day: { return currentTime.plusDays( 1 ); } case Week: { return currentTime.plusWeeks( 1 ); } case Month: { return currentTime.plusMonths( 1 ); } default: { throw new IllegalStateException( "Unsupported cycle: " + cycle ); } } } public static LocalDateTime computeNextScheduleTime( TaskScheduleCycle cycle, LocalDateTime nextScheduleTime, LocalDateTime referenceTime ) { if ( cycle == null ) { throw new IllegalArgumentException( "TaskScheduleCycle is null." ); } if ( referenceTime == null ) { throw new IllegalArgumentException( "Reference time is null." 
); } if ( nextScheduleTime == null ) { return ScheduleCronHelper.alignToCycleStart( cycle, referenceTime ); } if ( nextScheduleTime.isAfter( referenceTime ) ) { return nextScheduleTime; } LocalDateTime advanced = nextScheduleTime; while ( !advanced.isAfter( referenceTime ) ) { advanced = ScheduleCronHelper.advanceByCycle( cycle, advanced ); } return advanced; } public static LocalDateTime computeNextByCron( String cron, LocalDateTime currentFireTime ) { try { CronExpression expression = new CronExpression( cron ); Date next = expression.getNextValidTimeAfter( Date.from( currentFireTime .atZone( DEFAULT_ZONE_ID ) .toInstant() )); if ( next == null ) { return null; } return LocalDateTime.ofInstant( next.toInstant(), DEFAULT_ZONE_ID ); } catch ( ParseException e ) { throw new IllegalStateException( "Invalid cron expression: " + cron, e ); } } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/AdaptiveCapacityDispatchStrategy.java ================================================ package com.walnut.odin.dispatch; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.PriorityQueue; import com.walnut.odin.task.troll.LaunchFeature; public class AdaptiveCapacityDispatchStrategy implements DispatchStrategy { protected static final int DEFAULT_HEAP_THRESHOLD = 16; protected final int mnHeapThreshold; protected static class ProcessorSlot { protected TaskExecutionProcessor mProcessor; protected int mnRemaining; protected ProcessorSlot( TaskExecutionProcessor processor, int nRemaining ) { this.mProcessor = processor; this.mnRemaining = nRemaining; } } public AdaptiveCapacityDispatchStrategy() { this( DEFAULT_HEAP_THRESHOLD ); } public AdaptiveCapacityDispatchStrategy( int nHeapThreshold ) { this.mnHeapThreshold = nHeapThreshold > 0 ? nHeapThreshold : DEFAULT_HEAP_THRESHOLD; } protected Map buildProcessorSlots( Collection processors ) { Map slotMap = new HashMap<>(); for ( TaskExecutionProcessor processor : processors ) { if ( processor.isExclusive() ) { continue; } int nPending = processor.getTaskExecutionQueue().pendingCapacity(); if ( nPending <= 0 ) { continue; } ProcessorSlot slot = new ProcessorSlot( processor, nPending ); slotMap.put( processor.getName(), slot ); } return slotMap; } protected List handleBindingContexts( Collection contexts, Map slotMap, Map> plan, TaskDispatcher dispatcher ) throws TaskDispatchException { List remaining = new ArrayList<>(); for ( TaskLaunchContext context : contexts ) { String szTarget = null; boolean bStrong = false; LaunchFeature feature = context.getLaunchFeature(); if ( feature != null && feature.getProcessorDesignated() != null ) { szTarget = feature.getProcessorDesignated(); bStrong = true; } else { szTarget = context.getAffinityProcessorName(); if ( szTarget == null ) { TaskExecutionProcessor p = dispatcher.getAffinityTasks( context.getTaskId() ); if ( p != null ) { szTarget = p.getName(); } } } if ( szTarget == null ) { remaining.add( context ); continue; } ProcessorSlot slot = slotMap.get( szTarget ); if ( slot == null ) { if ( bStrong ) { throw new TaskDispatchException( "Designated processor `" + szTarget + "` not available." ); } remaining.add( context ); continue; } if ( slot.mnRemaining <= 0 ) { if ( bStrong ) { throw new TaskDispatchException( "Designated processor `" + szTarget + "` capacity exceeded." 
); } remaining.add( context ); continue; } plan.computeIfAbsent( slot.mProcessor, k -> new ArrayList<>() ).add( context ); --slot.mnRemaining; } return remaining; } protected void dispatchNormal( Map slotMap, List contexts, Map> plan ) { if ( slotMap.size() <= this.mnHeapThreshold ) { this.dispatchLinear( slotMap, contexts, plan ); } else { this.dispatchHeap( slotMap, contexts, plan ); } } @Override public Map> dispatch( Collection processors, Collection contexts, TaskDispatcher dispatcher ) throws TaskDispatchException { Map> plan = new HashMap<>(); if ( processors == null || processors.isEmpty() ) { return plan; } if ( contexts == null || contexts.isEmpty() ) { return plan; } Map slotMap = this.buildProcessorSlots( processors ); if ( slotMap.isEmpty() ) { return plan; } List remaining = this.handleBindingContexts( contexts, slotMap, plan, dispatcher ); if ( remaining.isEmpty() ) { return plan; } this.dispatchNormal( slotMap, remaining, plan ); return plan; } protected void dispatchLinear( Map slotMap, Collection contexts, Map> plan ) { List slots = new ArrayList<>( slotMap.values() ); for ( TaskLaunchContext context : contexts ) { ProcessorSlot best = null; for ( ProcessorSlot slot : slots ) { if ( slot.mnRemaining <= 0 ) { continue; } if ( best == null || this.compareSlot( slot, best ) < 0 ) { best = slot; } } if ( best == null ) { break; } plan.computeIfAbsent( best.mProcessor, k -> new ArrayList<>() ).add( context ); --best.mnRemaining; } } protected void dispatchHeap( Map slotMap, Collection contexts, Map> plan ) { PriorityQueue heap = new PriorityQueue<>( this::compareSlot ); for ( ProcessorSlot slot : slotMap.values() ) { if ( slot.mnRemaining > 0 ) { heap.offer( slot ); } } for ( TaskLaunchContext context : contexts ) { ProcessorSlot slot = heap.poll(); if ( slot == null ) { break; } plan.computeIfAbsent( slot.mProcessor, k -> new ArrayList<>() ).add( context ); --slot.mnRemaining; if ( slot.mnRemaining > 0 ) { heap.offer( slot ); } } } protected int compareSlot( ProcessorSlot a, ProcessorSlot b ) { if ( a.mnRemaining != b.mnRemaining ) { return Integer.compare( b.mnRemaining, a.mnRemaining ); } if ( a.mProcessor.getPriority() != b.mProcessor.getPriority() ) { return Integer.compare( b.mProcessor.getPriority(), a.mProcessor.getPriority() ); } return a.mProcessor.getName().compareTo( b.mProcessor.getName() ); } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/DefaultPipelineLaunchReport.java ================================================ package com.walnut.odin.dispatch; import java.util.Collection; import java.util.Collections; import com.pinecone.hydra.proc.UProcess; public class DefaultPipelineLaunchReport implements PipelineLaunchReport { protected TaskExecutionProcessor mProcessor; protected Collection mLaunchedProcesses; protected Collection mLaunchedContext; protected Collection mWaitingContext; protected boolean mbPreparing; protected DefaultPipelineLaunchReport( TaskExecutionProcessor processor, Collection launchedProcesses, Collection launchedContext, Collection waitingContext, boolean preparing ) { this.mProcessor = processor; this.mLaunchedProcesses = launchedProcesses; this.mLaunchedContext = launchedContext; this.mWaitingContext = waitingContext; this.mbPreparing = preparing; } public static DefaultPipelineLaunchReport preparing( TaskExecutionProcessor processor, Collection launchedProcesses, Collection waitingContext ) { return new DefaultPipelineLaunchReport( processor, launchedProcesses, 
Collections.emptyList(), waitingContext, true ); } public static DefaultPipelineLaunchReport executed( TaskExecutionProcessor processor, Collection launchedProcesses, Collection launchedContext, Collection waitingContext ) { return new DefaultPipelineLaunchReport( processor, launchedProcesses, launchedContext, waitingContext, false ); } public static DefaultPipelineLaunchReport recycled( TaskExecutionProcessor processor, Collection recycled ) { return new DefaultPipelineLaunchReport( processor, Collections.emptyList(), Collections.emptyList(), recycled, false ); } @Override public Collection launchedProcesses() { return this.mLaunchedProcesses; } @Override public Collection launchedContext() { return this.mLaunchedContext; } @Override public Collection waitingContext() { return this.mWaitingContext; } @Override public boolean isPreparing() { return this.mbPreparing; } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/GenericI32TaskQueue.java ================================================ package com.walnut.odin.dispatch; public class GenericI32TaskQueue extends ArchTaskExecutionI32Queue implements TaskExecutionQueue { public GenericI32TaskQueue( TaskQueueMeta queueMeta ) { super(); if ( queueMeta == null ) { throw new IllegalArgumentException( "TaskQueueMeta cannot be null." ); } this.mszName = queueMeta.getName(); this.mnMaxCapacity = queueMeta.getMaxCapacity(); this.mnMinCapacity = queueMeta.getMinCapacity(); this.mnRuntimeInstanceCapacity = queueMeta.getRuntimeInstanceCapacity(); this.validateInitialMeta(); } private void validateInitialMeta() { if ( this.mnMaxCapacity < 0 ) { throw new IllegalArgumentException( "Max capacity cannot be negative." ); } if ( this.mnMinCapacity < 0 ) { throw new IllegalArgumentException( "Min capacity cannot be negative." ); } if ( this.mnRuntimeInstanceCapacity < 0 ) { throw new IllegalArgumentException( "Runtime instance capacity cannot be negative." ); } if ( this.mnMinCapacity > this.mnMaxCapacity ) { throw new IllegalArgumentException( "Min capacity cannot exceed max capacity." ); } if ( this.mnRuntimeInstanceCapacity > this.mnMaxCapacity ) { throw new IllegalArgumentException( "Runtime instance capacity cannot exceed max capacity." 
); } } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/RavenTaskDispatcher.java ================================================ package com.walnut.odin.dispatch; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.proc.UProcess; import com.walnut.odin.conduct.CollectiveTaskRegiment; import com.walnut.odin.dispatch.entity.TaskProcessorEntity; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.source.TaskProcessorManipulator; import com.walnut.odin.task.troll.InstanceLaunchException; import com.walnut.odin.task.troll.LaunchFeature; import com.walnut.odin.task.troll.TaskExecutionLauncher; public class RavenTaskDispatcher implements TaskDispatcher { protected final Logger log = LoggerFactory.getLogger( this.getClass() ); protected final ReentrantLock mLock; protected final Map mProcessors; protected final Map mClientProcessorsIndex; protected final Map mAffinityTable; protected TaskProcessorManipulator mTaskProcessorManipulator; protected DispatchStrategy mDispatchStrategy; protected TaskExecutionLauncher mTaskExecutionLauncher; protected CollectiveTaskRegiment mCollectiveTaskRegiment; public RavenTaskDispatcher( CollectiveTaskRegiment regiment, DispatchStrategy strategy ) { this.mLock = new ReentrantLock(); this.mProcessors = new LinkedHashMap<>(); this.mAffinityTable = new HashMap<>(); this.mDispatchStrategy = strategy; this.mClientProcessorsIndex = new HashMap<>(); this.mCollectiveTaskRegiment = regiment; this.mTaskExecutionLauncher = regiment.taskExecutionLauncher(); this.mTaskProcessorManipulator = regiment.taskInstrument().getRavenTaskMasterManipulator().getTaskProcessorManipulator(); } public RavenTaskDispatcher( CollectiveTaskRegiment regiment ) { this( regiment, new AdaptiveCapacityDispatchStrategy() ); } @Override public TaskExecutionLauncher taskExecutionLauncher() { return this.mTaskExecutionLauncher; } @Override public void registerProcessor( TaskExecutionProcessor processor ) { this.mLock.lock(); try { this.mProcessors.put( processor.getName(), processor ); this.mClientProcessorsIndex.put( processor.getControlClientId(), processor ); this.log.info( "Registered processor, name:`{}`, clientId:`{}` ", processor.getName(), processor.getControlClientId() ); } finally { this.mLock.unlock(); } } @Override public TaskProcessorEntity registerProcessor( String szProcessorName, long nClientId ) throws IllegalArgumentException { TaskProcessorEntity entity = this.mTaskProcessorManipulator.selectByProcessorName( szProcessorName ); if ( entity == null ) { throw new IllegalArgumentException( szProcessorName + " not found" ); } entity.setControlClientId( nClientId ); TaskExecutionProcessor processor = new RavenTaskExecutionProcessor( entity, this.mTaskExecutionLauncher ); this.registerProcessor( processor ); return entity; } @Override public void unregisterProcessor( String szProcessorName ) { this.mLock.lock(); try { this.mProcessors.remove( szProcessorName ); this.mAffinityTable.entrySet().removeIf( entry -> { if ( entry.getValue().processor.getName().equals( szProcessorName ) ) { return true; } return false; } ); this.log.info( "Unregistered processor, name:`{}`", 
szProcessorName ); } finally { this.mLock.unlock(); } } @Override public void unregisterProcessor( long nClientId ) { TaskExecutionProcessor processor = null; this.mLock.lock(); try { processor = this.mClientProcessorsIndex.remove( nClientId ); } finally { this.mLock.unlock(); if ( processor != null ) { this.unregisterProcessor( processor.getName() ); this.log.info( "Unregistered processor, name:`{}`, clientId:`{}` ", processor.getName(), processor.getControlClientId() ); } } } @Override public Collection fetchProcessors() { this.mLock.lock(); try { return Collections.unmodifiableCollection( new ArrayList<>( this.mProcessors.values() ) ); } finally { this.mLock.unlock(); } } @Override public void setProcessorAffinity( String szProcessorName, TaskLaunchContext launchContext ) { this.mLock.lock(); try { TaskExecutionProcessor processor = this.mProcessors.get( szProcessorName ); if ( processor == null ) { throw new IllegalArgumentException( "Processor not found: " + szProcessorName ); } this.mAffinityTable.put( launchContext.getTaskId(), new TaskProcPair( processor, launchContext ) ); } finally { this.mLock.unlock(); } } @Override public TaskExecutionProcessor getAffinityTasks( Identification taskId ) { TaskProcPair pair = this.mAffinityTable.get( taskId ); if ( pair != null ) { return pair.processor; } return null; } @Override public Collection queryAffinityTasks( String szProcessorName ) { this.mLock.lock(); try { Collection result = new ArrayList<>(); for ( TaskProcPair pair : this.mAffinityTable.values() ) { if ( pair.processor.getName().equals( szProcessorName ) ) { result.add( pair.launchContext ); } } return result; } finally { this.mLock.unlock(); } } @Override public PipelineLaunchReport pipeCreate( Collection contexts ) throws InstanceLaunchException, TaskDispatchException { Map> plan; this.mLock.lock(); try { plan = this.mDispatchStrategy.dispatch( new ArrayList<>( this.mProcessors.values() ), contexts, this ); } finally { this.mLock.unlock(); } return this.executeScheme( plan, true ); } @Override public PipelineLaunchReport pipeLaunch( Collection contexts ) throws InstanceLaunchException, TaskDispatchException { Map> plan; this.mLock.lock(); try { plan = this.mDispatchStrategy.dispatch( new ArrayList<>( this.mProcessors.values() ), contexts, this ); } finally { this.mLock.unlock(); } return this.executeScheme( plan, false ); } protected PipelineLaunchReport executeScheme( Map> scheme, boolean bCreation ) throws InstanceLaunchException, TaskDispatchException { List launched = new ArrayList<>(); List consumed = new ArrayList<>(); List waiting = new ArrayList<>(); for ( Map.Entry> entry : scheme.entrySet() ) { TaskExecutionProcessor processor = entry.getKey(); Collection assigned = entry.getValue(); PipelineLaunchReport report; if ( bCreation ) { report = processor.pipeCreate( assigned ); } else { report = processor.pipeLaunch( assigned ); } launched.addAll( report.launchedProcesses() ); consumed.addAll( report.launchedContext() ); waiting.addAll( report.waitingContext() ); } return DefaultPipelineLaunchReport.executed( null, launched, consumed, waiting ); } @Override public UProcess create( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException { TaskLaunchContext context = TaskLaunchContext.of( instance, feature ); PipelineLaunchReport _r = this.pipeCreate( List.of( context ) ); return context.getLaunchedProcess(); } @Override public UProcess launch( RavenTaskInstance instance, LaunchFeature feature ) throws 
InstanceLaunchException, TaskDispatchException { TaskLaunchContext context = TaskLaunchContext.of( instance, feature ); PipelineLaunchReport _r = this.pipeLaunch( List.of( context ) ); return context.getLaunchedProcess(); } protected static class TaskProcPair { public TaskExecutionProcessor processor; public TaskLaunchContext launchContext; public TaskProcPair( TaskExecutionProcessor processor, TaskLaunchContext launchContext ) { this.processor = processor; this.launchContext = launchContext; } } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/RavenTaskExecutionProcessor.java ================================================ package com.walnut.odin.dispatch; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.deploy.Server; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.event.ProcessEventHandler; import com.pinecone.hydra.proc.image.EntryPointRunnable; import com.walnut.odin.dispatch.entity.TaskProcessorEntity; import com.walnut.odin.task.RavenTaskInstance; import com.walnut.odin.task.troll.InstanceLaunchException; import com.walnut.odin.task.troll.LaunchFeature; import com.walnut.odin.task.troll.TaskExecutionLauncher; public class RavenTaskExecutionProcessor implements TaskExecutionProcessor { protected String mszName; protected Server mDeployClusterServer; protected String mszClusterPath; protected String mszClusterName; protected long mnControlClientId; protected boolean mbLocal; protected int mnPriority; protected boolean mbExclusive; protected TaskExecutionQueue mTaskExecutionQueue; protected TaskExecutionLauncher mTaskExecutionLauncher; protected Map mRunningProcesses; protected ConsumeCompromisedPolice mConsumeCompromisedPolice; protected Logger log = LoggerFactory.getLogger( this.getClass() ); public RavenTaskExecutionProcessor( TaskProcessorEntity processorEntity, TaskExecutionQueue queue, TaskExecutionLauncher launcher ) { this.mszName = processorEntity.getName(); this.mDeployClusterServer = processorEntity.getDeployClusterServer(); this.mszClusterPath = processorEntity.getClusterPath(); this.mszClusterName = processorEntity.getClusterName(); this.mnControlClientId = processorEntity.getControlClientId(); this.mbLocal = processorEntity.isLocal(); this.mnPriority = processorEntity.getPriority(); this.mbExclusive = processorEntity.isExclusive(); this.mTaskExecutionQueue = queue; this.mTaskExecutionLauncher = launcher; this.mRunningProcesses = new ConcurrentHashMap<>(); this.mConsumeCompromisedPolice = ConsumeCompromisedPolice.EvictionException; // TODO, Advance } public RavenTaskExecutionProcessor( TaskProcessorEntity processorEntity, TaskExecutionLauncher launcher ) { this( processorEntity, new GenericI32TaskQueue( processorEntity.getTaskQueueMeta() ), launcher ); } @Override public String getName() { return this.mszName; } @Override public Server getDeployClusterServer() { return this.mDeployClusterServer; } @Override public String getClusterPath() { return this.mszClusterPath; } @Override public String getClusterName() { return this.mszClusterName; } @Override public long 
getControlClientId() { return this.mnControlClientId; } @Override public TaskExecutionQueue getTaskExecutionQueue() { return this.mTaskExecutionQueue; } @Override public boolean isLocal() { return this.mbLocal; } @Override public int getPriority() { return this.mnPriority; } @Override public boolean isExclusive() { return this.mbExclusive; } @Override public TaskLaunchContext getTaskLaunchContextByPID( GUID pid ) { return this.mRunningProcesses.get( pid ); } @Override public int getRunningSize() { return this.mRunningProcesses.size(); } @Override public int getWaitingSize() { return this.mTaskExecutionQueue.waitingSize(); } protected void prepareSysEventHandle( LaunchFeature feature ) { feature.withSysProcEventHandlers(new ProcessEventHandler() { @Override public void fired( EntryPointRunnable runnable, ProcessEvent event ) { if ( ProcessEvent.Terminated == event || ProcessEvent.Error == event ) { UProcess process = runnable.ownedProcess(); TaskLaunchContext context = getTaskLaunchContextByPID( process.getPID() ); try { afterProcessTerminated( process, context ); log.info( "[ProcessSystemEventTriggered] ( ProcName:`{}`, ProcEvent:`{}`, PID:`{}`, InstanceId:`{}` ) ", runnable.ownedProcess().getName(), event.getName(), runnable.ownedProcess().getPID(), context.getTaskInstance().getId() ); } catch ( TaskDispatchException e ) { /* Instance-level failures are handled uniformly in the dedicated instance-metadata callback; nothing further is required here. */ log.error( "[ProcessSystemEventTriggered] ( ProcName:`{}`, ProcEvent:`{}`, PID:`{}`, InstanceId:`{}`, What:`{}` ) ", runnable.ownedProcess().getName(), event.getName(), runnable.ownedProcess().getPID(), context.getTaskInstance().getId(), e.getMessage(), e ); handleAsyncTaskDispatchException( runnable, event, e ); } } } }); } protected void handleAsyncTaskDispatchException( EntryPointRunnable runnable, ProcessEvent event, TaskDispatchException e ) { /* TODO: default to eviction for now; refine the policy later. */ } @Override public UProcess directlyCreate( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException { this.prepareSysEventHandle( feature ); if ( this.mbLocal ) { return this.mTaskExecutionLauncher.createLocally( instance, feature ); } return this.mTaskExecutionLauncher.createRemotely( instance, this.mnControlClientId, feature ); } @Override public UProcess directlyLaunch( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException { this.prepareSysEventHandle( feature ); if ( this.mbLocal ) { return this.mTaskExecutionLauncher.launchLocally( instance, feature ); } return this.mTaskExecutionLauncher.launchRemotely( instance, this.mnControlClientId, feature ); } protected Collection<TaskLaunchContext> subtractContext( Collection<TaskLaunchContext> source, Collection<TaskLaunchContext> consumed ) { if ( source == null || source.isEmpty() ) { return Collections.emptyList(); } if ( consumed == null || consumed.isEmpty() ) { return source; } Set consumedIds = new HashSet<>( consumed.size() ); for ( TaskLaunchContext ctx : consumed ) { consumedIds.add( ctx.getTaskInstance().getId() ); } List<TaskLaunchContext> waiting = new ArrayList<>(); for ( TaskLaunchContext ctx : source ) { if ( !consumedIds.contains( ctx.getTaskInstance().getId() ) ) { waiting.add( ctx ); } } return waiting; } protected void afterProcessLaunched( UProcess process, TaskLaunchContext context ) { context.afterProcessLaunched( process ); this.mRunningProcesses.put( process.getPID(), context ); } protected void afterProcessTerminated( UProcess process, TaskLaunchContext context ) throws TaskDispatchException { this.mRunningProcesses.remove( process.getPID() ); this.shiftLaunchsPipeline( List.of( context.getTaskInstance().getId() ) ); }
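/* Queue lifecycle in brief: prepare(...) merely enqueues contexts; pipeCreate(...) and pipeLaunch(...) consume queue capacity immediately (create vs. start); recycleTerminated(...) and shiftLaunchsPipeline(...) release the slots of finished instances and pull waiting contexts forward. */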
@Override public PipelineLaunchReport prepare( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException { this.mTaskExecutionQueue.offer( contexts ); return DefaultPipelineLaunchReport.preparing( this, Collections.emptyList(), contexts ); } protected PipelineLaunchReport pipeOpt( Collection<TaskLaunchContext> contexts, boolean directlyLaunch ) throws TaskDispatchException { RTaskInstanceConsumer consumer = new RTaskInstanceConsumer( directlyLaunch ); Collection<TaskLaunchContext> consumed = this.mTaskExecutionQueue.pipeConsume( contexts, consumer ); List<UProcess> launched = consumer.getLaunched(); Collection<TaskLaunchContext> waiting = this.subtractContext( contexts, consumed ); return DefaultPipelineLaunchReport.executed( this, launched, consumed, waiting ); } @Override public PipelineLaunchReport pipeCreate( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException { return this.pipeOpt( contexts, false ); } @Override public PipelineLaunchReport pipeLaunch( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException { return this.pipeOpt( contexts, true ); } @Override public PipelineLaunchReport recycleTerminated( Collection terminatedIds ) { Collection<TaskLaunchContext> recycled = this.mTaskExecutionQueue.recycleTerminated( terminatedIds ); return DefaultPipelineLaunchReport.recycled( this, recycled ); } @Override public PipelineLaunchReport launchsPending() throws TaskDispatchException { RTaskInstanceConsumer consumer = new RTaskInstanceConsumer( true ); Collection<TaskLaunchContext> consumed = this.mTaskExecutionQueue.consumePending( consumer ); List<UProcess> launched = consumer.getLaunched(); return DefaultPipelineLaunchReport.executed( this, launched, consumed, Collections.emptyList() ); } @Override public PipelineLaunchReport shiftLaunchsPipeline( Collection terminatedIds ) throws TaskDispatchException { RTaskInstanceConsumer consumer = new RTaskInstanceConsumer( true ); Collection<TaskLaunchContext> consumed = this.mTaskExecutionQueue.shiftPipeline( terminatedIds, consumer ); List<UProcess> launched = consumer.getLaunched(); return DefaultPipelineLaunchReport.executed( this, launched, consumed, Collections.emptyList() ); } protected class RTaskInstanceConsumer implements TaskInstanceConsumer { public List<UProcess> launched; public boolean directlyLaunch; public RTaskInstanceConsumer( boolean directlyLaunch ) { this.launched = new ArrayList<>(); this.directlyLaunch = directlyLaunch; } @Override public void tryConsume( TaskLaunchContext context ) throws TaskConsumeException { try { UProcess proc; if ( this.directlyLaunch ) { proc = directlyLaunch( context.getTaskInstance(), context.getLaunchFeature() ); } else { proc = directlyCreate( context.getTaskInstance(), context.getLaunchFeature() ); } this.launched.add( proc ); afterProcessLaunched( proc, context ); } catch ( InstanceLaunchException e ) { log.error( "Error during pipeline consume, what:'{}' ", e.getMessage(), e ); throw new TaskConsumeException( e ); } } @Override public ConsumeCompromisedPolice compromisedPolice() { return mConsumeCompromisedPolice; } public List<UProcess> getLaunched() { return this.launched; } } } ================================================ FILE: Odin/odin-framework-conduct/src/main/java/com/walnut/odin/task/mapper/TaskProcessorMapper.java ================================================ package com.walnut.odin.task.mapper; import java.util.List; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; import com.pinecone.framework.util.id.GUID; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import com.walnut.odin.dispatch.entity.GenericTaskProcessorEntity; import com.walnut.odin.dispatch.entity.TaskProcessorEntity; import com.walnut.odin.task.source.TaskProcessorManipulator; @Mapper @IbatisDataAccessObject public interface TaskProcessorMapper extends TaskProcessorManipulator { @Select( "SELECT " + " `id`, " + " `guid`, " + " `processor_name` AS name, " + " `cluster_path` AS clusterPath, " + " `cluster_name` AS clusterName, " + " `is_local` AS `local`, " + " `is_exclusive` AS exclusive, " + " `priority`, " + " `queue_name` AS queueName, " + " `queue_max_capacity` AS queueMaxCapacity, " + " `queue_min_capacity` AS queueMinCapacity, " + " `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, " + " `enable`, " + " `create_time` AS createTime, " + " `update_time` AS updateTime " + "FROM `odin_task_processor` " + "WHERE `processor_name` = #{name}" ) GenericTaskProcessorEntity selectByProcessorName( @Param("name") String szProcessorName ); @Select( "SELECT " + " `id`, " + " `guid`, " + " `processor_name` AS name, " + " `cluster_path` AS clusterPath, " + " `cluster_name` AS clusterName, " + " `is_local` AS `local`, " + " `is_exclusive` AS exclusive, " + " `priority`, " + " `queue_name` AS queueName, " + " `queue_max_capacity` AS queueMaxCapacity, " + " `queue_min_capacity` AS queueMinCapacity, " + " `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, " + " `enable`, " + " `create_time` AS createTime, " + " `update_time` AS updateTime " + "FROM `odin_task_processor` " + "WHERE `guid` = #{guid} AND `enable` = 1" ) GenericTaskProcessorEntity selectByGuid( @Param("guid") GUID guid ); @Select( "SELECT " + " `id`, " + " `guid`, " + " `processor_name` AS name, " + " `cluster_path` AS clusterPath, " + " `cluster_name` AS clusterName, " + " `is_local` AS `local`, " + " `is_exclusive` AS exclusive, " + " `priority`, " + " `queue_name` AS queueName, " + " `queue_max_capacity` AS queueMaxCapacity, " + " `queue_min_capacity` AS queueMinCapacity, " + " `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, " + " `enable`, " + " `create_time` AS createTime, " + " `update_time` AS updateTime " + "FROM `odin_task_processor` " + "WHERE `cluster_name` = #{clusterName} AND `enable` = 1" ) List selectByClusterName0( @Param("clusterName") String clusterName ); @Override @SuppressWarnings("unchecked") default List selectByClusterName( @Param("clusterName") String clusterName ) { return (List) this.selectByClusterName0( clusterName ); } @Select( "SELECT " + " `id`, " + " `guid`, " + " `processor_name` AS name, " + " `cluster_path` AS clusterPath, " + " `cluster_name` AS clusterName, " + " `is_local` AS `local`, " + " `is_exclusive` AS exclusive, " + " `priority`, " + " `queue_name` AS queueName, " + " `queue_max_capacity` AS queueMaxCapacity, " + " `queue_min_capacity` AS queueMinCapacity, " + " `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, " + " `enable`, " + " `create_time` AS createTime, " + " `update_time` AS updateTime " + "FROM `odin_task_processor`" ) List selectAll0(); @Override @SuppressWarnings("unchecked") default List selectAll() { return (List) this.selectAll0(); } @Insert( "INSERT INTO `odin_task_processor` ( " + " `guid`, " + " `processor_name`, " + " `cluster_path`, " + " `cluster_name`, " + " `is_local`, " + " `is_exclusive`, " + " `priority`, " + " `queue_name`, " + " `queue_max_capacity`, " + " `queue_min_capacity`, " + " `queue_runtime_instance_capacity` " + ") VALUES ( " + " #{entity.guid}, " + " #{entity.name}, " + " 
#{entity.clusterPath}, " + " #{entity.clusterName}, " + " #{entity.local}, " + " #{entity.exclusive}, " + " #{entity.priority}, " + " #{entity.queueName}, " + " #{entity.queueMaxCapacity}, " + " #{entity.queueMinCapacity}, " + " #{entity.queueRuntimeInstanceCapacity} " + ")" ) int insert( @Param("entity") TaskProcessorEntity entity ); @Update( "UPDATE `odin_task_processor` SET " + " `cluster_path` = #{clusterPath}, " + " `cluster_name` = #{clusterName}, " + " `is_local` = #{local}, " + " `is_exclusive` = #{exclusive}, " + " `priority` = #{priority}, " + " `processor_name` = #{name}, " + " `queue_name` = #{queueName}, " + " `queue_max_capacity` = #{queueMaxCapacity}, " + " `queue_min_capacity` = #{queueMinCapacity}, " + " `queue_runtime_instance_capacity` = #{queueRuntimeInstanceCapacity}, " + " `enable` = #{enable} " + "WHERE `guid` = #{guid}" ) int updateByGuid( GenericTaskProcessorEntity entity ); @Update( "UPDATE `odin_task_processor` SET " + " `queue_max_capacity` = #{maxCapacity}, " + " `queue_min_capacity` = #{minCapacity}, " + " `queue_runtime_instance_capacity` = #{runtimeCapacity} " + "WHERE `guid` = #{guid}" ) int updateQueueCapacity( @Param("guid") GUID guid, @Param("maxCapacity") int maxCapacity, @Param("minCapacity") int minCapacity, @Param("runtimeCapacity") int runtimeCapacity ); @Delete( "DELETE FROM `odin_task_processor` WHERE `guid` = #{guid}" ) int deleteByGuid( @Param("guid") GUID guid ); @Update( "UPDATE `odin_task_processor` SET `enable` = 1 WHERE `guid` = #{guid}" ) int enable( @Param("guid") GUID guid ); @Update( "UPDATE `odin_task_processor` SET `enable` = 0 WHERE `guid` = #{guid}" ) int disable( @Param("guid") GUID guid ); } ================================================ FILE: Odin/odin-framework-runtime/pom.xml ================================================ <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <parent> <artifactId>odin</artifactId> <groupId>com.walnut.odin</groupId> <version>2.5.1</version> </parent> <artifactId>odin-framework-runtime</artifactId> <version>2.5.1</version> <modelVersion>4.0.0</modelVersion> <dependencies> <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-framework-runtime</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency> <dependency> <groupId>com.pinecone.slime.jelly</groupId> <artifactId>jelly</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency> <dependency> <groupId>com.pinecone.hydra.kom.driver.default</groupId> <artifactId>hydra-kom-default-driver</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency> <dependency> <groupId>com.pinecone.tritium</groupId> <artifactId>hydra-system-tritium</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency> <dependency> <groupId>com.walnut.odin</groupId> <artifactId>odin-architecture</artifactId> <version>2.5.1</version> <scope>compile</scope> </dependency> </dependencies> </project> ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/ArchRemoteProcessManagerNode.java ================================================ package com.walnut.odin.proc; import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.event.ProcessLifecycleHandler; import com.pinecone.hydra.proc.image.EntryPointRunnable; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.image.URLImageLoader; import com.pinecone.hydra.proc.image.kom.ImageElement; import com.pinecone.hydra.system.centrum.UniformCentralSystem; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.walnut.odin.proc.client.RemoteProcessManagerClient; public abstract class ArchRemoteProcessManagerNode implements RemoteProcessManagerNode {
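/* This node adapts a local ProcessManager for remote mediation: execution images are resolved through the local URLImageLoader first, then via the UniformCentralSystem's imperium express instrument; lifecycle handlers are guarded by a read-write lock. A minimal registration sketch (hypothetical, and assuming ProcessLifecycleHandler is a functional interface): node.addProcessLifecycleHandler( ( imageAddress, runnable, event ) -> System.out.println( imageAddress + " -> " + event.getName() ) ); */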
protected Logger mLogger; protected ProcessManager mProcessManager; protected List mLifecycleHandlers; protected ReadWriteLock mnClientLock = new ReentrantReadWriteLock(); protected ArchRemoteProcessManagerNode( ProcessManager processManager ) { this.mLogger = LoggerFactory.getLogger( this.getClass() ); this.mProcessManager = processManager; this.mLifecycleHandlers = new ArrayList<>(); } @Override public URLImageLoader imageLoader() { return (URLImageLoader) this.mProcessManager.getImageLoader(); } @Override public GuidAllocator getGuidAllocator() { return this.mProcessManager.getGuidAllocator(); } @Override public ProcessManager localProcessManager() { return this.mProcessManager; } @Override public RuntimeSystem superiorSystem() { return this.mProcessManager.superiorSystem(); } @Override public Logger getLogger() { return this.mLogger; } @Override public ExecutionImage queryExecutionImage( String path ) { ExecutionImage image = this.imageLoader().queryExecutionImage( path ); if ( image != null ) { return image; } if ( this.superiorSystem() instanceof UniformCentralSystem ) { EntityNode e = ((UniformCentralSystem) this.superiorSystem()).imperiumPrivy().getExpressInstrument().queryNode( path ); if ( e instanceof ImageElement ) { return ((ImageElement) e).getImage(); } } return null; } @Override public ExecutionImage queryExecutionImage( URI uri ) { return this.imageLoader().queryExecutionImage( uri ); } @Override public void registerLocalScopeExecutionImage( String dirPath, ExecutionImage image ) { this.imageLoader().registerLocalScopeExecutionImage( dirPath, image ); } @Override public UProcess getProcess( GUID pid ) { return this.mProcessManager.getProcess( pid ); } @Override public boolean hasOwnProcess( GUID pid ) { UProcess process = this.mProcessManager.getProcess( pid ); if ( process instanceof RemoteProcess) { return false; } return process != null; } @Override public boolean containProcess( GUID pid ) { return this.mProcessManager.containProcess( pid ); } @Override public Collection searchProcessesByName( String procName ) { return this.mProcessManager.searchProcessesByName( procName ); } @Override public Collection searchProcessesByNameNoCase( String procName ) { return this.mProcessManager.searchProcessesByNameNoCase( procName ); } protected void afterMediatedRemoteProcess( MediatedRemoteProcess process, String imageAddress, boolean isURI ) { this.notifyProcessLifecycleHandlers( imageAddress, null, ProcessEvent.Prepare ); ExecutionImage image; if ( isURI ) { image = this.queryExecutionImage( URI.create( imageAddress ) ); } else { image = this.queryExecutionImage( imageAddress ); } if ( image == null ) { throw new IllegalStateException( "[MirrorCompromised] `" + imageAddress + "` is not a valid image address." 
); } this.mProcessManager.getImageModifier().applyImageAddress( image, imageAddress ); process.mExecutionImage = image; process.mProcessManager = this.mProcessManager; } @Override public RemoteProcessManagerNode addProcessLifecycleHandler(ProcessLifecycleHandler handler ) { this.mnClientLock.writeLock().lock(); try { this.mLifecycleHandlers.add( handler ); return this; } finally { this.mnClientLock.writeLock().unlock(); } } @Override public RemoteProcessManagerNode removeProcessLifecycleHandler( ProcessLifecycleHandler handler ) { this.mnClientLock.writeLock().lock(); try { this.mLifecycleHandlers.remove( handler ); return this; } finally { this.mnClientLock.writeLock().unlock(); } } @Override public int getProcessLifecycleHandlersSize() { this.mnClientLock.readLock().lock(); try { return this.mLifecycleHandlers.size(); } finally { this.mnClientLock.readLock().unlock(); } } @Override @Unsafe public void notifyProcessLifecycleHandlers( String imageAddress, EntryPointRunnable runnable, ProcessEvent event ) { this.mnClientLock.readLock().lock(); try { for ( ProcessLifecycleHandler handler : this.mLifecycleHandlers ) { handler.fired( imageAddress, runnable, event ); } } finally { this.mnClientLock.readLock().unlock(); } } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/MediatedRemoteProcess.java ================================================ package com.walnut.odin.proc; import com.pinecone.framework.system.ApoptosisRejectSignalException; import com.pinecone.framework.system.NotImplementedException; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.executum.Executum; import com.pinecone.framework.system.executum.Lifecycle; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.executum.TaskManager; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.ControllableLevel; import com.pinecone.hydra.proc.ProcessActionTape; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.entity.ElementNode; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.ns.ProcSpace; import com.pinecone.hydra.proc.tomb.RuntimeTombstone; import com.pinecone.hydra.system.ko.entity.ObjectTable; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; import com.walnut.odin.proc.server.RemoteProcessManagerServer; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; import java.util.Map; public class MediatedRemoteProcess implements RemoteProcess { protected RemoteProcessManagerServer mRemoteProcessManagerServer; protected ProcessManager mProcessManager; protected ExecutionImage mExecutionImage; protected String mszName; protected long mnControlClientId; protected long mnLocalPID; protected GUID mParentPID; protected GUID mProcessId; protected Map mStartupArguments; protected Map mEnvironmentVariables; protected List mRemoteEventHandlers; public MediatedRemoteProcess( long controlClientId, RemoteProcessManagerServer server, String name, long localPID, GUID processId, Map startupArguments, Map environmentVariables ) { this.mnControlClientId = controlClientId; this.mRemoteProcessManagerServer = server; this.mszName = name; this.mnLocalPID = localPID; this.mProcessId = processId; this.mStartupArguments = startupArguments; 
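/* The owning ProcessManager and ExecutionImage are deliberately left unset in this constructor; ArchRemoteProcessManagerNode.afterMediatedRemoteProcess(...) attaches them once the image address has been resolved. */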
this.mEnvironmentVariables = environmentVariables; this.mRemoteEventHandlers = new ArrayList<>(); } public MediatedRemoteProcess( long controlClientId, RemoteProcessManagerServer server, String name, long pid, GUID guid ) { this( controlClientId, server, name, pid, guid, null, null ); } @Override public void addRemoteEventHandler( ProcessRemoteEventHandler handler ) { this.mRemoteEventHandlers.add( handler ); } @Override public void removeRemoteEventHandler( ProcessRemoteEventHandler handler ) { this.mRemoteEventHandlers.remove( handler ); } @Override public int remoteEventHandlerSize() { return this.mRemoteEventHandlers.size(); } @Override public void notifyRemoteEvent( long pmClientId, ProcessEvent event, Object caused ) { for ( ProcessRemoteEventHandler handler : this.mRemoteEventHandlers ) { handler.fired( pmClientId, event, caused ); } } @Override public String getName() { return this.mszName; } public long getControlClientId() { return this.mnControlClientId; } @Override public long getLocalPID() { return this.mnLocalPID; } @Override public GUID actualParentPID() { return this.mParentPID; } @Override public void applyActualParentPID( GUID pid ) { this.mParentPID = pid; } @Override public void setName( String szName ) { this.mszName = szName; } @Override public long getExecutumId() { return this.mnLocalPID; } @Override public UProcessRuntimeMeta retrieveRemoteRuntimeMeta() throws RemoteProcessLifecycleException { return this.mRemoteProcessManagerServer.queryProcessRuntimeMeta( this.mProcessId ); } protected UProcessRuntimeMeta optRemoteRuntimeMeta() throws IllegalStateException { try { return this.mRemoteProcessManagerServer.queryProcessRuntimeMeta( this.mProcessId ); } catch ( RemoteProcessLifecycleException e ) { throw new IllegalStateException( e ); } } @Override public RuntimeSystem parentSystem() { return null; } @Override public RuntimeSystem revealNearestSystem() { return null; } @Override public Executum parentExecutum() { return null; } @Override public Executum setThreadAffinity( Thread affinity ) { throw new NotImplementedException( "`RemoteProcess` has no thread affinity, so it cannot be set." 
); } @Override public Thread getAffiliateThread() { return null; } @Override public boolean isTerminated() { UProcessRuntimeMeta meta = this.optRemoteRuntimeMeta(); return meta.isTerminated(); } @Override public GUID getGuid() { return this.mProcessId; } @Override public GUID getParentProcessId() { return this.mParentPID; } @Override public long getParentLocalPID() { return 0; } @Override public LocalDateTime remoteGetEndTime() { return null; } @Override public LocalDateTime remoteGetLastUpdateTime() { return null; } @Override public UProcess parentProcess() { return null; } @Override public ProcessManager getOwnedProcessManager() { return this.mProcessManager; } @Override public ProcSpace getProcNamespace() { return null; } @Override public RuntimeTombstone getRuntimeTombstone() { return null; } @Override public ObjectTable getObjectTable() { return null; } @Override public ExecutionImage getExecutionImage() { return this.mExecutionImage; } @Override public ControllableLevel getControllableLevel() { return null; } @Override public LocalDateTime getEndTime() { return null; } @Override public LocalDateTime getLastUpdateTime() { return null; } @Override public Map getStartupArguments() { return this.mStartupArguments; } @Override public Map getEnvironmentVariables() { return this.mEnvironmentVariables; } @Override public Processum affinityLocalProcess() { return null; } @Override public void triggerUpdateTerminationStatus() { } @Override public void triggerAfterRunnableTerminationStatus() { } @Override public void start() throws ProvokeHandleException { try { this.mRemoteProcessManagerServer.startRemoteUProcess( this.mProcessId ); } catch ( RemoteProcessServiceRPCException e ) { throw new ProvokeHandleException( e ); } } @Override public Map getOwnThreadGroup() { return null; } @Override public TaskManager getTaskManager() { return null; } @Override public LocalDateTime getCreateTime() { return null; } @Override public LocalDateTime getStartTime() { return null; } @Override public void apoptosis() throws ApoptosisRejectSignalException { } @Override public void kill() { } @Override public void interrupt() { } @Override public void suspend() { } @Override public void resume() { } @Override public void entreatLive() { } @Override public Thread.State getState() { return null; } @Override public ElementNode getAccount() { return null; } @Override public int getExceptionRestartTime() { return 0; } @Override public Lifecycle applyExceptionRestartTime( int time ) { return null; } @Override public ProcessActionTape actionTape() { return null; } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/ProcessesUtils.java ================================================ package com.walnut.odin.proc; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.HashMap; import java.util.Map; import com.pinecone.framework.util.datetime.DatePattern; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.proc.UProcess; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; public final class ProcessesUtils { public static Map decode( String json ) { Map map = new HashMap<>(); if ( json == null || json.isEmpty() ) { return map; } JSONObject jo = new JSONMaptron( json ); for ( Map.Entry kv : jo.entrySet() ) { JSONArray ja = (JSONArray) kv.getValue(); String[] vs = new String[ ja.size() ]; 
for ( int i = 0; i < ja.size(); ++i ) { vs[ i ] = ja.optString( i ); } map.put( kv.getKey(), vs ); } return map; } private static String formatTime( LocalDateTime time ) { if ( time == null ) { return null; } DateTimeFormatter formatter = DatePattern.createFormatter( "yyyy-MM-dd HH:mm:ss.nnnnnnnnn" ); return time.format( formatter ); } public static UProcessRuntimeMeta extractProcessMeta( UProcess that ) { UProcessRuntimeMeta meta = new UProcessRuntimeMeta(); meta.setPID( that.getPID().toString() ); meta.setParentPID( that.getParentProcessId().toString() ); meta.setName( that.getName() ); meta.setLocalPID( that.getLocalPID() ); meta.setCreateTime( formatTime( that.getCreateTime() ) ); meta.setStartTime( formatTime( that.getStartTime() ) ); meta.setEndTime( formatTime( that.getEndTime() ) ); meta.setLastUpdateTime( formatTime( that.getLastUpdateTime() ) ); meta.setMainThreadStatus( that.getState().toString() ); meta.setTerminated( that.isTerminated() ); return meta; } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/RemoteProcessLifecycleExaminer.java ================================================ package com.walnut.odin.proc; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.image.ImageModifier; import com.walnut.odin.proc.client.RPCRecallSysProcessEventHandler; import com.walnut.odin.proc.client.SlaveProcessLifecycleIface; public class RemoteProcessLifecycleExaminer implements ProcessLifecycleExaminer { protected Logger mLogger; protected RemoteProcessManagerNode mRemoteProcessManagerNode; protected SlaveProcessLifecycleIface mSlaveProcessLifecycleIface; protected ProcessManager mProcessManager; protected ImageModifier mImageModifier; public RemoteProcessLifecycleExaminer( RemoteProcessManagerNode remoteProcessManagerNode, SlaveProcessLifecycleIface slaveProcessLifecycleIface ) { this.mSlaveProcessLifecycleIface = slaveProcessLifecycleIface; this.mRemoteProcessManagerNode = remoteProcessManagerNode; this.mProcessManager = remoteProcessManagerNode.localProcessManager(); this.mImageModifier = this.mProcessManager.getImageModifier(); this.mLogger = LoggerFactory.getLogger( this.getClass() ); } @Override public ImageModifier imageModifier() { return this.mImageModifier; } @Override public void startProcess( UProcess process ) { this.mLogger.info( "[RemoteProcessVitalization] (Process: `{}`, PID: `{}`) ", process.getName(), process.getPID() ); ExecutionImage image = process.getExecutionImage(); this.mImageModifier.addSystemProcessEventHandler( image.getEntryPoint(), new RPCRecallSysProcessEventHandler( this.mRemoteProcessManagerNode, this.mSlaveProcessLifecycleIface ) ); process.start(); // TODO, Process Joint this.mRemoteProcessManagerNode.notifyProcessLifecycleHandlers( process.getExecutionImage().getImageAddress(), process.getExecutionImage().getEntryPoint(), ProcessEvent.Vitalized ); this.mLogger.info( "[RemoteProcessVitalization] (Process: `{}`, PID: `{}`) ", process.getName(), process.getPID() ); } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/RPCRecallSysProcessEventHandler.java ================================================ package com.walnut.odin.proc.client; import com.pinecone.hydra.proc.UProcess; import 
com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.event.ProcessEventHandler; import com.pinecone.hydra.proc.image.EntryPointRunnable; import com.walnut.odin.proc.RemoteProcessManagerNode; import com.walnut.odin.proc.RemoteTerminationStatus; import com.walnut.odin.proc.entity.RemoteTerminationReport; public class RPCRecallSysProcessEventHandler implements ProcessEventHandler { protected RemoteProcessManagerNode mRemoteProcessManagerNode; protected SlaveProcessLifecycleIface mSlaveProcessLifecycleIface; protected long mnClientId; public RPCRecallSysProcessEventHandler( long clientId, RemoteProcessManagerNode node, SlaveProcessLifecycleIface iface ) { this.mRemoteProcessManagerNode = node; this.mSlaveProcessLifecycleIface = iface; this.mnClientId = clientId; } public RPCRecallSysProcessEventHandler( RemoteProcessManagerNode node, SlaveProcessLifecycleIface iface ) { this( -1, node, iface ); if ( node instanceof RemoteProcessManagerClient ) { this.mnClientId = ((RemoteProcessManagerClient) node).getClientId(); } } @Override public void fired( EntryPointRunnable runnable, ProcessEvent event ) { switch ( event ) { case Terminated: { this.notifyProcessTerminated( runnable ); break; } case Prepare: case Created: case Vitalized: default: { break; } } } protected void notifyProcessTerminated( EntryPointRunnable runnable ) { UProcess process = runnable.ownedProcess(); RemoteTerminationReport report = new RemoteTerminationReport(); report.setProcessID( process.getPID() ); report.setExitCode( process.actionTape().getExitCode() ); report.setLocalPID( process.getLocalPID() ); report.setRemoteTerminationStatus( RemoteTerminationStatus.Expected ); Throwable lastError = process.actionTape().getLastError(); if ( lastError != null ) { report.setErrorMsg( lastError.getMessage() ); report.setRemoteTerminationStatus( RemoteTerminationStatus.Error ); this.mRemoteProcessManagerNode.notifyProcessLifecycleHandlers( process.getExecutionImage().getImageAddress(), process.getExecutionImage().getEntryPoint(), ProcessEvent.Error ); } else { this.mRemoteProcessManagerNode.notifyProcessLifecycleHandlers( process.getExecutionImage().getImageAddress(), process.getExecutionImage().getEntryPoint(), ProcessEvent.Terminated ); } this.mSlaveProcessLifecycleIface.reportProcessTerminated( this.mnClientId, report ); } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/RavenRemoteProcessManagerClient.java ================================================ package com.walnut.odin.proc.client; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.proc.LocalUProcess; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.system.component.LogStatuses; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.uma.wolf.WolvesAppointClient; import com.pinecone.hydra.umc.wolf.client.UlfClient; import com.walnut.odin.proc.ArchRemoteProcessManagerNode; import com.walnut.odin.proc.ProcessesUtils; import com.walnut.odin.proc.RemoteProcess; import com.walnut.odin.proc.RemoteProcessLifecycleExaminer; import com.walnut.odin.proc.ProcessLifecycleExaminer; import com.walnut.odin.proc.RemoteProcessLifecycleException; import com.walnut.odin.proc.RemoteProcessServiceRPCException; import com.walnut.odin.proc.RemoteVitalizationStatus; import 
com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; import java.net.URI; import java.net.URISyntaxException; import java.util.Map; public class RavenRemoteProcessManagerClient extends ArchRemoteProcessManagerNode implements RemoteProcessManagerClient { protected DuplexAppointClient mDuplexAppointClient; protected SlaveProcessLifecycleIface mProcessLifecycleIface; protected ProcessLifecycleExaminer mProcessLifecycleExaminer; protected long mnClientId; protected UlfClient mRPCClient; public RavenRemoteProcessManagerClient( ProcessManager processManager, UlfClient rpcClient ) { super( processManager ); this.mRPCClient = rpcClient; this.mnClientId = rpcClient.getMessageNodeId(); } protected void initRPCSubsystem() throws RemoteProcessServiceRPCException { if ( this.mDuplexAppointClient != null && !this.mDuplexAppointClient.getMessageNode().isTerminated() ) { throw new IllegalStateException( "DuplexAppointClient has started." ); } this.mDuplexAppointClient = new WolvesAppointClient( this.mRPCClient ); try { this.mDuplexAppointClient.compile( SlaveProcessLifecycleIface.class, false ); this.mProcessLifecycleIface = this.mDuplexAppointClient.getIface( SlaveProcessLifecycleIface.class ); this.mDuplexAppointClient.getRouteDispatcher().registerController( new ReactiveMasterProcessLifecycleController( this ) ); this.mProcessLifecycleExaminer = new RemoteProcessLifecycleExaminer( this, this.mProcessLifecycleIface ); this.infoLifecycle( "RPC Subsystem Register Controllers", LogStatuses.StatusDone ); } catch ( Exception e ) { this.mProcessLifecycleIface = null; throw new RemoteProcessServiceRPCException( e ); } } protected void vitalizeRPCSubsystem() throws RemoteProcessServiceRPCException { try { if ( this.mDuplexAppointClient.getMessageNode().isTerminated() ) { this.mDuplexAppointClient.execute(); this.mDuplexAppointClient.embraces( 2 ); this.mProcessLifecycleIface.reportClientInitialized( this.mnClientId ); this.infoLifecycle( "RPC Subsystem Service Vitalization, ( ClientId: `" + this.mnClientId + "` )", LogStatuses.StatusDone ); } } catch ( Exception e ) { throw new RemoteProcessServiceRPCException( e ); } } @Override public long getClientId() { return this.mnClientId; } @Override public void startService() throws RemoteProcessServiceRPCException { this.initRPCSubsystem(); this.vitalizeRPCSubsystem(); } @Override public void terminateService() { if ( this.mDuplexAppointClient == null ) { throw new IllegalStateException( "RPCClient has not been started yet." ); } this.mDuplexAppointClient.terminate(); this.mDuplexAppointClient = null; } @Override public UProcess createLocalUProcess( ExecutionImage image, UProcess parent, Map startupArgs, Map contextEnvironmentVars ) { LocalUProcess localHostedProcess = this.mProcessManager.createLocalHostedProcess( image, parent, startupArgs, contextEnvironmentVars ); if ( this.mProcessLifecycleIface != null ) { UProcessMirrorDTO processMirrorDTO = new UProcessMirrorDTO( localHostedProcess.getName(), localHostedProcess.getLocalPID(), localHostedProcess.getGuid().toString() ); this.mProcessLifecycleIface.registerRemoteProcess( this.mnClientId, processMirrorDTO ); this.getLogger().info( "[SuperiorRegister] [createLocalUProcess] " ); } else { this.getLogger().info( "[SuperiorRegister] [createLocalUProcess] [Skipped] " ); // Missing central connection (lost contact), so skip reporting to the central node.
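// ------------------------------------------------------------------------
// Editor's sketch (illustrative, not part of the original source): the happy
// path above ships the minimal identity triple ( name, localPID, guid ) to the
// master, which materializes a MediatedRemoteProcess mirror on its side via
// RemoteProcessManagerServer.registerProcess(...) -> createMediatedRemoteProcess(...):
//
//   UProcessMirrorDTO dto = new UProcessMirrorDTO( p.getName(), p.getLocalPID(), p.getGuid().toString() );
//   slaveIface.registerRemoteProcess( clientId, dto );
//
// `p` and `slaveIface` are stand-ins for the local process and the
// SlaveProcessLifecycleIface field used in this method.
// ------------------------------------------------------------------------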
} return localHostedProcess; } @Override public RemoteVitalizationResponse createLocalUProcess( UProcessMirrorDTO handlerDTO, UProcess[] lpProcess ) throws RemoteProcessLifecycleException { try { String imageAddress = handlerDTO.getImageAddress(); boolean isURI = handlerDTO.isImageAddressURI(); RemoteVitalizationResponse response = new RemoteVitalizationResponse(); response.setRemoteVitalizationStatus( RemoteVitalizationStatus.New ); this.notifyProcessLifecycleHandlers( imageAddress, null, ProcessEvent.Prepare ); ExecutionImage image; if ( isURI ) { URI uri = new URI( imageAddress ); image = this.queryExecutionImage( uri ); } else { image = this.queryExecutionImage( imageAddress ); } if ( image == null ) { response.setRemoteVitalizationStatus( RemoteVitalizationStatus.NoImage ); return response; } this.mProcessManager.getImageModifier().applyImageAddress( image, imageAddress ); // The null-check must precede applying the image address. String szStartupArguments = handlerDTO.getStartupArguments(); String szEnvironmentVariables = handlerDTO.getEnvironmentVariables(); String szParentPID = handlerDTO.getParentPID(); Map startupArgs = ProcessesUtils.decode( szStartupArguments ); Map envVariables = ProcessesUtils.decode( szEnvironmentVariables ); GUID parentPID = null; if ( szParentPID != null ) { parentPID = this.mProcessManager.getGuidAllocator().parse( szParentPID ); } LocalUProcess localHostedProcess = this.mProcessManager.createLocalHostedProcess( image, this.mProcessManager.getRootUProcess(), startupArgs, envVariables ); localHostedProcess.applyActualParentPID( parentPID ); response.setName( localHostedProcess.getName() ); response.setProcessID( localHostedProcess.getPID() ); response.setLocalPID( localHostedProcess.getLocalPID() ); response.setEnvironmentVariables( szEnvironmentVariables ); response.setStartupArguments( szStartupArguments ); response.setImageAddress( imageAddress ); response.setImageAddressURI( isURI ); if ( lpProcess != null && lpProcess.length > 0 ) { lpProcess[0] = localHostedProcess; } this.notifyProcessLifecycleHandlers( imageAddress, null, ProcessEvent.Created ); return response; } catch ( URISyntaxException e ) { throw new RemoteProcessLifecycleException( e ); } } @Override public RemoteVitalizationResponse vitalizeLocalUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException { UProcess[] lpProcess = new UProcess[1]; RemoteVitalizationResponse response = this.createLocalUProcess( handlerDTO, lpProcess ); LocalUProcess localHostedProcess = (LocalUProcess) lpProcess[ 0 ]; if ( response.getStatus() != RemoteVitalizationStatus.New.getCode() && response.getStatus() != RemoteVitalizationStatus.Vitalized.getCode() ) { return response; } // Asynchronous startup may cause consistency errors if local execution finishes before the remote mirror is ready to handle events (the remote process might then never be cleaned up by subsequent events). Sync and confirmation are required. // Note: Strong consistency is required. RPC sync must precede remote mirror process initialization: synchronize via RPC first and wait until the remote mirror process has finished creation. String pid = this.mProcessLifecycleIface.reportProcessCreated( this.mnClientId, response ); if ( !response.getPID().equals( pid ) ) { throw new RemoteProcessLifecycleException( "An internal error has occurred, with an unmatched remote-process PID." );
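// ------------------------------------------------------------------------
// Editor's sketch (illustrative, not part of the original source): the
// strong-consistency requirement above reduces to a create/ack/start
// handshake -- create the local process, block on the master's PID echo,
// and only then vitalize:
//
//   RemoteVitalizationResponse r = this.createLocalUProcess( dto, lp );                    // 1. create the local process
//   String echo = this.mProcessLifecycleIface.reportProcessCreated( this.mnClientId, r );  // 2. synchronous RPC: master hooks its mirror
//   if ( r.getPID().equals( echo ) ) { examiner.startProcess( lp[0] ); }                   // 3. only now is it safe to start
//
// `dto`, `lp` and `examiner` stand in for this method's parameters and the
// mProcessLifecycleExaminer field; the real control flow is the code around this note.
// ------------------------------------------------------------------------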
} this.mProcessLifecycleExaminer.startProcess( localHostedProcess ); return response; } @Override public void startLocalUProcess( GUID pid ) throws IllegalArgumentException { UProcess process = this.mProcessManager.getProcess( pid ); if ( process == null ) { throw new IllegalArgumentException( "No such process, PID => `" + pid + "`" ); } this.mProcessLifecycleExaminer.startProcess( process ); } @Override public void register( UProcess that ) { this.mProcessManager.register( that ); } @Override public void erase( UProcess that ) { this.mProcessManager.erase( that ); } @Override public UProcessRuntimeMeta queryProcessRuntimeMeta( GUID pid ) throws RemoteProcessLifecycleException { UProcess process = this.mProcessManager.getProcess( pid ); if ( process instanceof RemoteProcess ) { RemoteProcess remoteProcess = (RemoteProcess) process; return remoteProcess.retrieveRemoteRuntimeMeta(); // Cascading retrieval of runtime meta information } if ( process == null ) { return null; } UProcessRuntimeMeta meta = ProcessesUtils.extractProcessMeta( process ); // Do not return the expression directly; keeping a local variable makes breakpoints easier to set. return meta; } @Override public DuplexAppointClient duplexAppointClient() { return this.mDuplexAppointClient; } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/ReactiveMasterProcessLifecycleController.java ================================================ package com.walnut.odin.proc.client; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.odin.proc.RemoteProcessLifecycleException; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; @Controller @AddressMapping( "com.walnut.odin.proc.server.MasterProcessLifecycleIface." )
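// ------------------------------------------------------------------------
// Editor's note (illustrative, not part of the original source): the
// class-level @AddressMapping carries the fully-qualified iface name as a
// prefix, and each method-level @AddressMapping below appends a method name,
// so a handler is addressed by one dotted string. The server side dials
// exactly such a string, as seen later in this dump:
//
//   this.mDuplexAppointServer.invokeInform( clientId, "com.walnut.odin.proc.server.MasterProcessLifecycleIface.startRemoteUProcess", pid.toString() );
//
// ------------------------------------------------------------------------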
public class ReactiveMasterProcessLifecycleController implements Pinenut { private final RemoteProcessManagerClient mRemoteProcessManagerClient; private final GuidAllocator mGuidAllocator; public ReactiveMasterProcessLifecycleController( RemoteProcessManagerClient remoteProcessManagerClient ) { this.mRemoteProcessManagerClient = remoteProcessManagerClient; this.mGuidAllocator = remoteProcessManagerClient.getGuidAllocator(); } @AddressMapping("startRemoteUProcess") public void startRemoteUProcess( String szPid ) { this.mRemoteProcessManagerClient.startLocalUProcess( this.mGuidAllocator.parse( szPid ) ); } @AddressMapping("vitalizeRemoteUProcess") public RemoteVitalizationResponse vitalizeRemoteUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException { String imageAddress = handlerDTO.getImageAddress(); this.mRemoteProcessManagerClient.getLogger().info( "[RemoteProcessVitalization] [RPC] (Process: `{}`) ", imageAddress ); RemoteVitalizationResponse response = this.mRemoteProcessManagerClient.vitalizeLocalUProcess( handlerDTO ); this.mRemoteProcessManagerClient.getLogger().info( "[RemoteProcessVitalization] [RPC] (Process: `{}`) ", imageAddress ); return response; } @AddressMapping("createRemoteUProcess") public RemoteVitalizationResponse createRemoteUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException { String imageAddress = handlerDTO.getImageAddress(); this.mRemoteProcessManagerClient.getLogger().info( "[RemoteProcessCreation] [RPC] (Process: `{}`) ", imageAddress ); RemoteVitalizationResponse response = this.mRemoteProcessManagerClient.createLocalUProcess( handlerDTO, null ); this.mRemoteProcessManagerClient.getLogger().info( "[RemoteProcessCreation] [RPC] (Process: `{}`) ", imageAddress ); return response; } @AddressMapping("hasOwnProcess") public boolean hasOwnProcess( String processId ) { boolean has = this.mRemoteProcessManagerClient.hasOwnProcess( this.mGuidAllocator.parse( processId ) ); return has; } @AddressMapping("containProcess") public boolean containProcess( String processId ) { boolean has = this.mRemoteProcessManagerClient.containProcess( this.mGuidAllocator.parse( processId ) ); return has; } @AddressMapping("queryRemoteProcessRuntimeMeta") public UProcessRuntimeMeta queryRemoteProcessRuntimeMeta( String processId ) throws RemoteProcessLifecycleException { return this.mRemoteProcessManagerClient.queryProcessRuntimeMeta( this.mGuidAllocator.parse( processId ) ); } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/SlaveProcessLifecycleIface.java ================================================ package com.walnut.odin.proc.client; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.stereotype.Iface; import com.walnut.odin.proc.entity.RemoteTerminationReport; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; @Iface public interface SlaveProcessLifecycleIface extends Pinenut { long reportClientInitialized( long clientId ); void registerRemoteProcess( long clientId, UProcessMirrorDTO processDTO ); void reportProcessTerminated( long clientId, RemoteTerminationReport terminationReport ); String reportProcessCreated( long clientId, RemoteVitalizationResponse vitalizationResponse ); } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/server/MasterProcessLifecycleIface.java
================================================ package com.walnut.odin.proc.server; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.stereotype.Iface; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; @Iface public interface MasterProcessLifecycleIface extends Pinenut { void startRemoteUProcess( String processId ); RemoteVitalizationResponse vitalizeRemoteUProcess( UProcessMirrorDTO handlerDTO ); RemoteVitalizationResponse createRemoteUProcess( UProcessMirrorDTO handlerDTO ); boolean hasOwnProcess( String processId ); boolean containProcess( String processId ); UProcessRuntimeMeta queryRemoteProcessRuntimeMeta( String processId ); } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/server/RavenRemoteProcessManagerServer.java ================================================ package com.walnut.odin.proc.server; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.json.JSON; import com.pinecone.hydra.proc.ArchProcessManager; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.system.component.LogStatuses; import com.pinecone.hydra.uma.DuplexAppointServer; import com.pinecone.hydra.uma.HuskyDuplexExpress; import com.pinecone.hydra.uma.wolf.WolvesAppointServer; import com.pinecone.hydra.umc.wolf.server.UlfServer; import com.walnut.odin.proc.ArchRemoteProcessManagerNode; import com.walnut.odin.proc.ProcessesUtils; import com.walnut.odin.proc.RemoteProcess; import com.walnut.odin.proc.MediatedRemoteProcess; import com.walnut.odin.proc.RemoteProcessLifecycleException; import com.walnut.odin.proc.RemoteProcessServiceRPCException; import com.walnut.odin.proc.RemoteVitalizationStatus; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; import java.io.IOException; import java.net.URI; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; public class RavenRemoteProcessManagerServer extends ArchRemoteProcessManagerNode implements RemoteProcessManagerServer { protected GuidAllocator mGuidAllocator; //protected Map mPidClientIdMap; protected Map mLifecycleIfaceCMap; protected UlfServer mRPCServer; protected DuplexAppointServer mDuplexAppointServer; public RavenRemoteProcessManagerServer( ProcessManager localProcessManager, UlfServer ulfServer ) { super( localProcessManager ); //this.mPidClientIdMap = new ConcurrentHashMap<>(); this.mLifecycleIfaceCMap = new ConcurrentHashMap<>(); this.mGuidAllocator = localProcessManager.getGuidAllocator(); this.mRPCServer = ulfServer; } protected void initRPCSubsystem() throws RemoteProcessServiceRPCException { if ( this.mDuplexAppointServer != null && !this.mDuplexAppointServer.getMessageNode().isTerminated() ) { throw new IllegalStateException( "DuplexAppointServer has started." 
); } try { this.mDuplexAppointServer = new WolvesAppointServer( this.mRPCServer, HuskyDuplexExpress.class ); ReactiveSlaveProcessLifecycleController controller = new ReactiveSlaveProcessLifecycleController( this ); this.mDuplexAppointServer.registerController( controller ); this.mDuplexAppointServer.compile( MasterProcessLifecycleIface.class, false ); this.infoLifecycle( "RPC Subsystem Register Controllers", LogStatuses.StatusDone ); } catch ( Exception e ) { throw new RemoteProcessServiceRPCException( e ); } } protected void vitalizeRPCSubsystem() throws RemoteProcessServiceRPCException { try { if ( this.mDuplexAppointServer.getMessageNode().isTerminated() ) { this.mDuplexAppointServer.execute(); this.infoLifecycle( "RPC Subsystem Service Vitalization", LogStatuses.StatusDone ); } } catch ( Exception e ) { throw new RemoteProcessServiceRPCException( e ); } } @Override public DuplexAppointServer duplexAppointServer() { return this.mDuplexAppointServer; } @Override public void startService() throws RemoteProcessServiceRPCException { this.initRPCSubsystem(); this.vitalizeRPCSubsystem(); } @Override public void terminateService() throws IllegalStateException { if ( this.mDuplexAppointServer == null ) { throw new IllegalStateException( "RPCServer has not been started yet." ); } this.mDuplexAppointServer.terminate(); this.mDuplexAppointServer = null; } @Override public void registerProcess( long clientId, UProcessMirrorDTO processDTO ) { this.createMediatedRemoteProcess( clientId, processDTO ); this.getLogger().info( "[SubordinateRegister] [RegisterProcess (ClientId: {}, PID: {})] ", clientId, processDTO.getPID() ); } @Override public void startRemoteUProcess( GUID pid ) throws RemoteProcessServiceRPCException { UProcess process = this.mProcessManager.getProcess( pid ); if ( process == null ) { throw new IllegalArgumentException( "No such process, PID => `" + pid + "`" ); } if ( !( process instanceof RemoteProcess ) ) { throw new IllegalArgumentException( "Target process is not a remote process, PID => `" + pid + "`" ); } RemoteProcess rp = (RemoteProcess) process; long clientId = rp.getControlClientId(); try { this.mDuplexAppointServer.invokeInform( clientId, "com.walnut.odin.proc.server.MasterProcessLifecycleIface.startRemoteUProcess", pid.toString() ); // Pass the PID as a string, matching startRemoteUProcess( String ) on the receiving controller. } catch ( IOException e ) { throw new RemoteProcessServiceRPCException( e ); } } protected RemoteVitalizationResponse vitalizeRemoteUProcess0( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars, boolean directStart ) throws RemoteProcessLifecycleException { UProcessMirrorDTO handlerDTO = new UProcessMirrorDTO(); String szParentPID = this.mProcessManager.getRootUProcess().getPID().toString(); if ( parentPID != null ) { szParentPID = parentPID.toString(); } handlerDTO.setParentPID( szParentPID ); if ( startupArgs != null ) { handlerDTO.setStartupArguments( JSON.stringify( startupArgs ) ); } if ( contextEnvironmentVars != null ) { handlerDTO.setEnvironmentVariables( JSON.stringify( contextEnvironmentVars ) ); } handlerDTO.setImageAddress( imageAddress ); handlerDTO.setImageAddressURI( isURI ); try { Object ret; if ( directStart ) { ret = this.mDuplexAppointServer.invokeInform( clientId, "com.walnut.odin.proc.server.MasterProcessLifecycleIface.vitalizeRemoteUProcess", handlerDTO ); } else { ret = this.mDuplexAppointServer.invokeInform( clientId, "com.walnut.odin.proc.server.MasterProcessLifecycleIface.createRemoteUProcess", handlerDTO ); } RemoteVitalizationResponse response =
(RemoteVitalizationResponse) ret; if ( response.getPID() != null ) { response.setProcessID( this.mGuidAllocator.parse( response.getPID() ) ); } return response; } catch ( IOException e ) { throw new RemoteProcessLifecycleException( e ); } } @Override public RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException { return this.vitalizeRemoteUProcess0( clientId, imageAddress, isURI, parentPID, startupArgs, contextEnvironmentVars, true ); } @Override public RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String imagePath, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException { return this.vitalizeRemoteUProcess( clientId, imagePath, false, parentPID, startupArgs, contextEnvironmentVars ); } @Override public RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException { return this.vitalizeRemoteUProcess( clientId, imageURI.toString(), true, parentPID, startupArgs, contextEnvironmentVars ); } @Override public RemoteCreationResult createRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException { RemoteVitalizationResponse response = this.vitalizeRemoteUProcess0( clientId, imageAddress, isURI, parentPID, startupArgs, contextEnvironmentVars, false ); RemoteCreationResult result = new RemoteCreationResult(); result.response = response; if ( response.getStatus() != RemoteVitalizationStatus.New.getCode() ) { return result; } RemoteProcess remoteProcess = this.createMediatedRemoteProcess( clientId, response ); if ( remoteProcess != null ) { String pid = remoteProcess.getPID().toString(); this.getLogger().info( "[RemoteProcessCreated] [New::PendingVitalization] [MirrorHooked] (ClientId: `{}`, PID: `{}`) ", clientId, pid ); } else { this.getLogger().warn( "[RemoteProcessCreated] [New::PendingVitalization] [MirrorHooked] (ClientId: `{}`, ClientProvidedPID: `{}`) ", clientId, response.getPID() ); } result.process = remoteProcess; return result; } @Override public RemoteCreationResult createRemoteUProcess( long clientId, String imagePath, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException { return this.createRemoteUProcess( clientId, imagePath, false, parentPID, startupArgs, contextEnvironmentVars ); } @Override public RemoteCreationResult createRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map startupArgs, Map contextEnvironmentVars ) throws RemoteProcessLifecycleException { return this.createRemoteUProcess( clientId, imageURI.toString(), true, parentPID, startupArgs, contextEnvironmentVars ); } @Override public void register( UProcess that ) { this.mProcessManager.register( that ); } @Override public Long queryClientIdByPID( GUID pid ) { UProcess process = this.mProcessManager.getProcess( pid ); if ( process instanceof RemoteProcess ) { RemoteProcess rp = (RemoteProcess) process; return rp.getControlClientId(); } return null; } protected void expungeSelf( GUID pid ) { // this.mPidClientIdMap.remove( pid ); // Reserved. 
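// ------------------------------------------------------------------------
// Editor's note (sketch, low certainty): the reserved pid -> clientId map
// appears redundant because the owning client id can be recovered from the
// mirror itself, which is what queryClientIdByPID(...) above already does:
//
//   UProcess p = this.mProcessManager.getProcess( pid );
//   if ( p instanceof RemoteProcess ) { long cid = ((RemoteProcess) p).getControlClientId(); }
//
// ------------------------------------------------------------------------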
} @Override public void erase( UProcess that ) { this.mProcessManager.erase( that ); this.expungeSelf( that.getPID() ); } protected void expunge( UProcess that ) { ArchProcessManager.invokeExpunge( this.mProcessManager, that ); this.expungeSelf( that.getPID() ); } protected UProcess expunge( GUID pid ) { UProcess that = this.mProcessManager.getProcess( pid ); if ( that != null ) { this.expunge( that ); } return that; } protected void registerProcess( long clientId, RemoteProcess process ) { //this.mPidClientIdMap.put( process.getPID(), clientId ); this.register( process ); } protected RemoteProcess createMediatedRemoteProcess( long clientId, String name, long localPID, GUID processId, String szStartupArguments, String szEnvironmentVariables, String imageAddress, boolean isURI ) { MediatedRemoteProcess process = new MediatedRemoteProcess( clientId, this, name, localPID, processId, ProcessesUtils.decode( szStartupArguments ), ProcessesUtils.decode( szEnvironmentVariables ) ); this.afterMediatedRemoteProcess( process, imageAddress, isURI ); this.registerProcess( clientId, process ); return process; } @Override public RemoteProcess createMediatedRemoteProcess( long clientId, RemoteVitalizationResponse response ) { return this.createMediatedRemoteProcess( clientId, response.getName(), response.getLocalPID(), this.mGuidAllocator.parse( response.getPID() ), response.getStartupArguments(), response.getEnvironmentVariables(), response.getImageAddress(), response.isImageAddressURI() ); } @Override public RemoteProcess createMediatedRemoteProcess( long clientId, UProcessMirrorDTO processDTO ) { return this.createMediatedRemoteProcess( clientId, processDTO.getName(), processDTO.getLocalPID(), this.mGuidAllocator.parse( processDTO.getPID() ), processDTO.getStartupArguments(), processDTO.getEnvironmentVariables(), processDTO.getImageAddress(), processDTO.isImageAddressURI() ); } public static UProcess invokeExpunge( RemoteProcessManagerServer server, String pid ) { if ( server instanceof RavenRemoteProcessManagerServer ) { RavenRemoteProcessManagerServer ravenServer = (RavenRemoteProcessManagerServer) server; return ravenServer.expunge( ravenServer.mGuidAllocator.parse( pid ) ); } return null; } @Override public UProcessRuntimeMeta queryProcessRuntimeMeta( GUID pid ) throws RemoteProcessLifecycleException { try { UProcess process = this.mProcessManager.getProcess( pid ); if ( process instanceof RemoteProcess ) { RemoteProcess remoteProcess = (RemoteProcess) process; long clientId = remoteProcess.getControlClientId(); Object ret = this.mDuplexAppointServer.invokeInform( clientId, "com.walnut.odin.proc.server.MasterProcessLifecycleIface.queryRemoteProcessRuntimeMeta", pid.toString() ); return (UProcessRuntimeMeta) ret; // Cascading retrieval of runtime meta information } if ( process == null ) { return null; } UProcessRuntimeMeta meta = ProcessesUtils.extractProcessMeta( process ); // Do not return the expression directly; keeping a local variable makes breakpoints easier to set.
return meta; } catch ( IOException e ) { throw new RemoteProcessLifecycleException( e ); } } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/server/ReactiveSlaveProcessLifecycleController.java ================================================ package com.walnut.odin.proc.server; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.odin.proc.RemoteProcess; import com.walnut.odin.proc.entity.RemoteTerminationReport; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessMirrorDTO; @Controller @AddressMapping( "com.walnut.odin.proc.client.SlaveProcessLifecycleIface." ) public class ReactiveSlaveProcessLifecycleController implements Pinenut { protected RemoteProcessManagerServer mRemoteProcessManagerServer; public ReactiveSlaveProcessLifecycleController( RemoteProcessManagerServer remoteProcessManagerServer ) { this.mRemoteProcessManagerServer = remoteProcessManagerServer; } @AddressMapping( "registerRemoteProcess" ) public void registerRemoteProcess( long clientId, UProcessMirrorDTO processDTO ) { this.mRemoteProcessManagerServer.registerProcess( clientId, processDTO ); } @AddressMapping( "reportClientInitialized" ) public long reportClientInitialized( long clientId ) { this.mRemoteProcessManagerServer.getLogger().info( "[ClientInitializedRecall] [RPC] (ClientId: `{}`) ", clientId ); return clientId; } @AddressMapping( "reportProcessTerminated" ) public void reportProcessTerminated( long clientId, RemoteTerminationReport terminationReport ) { this.mRemoteProcessManagerServer.getLogger().info( "[RemoteProcessTerminated] [RPC] (ClientId: `{}`, PID: `{}`, ExitCode: `{}`) ", clientId, terminationReport.getPID(), terminationReport.getExitCode() ); UProcess that = RavenRemoteProcessManagerServer.invokeExpunge( this.mRemoteProcessManagerServer, terminationReport.getPID() ); String procName = "NonExistent"; if ( that != null ) { procName = that.getName(); RemoteProcess remoteProcess = (RemoteProcess) that; remoteProcess.notifyRemoteEvent( clientId, ProcessEvent.Terminated, terminationReport ); } this.mRemoteProcessManagerServer.getLogger().info( "[RemoteProcessTerminated] [RPC] [MirrorUnhook] (ClientId: `{}`, PID: `{}`, Process: `{}`) ", clientId, terminationReport.getPID(), procName ); } @AddressMapping( "reportProcessCreated" ) public String reportProcessCreated( long clientId, RemoteVitalizationResponse vitalizationResponse ) { String pid = null; RemoteProcess remoteProcess = this.mRemoteProcessManagerServer.createMediatedRemoteProcess( clientId, vitalizationResponse ); if ( remoteProcess != null ) { pid = remoteProcess.getPID().toString(); this.mRemoteProcessManagerServer.getLogger().info( "[RemoteProcessCreated] [RPC] [MirrorHooked] (ClientId: `{}`, PID: `{}`) ", clientId, pid ); } else { this.mRemoteProcessManagerServer.getLogger().warn( "[RemoteProcessCreated] [RPC] [MirrorHooked] (ClientId: `{}`, ClientProvidedPID: `{}`) ", clientId, vitalizationResponse.getPID() ); } return pid; } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/RavenTaskInstrument.java ================================================ package com.walnut.odin.task; import java.util.List; import 
com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.id.Identification; import com.pinecone.hydra.system.ko.CascadeInstrument; import com.pinecone.hydra.system.ko.KernelObjectConfig; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator; import com.pinecone.hydra.system.ko.kom.KOMInstrument; import com.pinecone.hydra.task.ibatis.hydranium.TaskMappingDriver; import com.pinecone.hydra.task.kom.TaskInstrument; import com.pinecone.hydra.task.kom.UniformTaskInstrument; import com.pinecone.hydra.task.kom.entity.ElementNode; import com.pinecone.hydra.task.kom.entity.AppElement; import com.pinecone.hydra.task.kom.entity.Namespace; import com.pinecone.hydra.task.kom.entity.TaskElement; import com.pinecone.hydra.task.kom.entity.TaskTreeNode; import com.pinecone.hydra.task.kom.instance.InstanceInstrument; import com.pinecone.hydra.unit.imperium.ImperialTree; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.walnut.odin.task.service.CategoryService; import com.walnut.odin.task.service.RavenCategoryService; import com.walnut.odin.task.source.RavenTaskMasterManipulator; import com.walnut.odin.task.system.TaskPathInvalidException; import com.walnut.odin.task.troll.GenericRavenTask; public class RavenTaskInstrument implements CentralizedTaskInstrument { protected RavenTaskMasterManipulator ravenTaskMasterManipulator; protected UniformTaskInstrument uniformTaskInstrument; protected CategoryService categoryService; protected void overrideTaskInstrument( Processum superiorProcess, TaskMappingDriver driver, TaskInstrument parent, String name, KernelObjectConfig config, @Nullable GuidAllocator guidAllocator ) { this.uniformTaskInstrument = new UniformTaskInstrument( superiorProcess, driver.getMasterManipulator(), parent, name, config, guidAllocator ) { /* @Override public RavenTaskElement affirmTask( String path ,TaskElement metaInfos ) { TaskElement taskElement = super.affirmTask( path , metaInfos); if ( taskElement == null ) { return null; } return RavenTaskInstrument.this.transformTaskElement( taskElement, true ); } @Override public ElementNode queryElement( String path ) { ElementNode proto = super.queryElement( path ); if ( proto instanceof TaskElement ) { return RavenTaskInstrument.this.transformTaskElement( (TaskElement) proto, false ); } return proto; } @Override public TaskTreeNode get( GUID guid ) { TaskTreeNode treeNode = super.get( guid ); return RavenTaskInstrument.this.transformTreeNode( treeNode, false ); } @Override public TreeNode get( GUID guid, int depth ) { TreeNode treeNode = super.get( guid, depth ); return RavenTaskInstrument.this.transformTreeNode( (TaskTreeNode) treeNode, false ); } @Override public TreeNode getAsRootDepth( GUID guid ) { TreeNode treeNode = super.getAsRootDepth( guid ); return RavenTaskInstrument.this.transformTreeNode( (TaskTreeNode) treeNode, false ); }*/ }; } public RavenTaskInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, TaskInstrument parent, String name, KernelObjectConfig config, @Nullable GuidAllocator guidAllocator ) { this.ravenTaskMasterManipulator = (RavenTaskMasterManipulator) masterManipulator; TaskMappingDriver driver = (TaskMappingDriver) 
this.ravenTaskMasterManipulator.getTaskMappingDriver(); this.overrideTaskInstrument ( superiorProcess, driver, parent, name, config, guidAllocator ); this.categoryService = new RavenCategoryService( this ); } public RavenTaskInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, KernelObjectConfig config ) { this( superiorProcess, masterManipulator, null, CentralizedTaskInstrument.class.getSimpleName(), config, null ); } public RavenTaskInstrument( KOIMappingDriver driver, CentralizedTaskInstrument parent, String name, KernelObjectConfig config ){ this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, config, null ); } public RavenTaskInstrument( KOIMappingDriver driver, KernelObjectConfig config ) { this( driver.getSuperiorProcess(), driver.getMasterManipulator(), config ); } @Override public void applyGuidAllocator( GuidAllocator guidAllocator ) { this.uniformTaskInstrument.applyGuidAllocator( guidAllocator ); } @Override public InstanceInstrument getInstanceInstrument() { return this.uniformTaskInstrument.getInstanceInstrument(); } @Override public GUID assertGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException { GUID guid = this.uniformTaskInstrument.queryGUIDByPath( taskTreePath ); if ( guid == null ) { throw new TaskPathInvalidException( taskTreePath ); } return guid; } @Override public GUID assertTaskGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException, IllegalArgumentException { ElementNode node = this.uniformTaskInstrument.queryElement( taskTreePath ); if ( node == null ) { throw new TaskPathInvalidException( taskTreePath ); } if ( node.evinceTaskElement() == null ) { throw new IllegalArgumentException( "Path `" + taskTreePath + "` is not a task." ); } return node.getGuid(); } @Override public UniformTaskInstrument getUniformTaskInstrument() { return this.uniformTaskInstrument; } @Override public RavenTaskMasterManipulator getRavenTaskMasterManipulator() { return this.ravenTaskMasterManipulator; } @Override public void newLinkTag( String originalPath, String dirPath, String tagName ) { this.uniformTaskInstrument.newLinkTag( originalPath, dirPath, tagName ); } @Override public void removeReparseLink( GUID guid ) { this.uniformTaskInstrument.removeReparseLink( guid ); } @Override public void affirmOwnedNode( GUID parentGuid, GUID childGuid ) { this.uniformTaskInstrument.affirmOwnedNode( parentGuid, childGuid ); } @Override public void newHardLink( GUID sourceGuid, GUID targetGuid ) { this.uniformTaskInstrument.newHardLink( sourceGuid, targetGuid ); } @Override public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) { this.uniformTaskInstrument.newLinkTag( originalGuid, dirGuid, tagName ); } @Override public void updateLinkTag( GUID tagGuid, String tagName ) { this.uniformTaskInstrument.updateLinkTag( tagGuid, tagName ); } @Override public ReparseLinkNode queryReparseLinkByNS( String path, String szBadSep, String szTargetSep ) { return this.uniformTaskInstrument.queryReparseLinkByNS( path, szBadSep, szTargetSep ); } @Override public ReparseLinkNode queryReparseLink( String path ) { return this.uniformTaskInstrument.queryReparseLink( path ); } @Override public CategoryService getCategoryService() { return this.categoryService; } @Override public AppElement affirmJob(String path ) { return this.uniformTaskInstrument.affirmJob( path ); } @Override public Namespace affirmNamespace( String path ) { return this.uniformTaskInstrument.affirmNamespace( path ); } @Override public TaskElement 
affirmTask( String path ,TaskElement metaInfos) { return (TaskElement) this.uniformTaskInstrument.affirmTask( path ,metaInfos); } @Override public ElementNode queryElement( String path ) { return this.uniformTaskInstrument.queryElement( path ); } @Override public boolean containsChild( GUID parentGuid, String childName ) { return this.uniformTaskInstrument.containsChild( parentGuid, childName ); } @Override public void update( TreeNode treeNode ) { this.uniformTaskInstrument.update( treeNode ); } @Override public TreeNode get( GUID guid ) { return this.uniformTaskInstrument.get( guid ); } @Override public TreeNode get( GUID guid, int depth ) { return this.uniformTaskInstrument.get( guid, depth ); } @Override public TreeNode getAsRootDepth( GUID guid ) { return this.uniformTaskInstrument.getAsRootDepth( guid ); } /** Directly proxied **/ @Override public KOMInstrument parent() { return this.uniformTaskInstrument.parent(); } @Override public void setParent( CascadeInstrument parent ) { this.uniformTaskInstrument.setParent( parent ); } @Override public com.pinecone.framework.util.name.Namespace getTargetingName() { return this.uniformTaskInstrument.getTargetingName(); } @Override public void setTargetingName( com.pinecone.framework.util.name.Namespace name ) { this.uniformTaskInstrument.setTargetingName( name ); } @Override public String getPath( GUID guid ) { return this.uniformTaskInstrument.getPath( guid ); } @Override public String querySystemKernelObjectPath( GUID objectGuid ) { return this.uniformTaskInstrument.querySystemKernelObjectPath( objectGuid ); } @Override public String getFullName( GUID guid ) { return this.uniformTaskInstrument.getFullName( guid ); } @Override public GUID queryGUIDByPath( String path ) { return this.uniformTaskInstrument.queryGUIDByPath( path ); } @Override public GUID queryGUIDByFN( String fullName ) { return this.uniformTaskInstrument.queryGUIDByFN( fullName ); } @Override public boolean contains( GUID nodeGuid ) { return this.uniformTaskInstrument.contains( nodeGuid ); } @Override public GUID put( TreeNode treeNode ) { return this.uniformTaskInstrument.put( treeNode ); } @Override public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) { return this.uniformTaskInstrument.queryGUIDByNS( path, szBadSep, szTargetSep ); } @Override public void remove( GUID guid ) { this.uniformTaskInstrument.remove( guid ); } @Override public void remove( String path ) { this.uniformTaskInstrument.remove( path ); } @Override public List getChildren( GUID guid ) { return this.uniformTaskInstrument.getChildren( guid ); } @Override public List fetchChildrenGuids( GUID guid ) { return this.uniformTaskInstrument.fetchChildrenGuids( guid ); } @Override public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) { return this.uniformTaskInstrument.queryEntityHandleByNS( path, szBadSep, szTargetSep ); } @Override public EntityNode queryNode( String path ) { return this.uniformTaskInstrument.queryNode( path ); } @Override public TreeNode queryTreeNode( String path ) { return this.uniformTaskInstrument.queryTreeNode( path ); } @Override public List fetchRoot() { return this.uniformTaskInstrument.fetchRoot(); } @Override public void rename( GUID guid, String name ) { this.uniformTaskInstrument.rename( guid, name ); } @Override public Processum getSuperiorProcess() { return this.uniformTaskInstrument.getSuperiorProcess(); } @Override public GuidAllocator getGuidAllocator() { return this.uniformTaskInstrument.getGuidAllocator(); } @Override 
public ImperialTree getMasterTrieTree() { return this.uniformTaskInstrument.getMasterTrieTree(); } @Override public KernelObjectConfig getConfig() { return this.uniformTaskInstrument.getConfig(); } @Override public String getSuperiorPathScope() { return this.uniformTaskInstrument.getSuperiorPathScope(); } @Override public void applySuperiorPathScope( String superiorPathScope ) { this.uniformTaskInstrument.applySuperiorPathScope( superiorPathScope ); } @Override public RavenTask constructTask( TaskElement taskElement ) { return this.constructTask( taskElement, null ); } @Override public RavenTask constructTask( TaskElement taskElement, @Nullable Identification serviceId ) { if ( serviceId == null ) { //serviceId = taskElement. } RavenTask task = new GenericRavenTask( this, serviceId, taskElement ); return task; } @Override public RavenTask createTask( TaskElement taskElement, Identification serviceId ) { this.put( taskElement ); RavenTask task = this.constructTask( taskElement, serviceId ); return task; } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/dto/GenericCategoryTag.java ================================================ package com.walnut.odin.task.dto; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.slime.entity.ArchEnumIndexableEntity; public class GenericCategoryTag extends ArchEnumIndexableEntity implements CategoryTag { protected GUID mTaskGuid; protected String mszCategoryType; protected String mszCategoryName; public GenericCategoryTag() { super(); } @Override public void setEnumId( long id ) { this.mnEnumId = id; } @Override public void setTaskGuid( GUID taskGuid ) { this.mTaskGuid = taskGuid; } @Override public GUID getTaskGuid() { return this.mTaskGuid; } @Override public void setCategoryName( String categoryName ) { this.mszCategoryName = categoryName; } @Override public String getCategoryName() { return this.mszCategoryName; } @Override public void setCategoryType( String categoryType ) { this.mszCategoryType = categoryType; } @Override public String getCategoryType() { return this.mszCategoryType; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/entity/pyramid/ArchCategory.java ================================================ package com.walnut.odin.task.entity.pyramid; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.slime.entity.ArchEnumIndexableEntity; public abstract class ArchCategory extends ArchEnumIndexableEntity implements Category { protected String mszName; protected String mszAlias; protected String mszDescription; public ArchCategory() { super(); } @Override public void setEnumId( long id ) { this.mnEnumId = id; } @Override public void setName( String name ) { this.mszName = name; } @Override public String getName() { return this.mszName; } @Override public void setAlias( String alias ) { this.mszAlias = alias; } @Override public String getAlias() { return this.mszAlias; } @Override public void setDescription( String description ) { this.mszDescription = description; } @Override public String getDescription() { return this.mszDescription; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } 
@Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/entity/pyramid/GenericCategoryType.java ================================================ package com.walnut.odin.task.entity.pyramid; public class GenericCategoryType extends ArchCategory implements CategoryType { public GenericCategoryType() { super(); } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/entity/pyramid/GenericTaskCategory.java ================================================ package com.walnut.odin.task.entity.pyramid; public class GenericTaskCategory extends ArchCategory implements TaskCategory { public GenericTaskCategory() { super(); } } ================================================ FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/mapper/CategoryMappingMapper.java ================================================ package com.walnut.odin.task.mapper; import java.util.List; import com.pinecone.framework.util.id.GUID; import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject; import com.walnut.odin.task.dto.CategoryTag; import com.walnut.odin.task.dto.GenericCategoryTag; import com.walnut.odin.task.source.CategoryMappingManipulator; import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @Mapper @IbatisDataAccessObject public interface CategoryMappingMapper extends CategoryMappingManipulator { @Override @Insert( "INSERT INTO `odin_task_category_mapping` ( `task_guid`, `category_type`, `category_name` ) " + "VALUES ( #{taskGuid}, #{categoryType}, #{categoryName} )" ) void insert( CategoryTag categoryTag ); @Select( "SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName " + "FROM `odin_task_category_mapping` " + "WHERE `task_guid` = #{taskGuid}" ) List queryByTaskGuid0( @Param( "taskGuid" ) GUID taskGuid ); @Override @SuppressWarnings( "unchecked" ) default List queryByTaskGuid( GUID taskGuid ) { return ( List ) this.queryByTaskGuid0( taskGuid ); } @Override @Select( "SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName " + "FROM `odin_task_category_mapping` " + "WHERE `task_guid` = #{taskGuid} " + "AND `category_type` = #{type} " + "AND `category_name` = #{name} " + "LIMIT 1" ) GenericCategoryTag queryOwnedTag( @Param( "taskGuid" ) GUID taskGuid, @Param( "type" ) String type, @Param( "name" ) String name ); @Override @Select( "SELECT COUNT( * ) " + "FROM `odin_task_category_mapping` " + "WHERE `category_type` = #{type} " + "AND `category_name` = #{name}" ) long countTag( @Param( "type" ) String type, @Param( "name" ) String name ); @Select( "SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName " + "FROM `odin_task_category_mapping` " + "WHERE `category_type` = #{type} " + "AND `category_name` = #{name} " + "ORDER BY `id` ASC " + "LIMIT #{offset}, #{pageSize}" ) List queryTag0( @Param( "type" ) String type, @Param( "name" ) String name, @Param( "offset" ) long offset, @Param( "pageSize" ) long pageSize ); @Override @SuppressWarnings( "unchecked" ) default List queryTag( String type, String name, long 
================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/mapper/CategoryMappingMapper.java
================================================
package com.walnut.odin.task.mapper;

import java.util.List;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import com.walnut.odin.task.dto.CategoryTag;
import com.walnut.odin.task.dto.GenericCategoryTag;
import com.walnut.odin.task.source.CategoryMappingManipulator;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

@Mapper
@IbatisDataAccessObject
public interface CategoryMappingMapper extends CategoryMappingManipulator {
    @Override
    @Insert( "INSERT INTO `odin_task_category_mapping` ( `task_guid`, `category_type`, `category_name` ) " +
             "VALUES ( #{taskGuid}, #{categoryType}, #{categoryName} )" )
    void insert( CategoryTag categoryTag );

    // Generic type arguments restored below; the raw `List` signatures in the
    // extracted source were stripped of their `<...>` arguments, as the
    // suppressed unchecked casts indicate.
    @Select( "SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName " +
             "FROM `odin_task_category_mapping` " +
             "WHERE `task_guid` = #{taskGuid}" )
    List<GenericCategoryTag> queryByTaskGuid0( @Param( "taskGuid" ) GUID taskGuid );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<CategoryTag> queryByTaskGuid( GUID taskGuid ) { return ( List ) this.queryByTaskGuid0( taskGuid ); }

    @Override
    @Select( "SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName " +
             "FROM `odin_task_category_mapping` " +
             "WHERE `task_guid` = #{taskGuid} " +
             "AND `category_type` = #{type} " +
             "AND `category_name` = #{name} " +
             "LIMIT 1" )
    GenericCategoryTag queryOwnedTag( @Param( "taskGuid" ) GUID taskGuid, @Param( "type" ) String type, @Param( "name" ) String name );

    @Override
    @Select( "SELECT COUNT( * ) " +
             "FROM `odin_task_category_mapping` " +
             "WHERE `category_type` = #{type} " +
             "AND `category_name` = #{name}" )
    long countTag( @Param( "type" ) String type, @Param( "name" ) String name );

    @Select( "SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName " +
             "FROM `odin_task_category_mapping` " +
             "WHERE `category_type` = #{type} " +
             "AND `category_name` = #{name} " +
             "ORDER BY `id` ASC " +
             "LIMIT #{offset}, #{pageSize}" )
    List<GenericCategoryTag> queryTag0( @Param( "type" ) String type, @Param( "name" ) String name, @Param( "offset" ) long offset, @Param( "pageSize" ) long pageSize );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<CategoryTag> queryTag( String type, String name, long offset, long pageSize ) { return ( List ) this.queryTag0( type, name, offset, pageSize ); }

    @Override
    @Select( "SELECT COUNT( * ) " +
             "FROM `odin_task_category_mapping` " +
             "WHERE `category_name` = #{name}" )
    long countTagsByName( @Param( "name" ) String name );

    @Select( "SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName " +
             "FROM `odin_task_category_mapping` " +
             "WHERE `category_name` = #{name} " +
             "ORDER BY `id` ASC " +
             "LIMIT #{offset}, #{pageSize}" )
    List<GenericCategoryTag> fetchByName0( @Param( "name" ) String name, @Param( "offset" ) long offset, @Param( "pageSize" ) long pageSize );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<CategoryTag> fetchByName( String name, long offset, long pageSize ) { return ( List ) this.fetchByName0( name, offset, pageSize ); }

    @Override
    @Update( "UPDATE `odin_task_category_mapping` " +
             "SET `task_guid` = #{taskGuid}, " +
             "`category_type` = #{categoryType}, " +
             "`category_name` = #{categoryName} " +
             "WHERE `id` = #{enumId}" )
    void update( CategoryTag categoryTag );

    // NOTE: the @Delete body was empty in the extracted source. The dynamic SQL
    // below is a reconstructed sketch (assumption), matching the nullable
    // ( taskGuid, type, name ) contract used by RavenCategoryService.purgeCategoryTag.
    @Delete( "<script>" +
             "DELETE FROM `odin_task_category_mapping` " +
             "<where>" +
             "<if test='taskGuid != null'> `task_guid` = #{taskGuid}</if>" +
             "<if test='type != null'> AND `category_type` = #{type}</if>" +
             "<if test='name != null'> AND `category_name` = #{name}</if>" +
             "</where>" +
             "</script>" )
    void purge( @Param( "taskGuid" ) GUID taskGuid, @Param( "type" ) String type, @Param( "name" ) String name );
}

================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/mapper/CategoryTypeMapper.java
================================================
package com.walnut.odin.task.mapper;

import java.util.List;

import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import com.walnut.odin.task.entity.pyramid.GenericCategoryType;
import com.walnut.odin.task.entity.pyramid.CategoryType;
import com.walnut.odin.task.source.CategoryTypeManipulator;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

@Mapper
@IbatisDataAccessObject
public interface CategoryTypeMapper extends CategoryTypeManipulator {
    @Override
    @Insert( "INSERT INTO `odin_task_category_type` ( `name`, `alias`, `description` ) " +
             "VALUES ( #{name}, #{alias}, #{description} )" )
    void insert( CategoryType categoryType );

    @Override
    @Select( "SELECT `id` AS enumId, `name`, `alias`, `description` " +
             "FROM `odin_task_category_type` " +
             "WHERE `name` = #{name}" )
    GenericCategoryType queryType( String name );

    @Override
    @Select( "SELECT COUNT(*) FROM `odin_task_category_type`" )
    long countTypes();

    @Select( "SELECT `id` AS enumId, `name`, `alias`, `description` " +
             "FROM `odin_task_category_type` " +
             "ORDER BY `id` ASC " +
             "LIMIT #{offset}, #{pageSize}" )
    List<GenericCategoryType> fetchType0( @Param( "offset" ) long offset, @Param( "pageSize" ) long pageSize );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<CategoryType> fetchType( long offset, long pageSize ) { return ( List ) this.fetchType0( offset, pageSize ); }

    @Override
    @Delete( "DELETE FROM `odin_task_category_type` " +
             "WHERE `name` = #{name}" )
    void remove( String name );

    @Override
    @Update( "UPDATE `odin_task_category_type` " +
             "SET `alias` = #{alias}, `description` = #{description} " +
             "WHERE `name` = #{name}" )
    void update( CategoryType categoryType );
}
================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/mapper/TaskCategoryMapper.java
================================================
package com.walnut.odin.task.mapper;

import java.util.List;

import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import com.walnut.odin.task.entity.pyramid.GenericTaskCategory;
import com.walnut.odin.task.entity.pyramid.TaskCategory;
import com.walnut.odin.task.source.TaskCategoryManipulator;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

@Mapper
@IbatisDataAccessObject
public interface TaskCategoryMapper extends TaskCategoryManipulator {
    @Override
    @Insert( "INSERT INTO `odin_task_category` ( `name`, `alias`, `description` ) " +
             "VALUES ( #{name}, #{alias}, #{description} )" )
    void insert( TaskCategory taskCategory );

    @Override
    @Select( "SELECT `id` AS enumId, `name`, `alias`, `description` " +
             "FROM `odin_task_category` " +
             "WHERE `name` = #{name}" )
    GenericTaskCategory queryTaskCategory( String name );

    @Override
    @Select( "SELECT COUNT(*) FROM `odin_task_category`" )
    long countCategories();

    @Select( "SELECT `id` AS enumId, `name`, `alias`, `description` " +
             "FROM `odin_task_category` " +
             "ORDER BY `id` ASC " +
             "LIMIT #{offset}, #{pageSize}" )
    List<GenericTaskCategory> fetchCategory0( @Param( "offset" ) long offset, @Param( "pageSize" ) long pageSize );

    @Override
    @SuppressWarnings( "unchecked" )
    default List<TaskCategory> fetchCategory( long offset, long pageSize ) { return ( List ) this.fetchCategory0( offset, pageSize ); }

    @Override
    @Delete( "DELETE FROM `odin_task_category` " +
             "WHERE `name` = #{name}" )
    void remove( String name );

    @Override
    @Update( "UPDATE `odin_task_category` " +
             "SET `alias` = #{alias}, `description` = #{description} " +
             "WHERE `name` = #{name}" )
    void update( TaskCategory taskCategory );
}
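The three mappers above all page with `LIMIT #{offset}, #{pageSize}`. A small sketch of the page-to-offset arithmetic a caller is expected to perform (the zero-based pageIndex convention is an assumption; the service call shown is RavenCategoryService.queryCategoryTag from the next file):

    // Hypothetical pagination helper for the LIMIT #{offset}, #{pageSize} mappers.
    long pageIndex = 2;                    // zero-based page number (assumption)
    long pageSize  = 50;
    long offset    = pageIndex * pageSize; // rows to skip: 100
    // e.g. service.queryCategoryTag( "priority", "P0", offset, pageSize );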
================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/service/RavenCategoryService.java
================================================
package com.walnut.odin.task.service;

import java.util.List;

import com.pinecone.framework.system.NonNull;
import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.task.kom.UniformTaskInstrument;
import com.walnut.odin.task.CentralizedTaskInstrument;
import com.walnut.odin.task.dto.CategoryTag;
import com.walnut.odin.task.source.CategoryMappingManipulator;
import com.walnut.odin.task.source.CategoryTypeManipulator;
import com.walnut.odin.task.source.RavenTaskMasterManipulator;
import com.walnut.odin.task.source.TaskCategoryManipulator;
import com.walnut.odin.task.system.TaskPathInvalidException;

public class RavenCategoryService implements CategoryService {
    private RavenTaskMasterManipulator ravenTaskMasterManipulator;
    private CategoryTypeManipulator    categoryTypeManipulator;
    private TaskCategoryManipulator    taskCategoryManipulator;
    private CategoryMappingManipulator categoryMappingManipulator;
    private CentralizedTaskInstrument  centralizedTaskInstrument;
    private UniformTaskInstrument      uniformTaskInstrument;

    public RavenCategoryService( CentralizedTaskInstrument instrument ) {
        this.ravenTaskMasterManipulator = instrument.getRavenTaskMasterManipulator();
        this.categoryTypeManipulator    = this.ravenTaskMasterManipulator.getCategoryTypeManipulator();
        this.taskCategoryManipulator    = this.ravenTaskMasterManipulator.getTaskCategoryManipulator();
        this.categoryMappingManipulator = this.ravenTaskMasterManipulator.getCategoryMappingManipulator();
        this.centralizedTaskInstrument  = instrument;
        this.uniformTaskInstrument      = this.centralizedTaskInstrument.getUniformTaskInstrument();
    }

    @Override
    public void addCategoryTag( CategoryTag categoryTag ) { this.categoryMappingManipulator.insert( categoryTag ); }

    @Override
    public void addCategoryTag( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException {
        GUID guid = this.centralizedTaskInstrument.assertTaskGUIDByPath( taskTreePath );
        categoryTag.setTaskGuid( guid );
        this.categoryMappingManipulator.insert( categoryTag );
    }

    @Override
    public CategoryTag setCategoryTag( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException {
        GUID guid = this.centralizedTaskInstrument.assertTaskGUIDByPath( taskTreePath );
        CategoryTag tag = this.queryOwnedTag( guid, categoryTag.getCategoryType(), categoryTag.getCategoryName() );
        if ( tag != null ) {
            return tag;
        }
        categoryTag.setTaskGuid( guid );
        this.categoryMappingManipulator.insert( categoryTag );
        return categoryTag;
    }

    @Override
    public void updateCategoryTag( CategoryTag categoryTag ) { this.categoryMappingManipulator.update( categoryTag ); }

    @Override
    public CategoryTag queryOwnedTag( GUID taskGuid, String type, String name ) { return this.categoryMappingManipulator.queryOwnedTag( taskGuid, type, name ); }

    @Override
    public List<CategoryTag> queryCategoryTag( GUID taskGuid ) { return this.categoryMappingManipulator.queryByTaskGuid( taskGuid ); }

    @Override
    public List<CategoryTag> queryCategoryTag( String taskTreePath ) {
        GUID guid = this.uniformTaskInstrument.queryGUIDByPath( taskTreePath );
        if ( guid == null ) {
            return null;
        }
        return this.queryCategoryTag( guid );
    }

    @Override
    public long countCategoryTag( String type, String name ) { return this.categoryMappingManipulator.countTag( type, name ); }

    @Override
    public List<CategoryTag> queryCategoryTag( String type, String name, long offset, long pageSize ) { return this.categoryMappingManipulator.queryTag( type, name, offset, pageSize ); }

    @Override
    public long countCategoryTagsByName( String name ) { return this.categoryMappingManipulator.countTagsByName( name ); }

    @Override
    public List<CategoryTag> fetchCategoryTagByName( String name, long offset, long pageSize ) { return this.categoryMappingManipulator.fetchByName( name, offset, pageSize ); }

    @Override
    public void purgeCategoryTag( @Nullable GUID taskGuid, @Nullable String type, @Nullable String name ) { this.categoryMappingManipulator.purge( taskGuid, type, name ); }

    @Override
    public void purgeCategoryTag( @NonNull String name ) { this.categoryMappingManipulator.purgeByName( name ); }

    @Override
    public void purgeCategoryTag( @NonNull GUID taskGuid ) { this.categoryMappingManipulator.purgeByTaskGuid( taskGuid ); }

    @Override
    public void removeCategoryTag( @NonNull GUID taskGuid, @NonNull String type, @NonNull String name ) { this.categoryMappingManipulator.remove( taskGuid, type, name ); }

    @Override
    public void eraseCategoryTag( @NonNull String taskTreePath, @Nullable String type, @Nullable String name ) throws TaskPathInvalidException, IllegalArgumentException {
        GUID guid = this.centralizedTaskInstrument.assertTaskGUIDByPath( taskTreePath );
        this.categoryMappingManipulator.remove( guid, type, name );
    }
}
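A short usage sketch for the service above, assuming a CategoryService wired from a CentralizedTaskInstrument as in the constructor; the tree path and tag values are hypothetical:

    // Hypothetical wiring and call sequence for RavenCategoryService.
    CategoryService service = new RavenCategoryService( instrument ); // instrument: a CentralizedTaskInstrument
    GenericCategoryTag tag = new GenericCategoryTag();
    tag.setCategoryType( "stage" );   // hypothetical
    tag.setCategoryName( "daily" );   // hypothetical
    // setCategoryTag is idempotent: it returns the existing tag when the
    // ( taskGuid, type, name ) triple is already mapped, otherwise inserts.
    CategoryTag effective = service.setCategoryTag( "warehouse/etl/orders", tag );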
================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/ArchRavenTask.java
================================================
package com.walnut.odin.task.troll;

import java.util.Map;

import com.pinecone.framework.util.id.Identification;
import com.pinecone.hydra.task.ArchTask;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.walnut.odin.task.RavenTask;

public abstract class ArchRavenTask extends ArchTask implements RavenTask {
    public ArchRavenTask( Identification serviceId, TaskElement serviceElement, Map metaDataScope ) {
        super( serviceId, serviceElement, metaDataScope );
    }

    public ArchRavenTask( Identification serviceId, TaskElement serviceElement ) {
        this( serviceId, serviceElement, null );
    }

    @Override
    public TaskElement getTaskElement() { return super.getTaskElement(); }
}

================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/ArchRavenTaskInstance.java
================================================
package com.walnut.odin.task.troll;

import java.net.URI;
import java.net.URISyntaxException;

import com.pinecone.hydra.task.ArchTaskInstance;
import com.pinecone.hydra.task.Task;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.task.kom.instance.InstanceEntry;
import com.pinecone.hydra.task.kom.instance.InstanceInstrument;
import com.walnut.odin.task.RavenTaskInstance;

public abstract class ArchRavenTaskInstance extends ArchTaskInstance implements RavenTaskInstance {
    protected URI processImageURI;
    protected InstanceInstrument instanceInstrument;

    public ArchRavenTaskInstance( InstanceEntry instanceEntry, Task ownedTask ) {
        super( instanceEntry, ownedTask );
        try {
            TaskElement taskElement = ownedTask.getTaskElement();
            if ( taskElement != null ) {
                String imagePath = taskElement.getImagePath();
                if ( imagePath != null ) {
                    this.processImageURI = URI.create( imagePath );
                }
            }
        }
        catch ( IllegalArgumentException e ) {
            this.processImageURI = null;
        }
        this.instanceInstrument = instanceEntry.getTaskInstrument().getInstanceInstrument();
    }

    @Override
    public URI getProcessImageURI() { return this.processImageURI; }

    @Override
    public InstanceInstrument instanceInstrument() { return this.instanceInstrument; }
}
================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/GenericRavenTask.java
================================================
package com.walnut.odin.task.troll;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.framework.util.id.Identification;
import com.pinecone.hydra.system.ko.MetaPersistenceException;
import com.pinecone.hydra.task.TaskInstanceStatus;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.task.kom.instance.GenericInstanceEntry;
import com.pinecone.hydra.task.kom.instance.InstanceEntry;
import com.pinecone.hydra.task.kom.instance.InstanceInstrument;
import com.walnut.odin.task.CentralizedTaskInstrument;
import com.walnut.odin.task.RavenTaskInstance;
import com.walnut.odin.task.RavenTask;

import java.time.LocalDateTime;
import java.util.Map;

public class GenericRavenTask extends ArchRavenTask implements RavenTask {
    protected CentralizedTaskInstrument mTaskInstrument;
    protected InstanceInstrument        mInstanceInstrument;
    protected GuidAllocator             mGuidAllocator;

    public GenericRavenTask( CentralizedTaskInstrument taskInstrument, @Nullable Identification serviceId, TaskElement serviceElement, Map metaDataScope ) {
        super( serviceId, serviceElement, metaDataScope );
        this.mTaskInstrument     = taskInstrument;
        this.mInstanceInstrument = taskInstrument.getInstanceInstrument();
        this.mGuidAllocator      = taskInstrument.getGuidAllocator();
    }

    public GenericRavenTask( CentralizedTaskInstrument taskInstrument, @Nullable Identification serviceId, TaskElement serviceElement ) {
        this( taskInstrument, serviceId, serviceElement, null );
    }

    public GenericRavenTask( CentralizedTaskInstrument taskInstrument, TaskElement serviceElement ) {
        this( taskInstrument, null, serviceElement );
    }

    @Override
    public RavenTaskInstance createInstance() {
        GUID guid = this.mGuidAllocator.nextGUID();
        GenericInstanceEntry entry = new GenericInstanceEntry( this.mTaskInstrument, this.mTaskElement );
        entry.setGuid( guid );
        entry.setActuallyPriority( this.mTaskElement.getActuallyPriority() );
        entry.setImagePath( this.mTaskElement.getImagePath() );
        entry.setTaskGuid( this.mTaskElement.getGuid() );
        entry.setCreateTime( LocalDateTime.now() );
        entry.setScheduleCycle( this.mTaskElement.getScheduleCycle() );
        entry.setScheduleType( this.mTaskElement.getScheduleType() );
        entry.setRunCount( 1 );
        entry.setSequenceCnt( 1 );
        entry.setRetryCnt( 0 );
        entry.setTaskType( this.mTaskElement.getType() );
        entry.setInstanceStatus( TaskInstanceStatus.New );
        entry.setTaskName( this.mTaskElement.getName() );
        entry.setProcessorName( this.mTaskElement.getProcessorName() );
        GenericRavenTaskInstance instance = new GenericRavenTaskInstance( entry, this );
        return instance;
    }

    public RavenTaskInstance constructInstance( InstanceEntry instanceEntry ) {
        return this.constructInstance( instanceEntry, null );
    }

    public RavenTaskInstance constructInstance( InstanceEntry instanceEntry, Identification serviceId ) {
        if ( serviceId == null ) {
            //serviceId = taskElement.
        }
        return null;
    }

    public RavenTask createInstance( InstanceEntry instanceEntry, Identification serviceId ) {
        // RavenTaskInstance task = this.constructInstance( taskElement, serviceId );
        //
        //
        // return task;
        return null;
    }

    public void removeInstance( GUID insGuid ) { this.mInstanceInstrument.removeInstance( insGuid ); }

    public void updateInstanceMeta( RavenTaskInstance instance ) throws MetaPersistenceException {
        this.updateTaskMeta( instance.getInstanceEntry() );
    }

    public void updateTaskMeta( InstanceEntry instanceEntry ) throws MetaPersistenceException {
        this.mInstanceInstrument.updateInstance( instanceEntry );
    }
}
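Putting the task classes together, a plausible creation flow under the APIs shown in this extract (assumes `instrument` is a CentralizedTaskInstrument and `element` an already-built TaskElement; persist() is defined on GenericRavenTaskInstance in the next file):

    // Sketch of the task -> instance lifecycle.
    RavenTask task = instrument.createTask( element, null );  // put()s the element, builds a GenericRavenTask
    RavenTaskInstance instance = task.createInstance();       // snapshots the element into an InstanceEntry
    instance.persist();                                       // insert-or-update, see GenericRavenTaskInstance below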
================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/GenericRavenTaskInstance.java
================================================
package com.walnut.odin.task.troll;

import com.pinecone.hydra.proc.UProcess;
import com.pinecone.hydra.system.ko.MetaPersistenceException;
import com.pinecone.hydra.task.ArchTaskInstance;
import com.pinecone.hydra.task.Task;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.task.kom.instance.InstanceEntry;
import com.walnut.odin.task.CentralizedTaskInstrument;
import com.walnut.odin.task.RavenTaskInstance;

public class GenericRavenTaskInstance extends ArchRavenTaskInstance implements RavenTaskInstance {
    public GenericRavenTaskInstance( InstanceEntry instanceEntry, Task ownedTask ) {
        super( instanceEntry, ownedTask );
    }

    public GenericRavenTaskInstance( InstanceEntry instanceEntry, CentralizedTaskInstrument instrument ) {
        super( instanceEntry, new GenericRavenTask( instrument, instanceEntry.taskElement() ) );
    }

    @Override public Object getProcessObject() { return null; }
    @Override public UProcess affinityProcess() { return null; }
    @Override public void startLocalProcess() { }
    @Override public void startRemoteProcess() { }
    @Override public void startRemoteProcess( boolean bDirectlyVitalize ) { }
    @Override public void startRemoteProcess( boolean bDirectlyVitalize, long processClientId ) { }

    @Override
    public void update() throws MetaPersistenceException {
        this.instanceInstrument.updateInstance( this.mInstanceEntry );
    }

    // Upsert semantics: insert the entry when it is not yet persisted, otherwise update it.
    @Override
    public void persist() throws MetaPersistenceException {
        InstanceEntry leg = this.instanceInstrument.getInstanceEntry( this.getGuid() );
        if ( leg == null ) {
            this.instanceInstrument.addInstance( this.mInstanceEntry );
        }
        else {
            this.update();
        }
    }
}

================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/LaunchErrorCauses.java
================================================
package com.walnut.odin.task.troll;

public final class LaunchErrorCauses {
    public static final String NoSuchImage                  = "NoSuchImage";
    public static final String RemoteProcessCreationFailure = "RemoteProcessCreationFailure";
}
================================================
FILE: Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/TrollTaskExecutionLauncher.java
================================================
package com.walnut.odin.task.troll;

import java.net.URI;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.pinecone.framework.util.CollectionUtils;
import com.pinecone.framework.util.datetime.DatePattern;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.hydra.proc.ProcessManager;
import com.pinecone.hydra.proc.UProcess;
import com.pinecone.hydra.proc.event.ProcessEvent;
import com.pinecone.hydra.proc.event.ProcessEventHandler;
import com.pinecone.hydra.proc.image.EntryPointRunnable;
import com.pinecone.hydra.proc.image.ExecutionImage;
import com.pinecone.hydra.proc.image.ImageLoader;
import com.pinecone.hydra.proc.image.ImageModifier;
import com.pinecone.hydra.proc.image.URLImageLoader;
import com.pinecone.hydra.system.component.LogStatuses;
import com.pinecone.hydra.system.component.Slf4jTraceable;
import com.pinecone.hydra.system.ko.MetaPersistenceException;
import com.pinecone.hydra.task.TaskInstanceStatus;
import com.pinecone.hydra.task.kom.instance.InstanceEntry;
import com.pinecone.hydra.task.kom.instance.InstanceInstrument;
import com.pinecone.hydra.task.marshal.TaskScheduleCycle;
import com.walnut.odin.conduct.CollectiveTaskRegiment;
import com.walnut.odin.proc.ProcessRemoteEventHandler;
import com.walnut.odin.proc.RemoteProcess;
import com.walnut.odin.proc.RemoteVitalizationStatus;
import com.walnut.odin.proc.server.RemoteProcessManagerServer;
import com.walnut.odin.task.CentralizedTaskInstrument;
import com.walnut.odin.task.RavenTaskConfig;
import com.walnut.odin.task.RavenTaskInstance;

public class TrollTaskExecutionLauncher implements TaskExecutionLauncher, Slf4jTraceable {
    protected Logger mLogger;
    protected RemoteProcessManagerServer mRemoteProcessManagerServer;
    protected CollectiveTaskRegiment     mCollectiveTaskRegiment;
    protected CentralizedTaskInstrument  mTaskInstrument;
    protected InstanceInstrument         mInstanceInstrument;
    protected ProcessManager             mProcessManager;
    protected RavenTaskConfig            mRavenTaskConfig;
    protected DateTimeFormatter          mInstanceTitleTimeFormat;
    protected DateTimeFormatter          mDefaultDateTimeFormat;
    protected GuidAllocator              mGuidAllocator;
    protected ImageModifier              mImageModifier;

    public TrollTaskExecutionLauncher( CollectiveTaskRegiment taskRegiment ) {
        this.mLogger                     = LoggerFactory.getLogger( this.getClass() );
        this.mRemoteProcessManagerServer = taskRegiment.remoteProcessManagerServer();
        this.mProcessManager             = taskRegiment.processManager();
        this.mCollectiveTaskRegiment     = taskRegiment;
        this.mTaskInstrument             = taskRegiment.taskInstrument();
        this.mInstanceInstrument         = this.mTaskInstrument.getInstanceInstrument();
        this.mRavenTaskConfig            = (RavenTaskConfig) this.mTaskInstrument.getConfig();
        this.mGuidAllocator              = this.mTaskInstrument.getGuidAllocator();
        this.mInstanceTitleTimeFormat    = DatePattern.createFormatter( this.mRavenTaskConfig.getInstanceTitleTimeFormat() );
        this.mDefaultDateTimeFormat      = DatePattern.createFormatter( this.mRavenTaskConfig.getDefaultDateTimeFormat() );
        this.mImageModifier              = this.mProcessManager.getImageModifier();
        this.infoLifecycle( "Welcome to the Skynet cloud deployment system: Odin Troll task execution system.", LogStatuses.StatusReady );
    }

    @Override
    public Logger getLogger() { return this.mLogger; }

    @Override
    public ProcessManager processManager() { return this.mProcessManager; }

    @Override
    public LocalDateTime evalBusinessTime( RavenTaskInstance instance, LocalDateTime biz ) {
        TaskScheduleCycle cycle = instance.getKernelScheduleCycle();
        LocalDateTime adjustedTime;
        switch ( cycle ) {
            case Month:
            case Week:
            case Day: {
                adjustedTime = biz.withHour( 0 ).withMinute( 0 ).withSecond( 0 ).withNano( 0 );
                break;
            }
            case Hour: {
                adjustedTime = biz.withMinute( 0 ).withSecond( 0 ).withNano( 0 );
                break;
            }
            case Minute: {
                adjustedTime = biz.withSecond( 0 ).withNano( 0 );
                break;
            }
            case Undefined:
            default: {
                adjustedTime = biz;
                break;
            }
        }
        return adjustedTime;
    }

    @Override
    public LocalDateTime evalBusinessTime( RavenTaskInstance instance ) {
        return this.evalBusinessTime( instance, LocalDateTime.now() );
    }

    @Override
    public String evalBusinessTimeLabel( RavenTaskInstance instance, LocalDateTime biz ) {
        return this.evalBusinessTime( instance, biz ).format( this.mInstanceTitleTimeFormat );
    }

    @Override
    public String evalBusinessTimeLabel( RavenTaskInstance instance ) {
        return this.evalBusinessTime( instance ).format( this.mInstanceTitleTimeFormat );
    }

    @Override
    public String evalInstanceName( RavenTaskInstance instance, LocalDateTime now, LocalDateTime bizTimeEpoch ) {
        String bizTimeLab  = this.evalBusinessTimeLabel( instance, bizTimeEpoch );
        String execTimeLab = now.format( this.mInstanceTitleTimeFormat );
        String szInstanceName = String.format( "%s_%s_ET_%s", instance.getOwnedTask().getName(), bizTimeLab, execTimeLab );
        return szInstanceName;
    }

    @Override
    public String evalInstanceName( RavenTaskInstance instance, LocalDateTime bizTimeEpoch ) {
        return this.evalInstanceName( instance, LocalDateTime.now(), bizTimeEpoch );
    }

    @Override
    public void initializeInstance( RavenTaskInstance instance, LaunchFeature feature ) {
        LocalDateTime now = LocalDateTime.now();
        this.getLogger().info(
            "[TaskLaunchSequence] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`, Time: `{}`) ",
            instance.getOwnedTask().getName(), instance.getOwnedTask().getFullName(), instance.getOwnedTask().getId(),
            now.format( this.mDefaultDateTimeFormat )
        );

        String szInstanceName = this.evalInstanceName( instance, now, feature.getBizTimeEpoch() );
        InstanceEntry entry = instance.getInstanceEntry();
        entry.setInstanceName( szInstanceName );

        String bizTimeLab = this.evalBusinessTimeLabel( instance, feature.getBizTimeEpoch() );
        InstanceEntry previous = this.mInstanceInstrument.findLastExecuted( instance.getTaskGuid(), bizTimeLab );
        int runCount = 0;
        int sequenceCnt = 0;
        int retryCnt = 0;
        if ( previous != null ) {
            runCount = previous.getRunCount() + 1;
            if ( feature.isRetry() ) {
                sequenceCnt = previous.getSequenceCnt();
                retryCnt = previous.getRetryCnt() + 1;
            }
            else {
                sequenceCnt = previous.getSequenceCnt() + 1;
                retryCnt = 0;
            }
        }
        else {
            runCount = 1;
            sequenceCnt = 1;
            retryCnt = 0;
        }

        LocalDateTime bizTime = this.evalBusinessTime( instance, feature.getBizTimeEpoch() );
        entry.setRunCount( runCount );
        entry.setSequenceCnt( sequenceCnt );
        entry.setRetryCnt( retryCnt );
        if ( entry.getGuid() == null ) {
            entry.setGuid( this.mGuidAllocator.nextGUID() );
        }
        entry.setInstanceStatus( TaskInstanceStatus.New );
        entry.setBusinessTime( bizTime );
        this.mTaskInstrument.getInstanceInstrument().addInstance( entry );

        this.getLogger().info(
            "[TaskLaunchSequence] [Schema] (Task: `{}`, InstanceName: `{}`, InsGuid: `{}`, RunCount: {}, SequenceCnt: {}, RetryCnt: {}, RetryMode: {}, BusinessTime: {}) ",
            instance.getOwnedTask().getName(), szInstanceName, entry.getGuid(), runCount, sequenceCnt, retryCnt, feature.isRetry(), bizTime
        );
    }

    protected void afterProcessCreated( RavenTaskInstance instance, UProcess process ) throws MetaPersistenceException {
        instance.getInstanceEntry().setInstanceStatus( TaskInstanceStatus.ProcessStandby );
        instance.update();
    }

    protected URI evalImageURI( RavenTaskInstance instance, LaunchFeature feature ) {
        URI imageURI = feature.getDesignatedImageURI();
        if ( imageURI == null ) {
            imageURI = instance.getProcessImageURI();
        }
        return imageURI;
    }

    // Generic type restored: the raw `List` was stripped by extraction, as the
    // uncast for-each over ProcessEventHandler indicates.
    protected UProcess prepareProcessHandle( UProcess process, LaunchFeature feature ) {
        List<ProcessEventHandler> handlers = feature.getSysProcEventHandlers();
        if ( CollectionUtils.isNoneEmpty( handlers ) ) {
            for ( ProcessEventHandler handler : handlers ) {
                this.mImageModifier.addSystemProcessEventHandler( process.getExecutionImage().getEntryPoint(), handler );
            }
        }
        return process;
    }

    @Override
    public UProcess createLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException {
        try {
            this.initializeInstance( instance, feature );
            URI imageURI = this.evalImageURI( instance, feature );
            ImageLoader imageLoader = this.mProcessManager.getImageLoader();
            ExecutionImage image;
            UProcess process = null;
            if ( imageLoader instanceof URLImageLoader ) {
                URLImageLoader urlImageLoader = (URLImageLoader) imageLoader;
                image = urlImageLoader.queryExecutionImage( imageURI );
            }
            else {
                image = imageLoader.queryExecutionImage( imageURI.getPath() );
            }
            if ( image == null ) {
                instance.getInstanceEntry().setErrorCause( LaunchErrorCauses.NoSuchImage );
            }
            else {
                instance.getInstanceEntry().setImagePath( imageURI.toString() );
                this.mLogger.info( "[TaskLaunchSequence] [LocalProcessAnchored] (Process: `{}`) ", imageURI );
                process = this.mProcessManager.createLocalHostedProcess( image, feature.getParentProcess(), feature.getStartupArgs(), feature.getContextEnvironmentVars() );
            }
            // Guard added: prepareProcessHandle dereferences the process image when
            // handlers are present, which would NPE on the NoSuchImage path above.
            if ( process != null ) {
                this.prepareProcessHandle( process, feature );
            }
            this.afterProcessCreated( instance, process );
            return process;
        }
        catch ( Exception e ) {
            throw new InstanceLaunchException( e );
        }
    }

    @Override
    public UProcess createRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException {
        try {
            this.initializeInstance( instance, feature );
            URI imageURI = this.evalImageURI( instance, feature );
            RemoteProcess process = null;
            GUID parentPid = null;
            if ( feature.getParentProcess() != null ) {
                parentPid = feature.getParentProcess().getPID();
            }
            else if ( feature.getParentPid() != null ) {
                parentPid = feature.getParentPid();
            }
            instance.getInstanceEntry().setImagePath( imageURI.toString() );
            this.mLogger.info( "[TaskLaunchSequence] [RemoteProcessAnchored] (Process: `{}`, DestinationDeployClient: `{}`) ", imageURI, pmClientId );
            RemoteProcessManagerServer.RemoteCreationResult result = this.mRemoteProcessManagerServer.createRemoteUProcess(
                pmClientId, imageURI.toString(), true, parentPid, feature.getStartupArgs(), feature.getContextEnvironmentVars()
            );
            process = result.getProcess();
            if ( result.getResponse().getStatus() != RemoteVitalizationStatus.New.getCode() || process == null ) {
                instance.getInstanceEntry().setErrorCause( LaunchErrorCauses.RemoteProcessCreationFailure );
            }
            // Guard added, mirroring createLocally: avoid dereferencing a null process.
            if ( process != null ) {
                this.prepareProcessHandle( process, feature );
            }
            this.afterProcessCreated( instance, process );
            return process;
        }
        catch ( Exception e ) {
            throw new InstanceLaunchException( e );
        }
    }

    protected void afterOwnedProcessTerminated( RavenTaskInstance instance, UProcess process ) {
        try {
            instance.getInstanceEntry().setInstanceStatus( TaskInstanceStatus.Finished );
            instance.getInstanceEntry().setLastEndTime( LocalDateTime.now() );
            instance.update();
        }
        catch ( MetaPersistenceException e ) {
            mLogger.error( "[TaskLaunchSequence] [MetaPersistenceException] (Process: `{}`, PID: `{}`) ", process.getName(), process.getPID() );
            mLogger.error( "[TaskLaunchSequence] [MetaPersistenceException: `{}`]", e );
        }
    }

    protected void afterOwnedProcessStarted( RavenTaskInstance instance, UProcess process ) throws InstanceLaunchException {
        try {
            instance.getInstanceEntry().setInstanceStatus( TaskInstanceStatus.Running );
            instance.getInstanceEntry().setLastStartTime( LocalDateTime.now() );
            instance.update();
        }
        catch ( MetaPersistenceException e ) {
            throw new InstanceLaunchException( e );
        }
    }

    @Override
    public UProcess launchLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException {
        this.getLogger().info(
            "[TaskLaunchSequence] [LaunchLocally] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) ",
            instance.getOwnedTask().getName(), instance.getOwnedTask().getFullName(), instance.getOwnedTask().getId()
        );
        UProcess process = this.createLocally( instance, feature );
        if ( process == null ) {
            return null;
        }
        this.mLogger.info( "[TaskLaunchSequence] [LocalProcessStandby] (Process: `{}`, PID: `{}`) ", process.getName(), process.getPID() );
        this.mLogger.info( "[TaskLaunchSequence] [ExecutingVitalizationInstruction] (Process: `{}`, PID: `{}`) ", process.getName(), process.getPID() );
        this.mImageModifier.addSystemProcessEventHandler( process.getExecutionImage().getEntryPoint(), new ProcessEventHandler() {
            @Override
            public void fired( EntryPointRunnable runnable, ProcessEvent event ) {
                if ( event == ProcessEvent.Terminated ) {
                    afterOwnedProcessTerminated( instance, process );
                    mLogger.info(
                        "[TaskLaunchSequence] [LocalTaskFinished] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) ",
                        instance.getOwnedTask().getName(), instance.getOwnedTask().getFullName(), instance.getOwnedTask().getId()
                    );
                }
            }
        });
        process.start();
        this.afterOwnedProcessStarted( instance, process );
        this.mLogger.info(
            "[TaskLaunchSequence] [LocalTaskLaunched] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`, PID: `{}`) ",
            instance.getOwnedTask().getName(), instance.getOwnedTask().getFullName(), instance.getOwnedTask().getId(), process.getPID()
        );
        return process;
    }

    @Override
    public UProcess launchRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException {
        this.getLogger().info(
            "[TaskLaunchSequence] [LaunchRemotely] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) ",
            instance.getOwnedTask().getName(), instance.getOwnedTask().getFullName(), instance.getOwnedTask().getId()
        );
        UProcess process = this.createRemotely( instance, pmClientId, feature );
        if ( process == null ) {
            return null;
        }
        this.mLogger.info( "[TaskLaunchSequence] [RemoteProcessStandby] (Process: `{}`, PID: `{}`) ", process.getName(), process.getPID() );
        this.mLogger.info( "[TaskLaunchSequence] [SendingVitalizationInstruction] (Process: `{}`, PID: `{}`, DestinationClient: `{}`) ", process.getName(), process.getPID(), pmClientId );
        RemoteProcess remoteProcess = (RemoteProcess) process;
        remoteProcess.addRemoteEventHandler( new ProcessRemoteEventHandler() {
            @Override
            public void fired( long pmClientId, ProcessEvent event, Object caused ) {
                if ( event == ProcessEvent.Terminated ) {
                    afterOwnedProcessTerminated( instance, process );
                    mLogger.info(
                        "[TaskLaunchSequence] [RemoteTaskFinished] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) ",
                        instance.getOwnedTask().getName(), instance.getOwnedTask().getFullName(), instance.getOwnedTask().getId()
                    );
                }
            }
        });
        process.start();
        this.afterOwnedProcessStarted( instance, process );
        this.mLogger.info(
            "[TaskLaunchSequence] [RemoteTaskLaunched] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`, PID: `{}`) ",
            instance.getOwnedTask().getName(), instance.getOwnedTask().getFullName(), instance.getOwnedTask().getId(), process.getPID()
        );
        return process;
    }
}
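evalBusinessTime truncates the business timestamp to the schedule cycle's boundary. A worked example of the truncation rules in the switch above (times are illustrative):

    // Given biz = 2024-05-07T14:23:51.500:
    //   Day / Week / Month cycle -> 2024-05-07T00:00   (midnight of that day)
    //   Hour cycle               -> 2024-05-07T14:00
    //   Minute cycle             -> 2024-05-07T14:23
    //   Undefined                -> unchanged
    LocalDateTime biz = LocalDateTime.of( 2024, 5, 7, 14, 23, 51, 500_000_000 );
    LocalDateTime day = biz.withHour( 0 ).withMinute( 0 ).withSecond( 0 ).withNano( 0 ); // 2024-05-07T00:00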
================================================
FILE: Odin/odin-mapper-driver/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>odin</artifactId>
        <groupId>com.walnut.odin</groupId>
        <version>2.5.1</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>odin-mapper-driver</artifactId>
    <version>2.5.1</version>

    <dependencies>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-framework-runtime</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.slime.jelly</groupId>
            <artifactId>jelly</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kom.driver.default</groupId>
            <artifactId>hydra-kom-default-driver</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.tritium</groupId>
            <artifactId>hydra-system-tritium</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.walnut.odin</groupId>
            <artifactId>odin-architecture</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.walnut.odin</groupId>
            <artifactId>odin-framework-conduct</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.walnut.odin</groupId>
            <artifactId>odin-framework-atlas</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>
================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/ArchAtlasMappingDriver.java
================================================
package com.walnut.odin.atlas.mapper;

import com.pinecone.framework.system.ProxyProvokeHandleException;
import com.pinecone.framework.system.construction.UnifyStructureInjector;
import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.system.homotype.StereotypicInjector;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.slime.jelly.source.ibatis.ProxySessionMapperPool;

import java.util.List;
import java.util.Map;

public abstract class ArchAtlasMappingDriver implements AtlasMappingDriver {
    protected Hydrogen     mSystem;
    protected Processum    mSuperiorProcess;
    protected IbatisClient mIbatisClient;
    //protected SqlSession mSqlSession;
    protected List<Class<?>> mMapperCandidates;
    protected ResourceDispenserCenter mResourceDispenserCenter;

    public ArchAtlasMappingDriver( Processum superiorProcess ) {
        this.mSuperiorProcess = superiorProcess;
        if ( this.mSuperiorProcess instanceof Hydrogen ) {
            this.mSystem = (Hydrogen) this.mSuperiorProcess;
        }
        else {
            this.mSystem = (Hydrogen) superiorProcess.parentSystem();
        }
    }

    // Temp , TODO
    public ArchAtlasMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter, String szPackageName ) {
        this( superiorProcess );
        this.mIbatisClient = ibatisClient;
        //this.mSqlSession = ibatisClient.openSession( true ); //SqlSessionTemplate
        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUID72TypeHandler.class );
        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUIDTypeHandler.class );
        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( UOITypeHandler.class );
        ibatisClient.addXMLObjectScope( "mapper.kernel.task" );
        this.mMapperCandidates = ibatisClient.addDataAccessObjectScope( szPackageName );
        for ( Class<?> mapperClass : this.mMapperCandidates ) {
            dispenserCenter.getInstanceDispenser().register(
                mapperClass,
                //new SoloSessionMapperPool( this.mSqlSession, mapperClass )
                new ProxySessionMapperPool( ibatisClient, mapperClass )
            );
        }
        this.mResourceDispenserCenter = dispenserCenter;
    }

    @Override
    public StereotypicInjector autoConstruct( Class stereotype, Map config, Object instance ) {
        UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, this.mResourceDispenserCenter.getInstanceDispenser() );
        try {
            injector.inject( config, instance );
        }
        catch ( Exception e ) {
            throw new ProxyProvokeHandleException( e );
        }
        return injector;
    }

    @Override
    public String getVersionSignature() { return "HydraniumV2.1"; }

    @Override
    public Hydrogen getSystem() { return this.mSystem; }

    @Override
    public Processum getSuperiorProcess() { return this.mSuperiorProcess; }
}

================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/GUID72TypeHandler.java
================================================
package com.walnut.odin.atlas.mapper;

import com.pinecone.ulf.util.guid.i64.GUID72;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

@MappedTypes( GUID72.class )
@MappedJdbcTypes( JdbcType.VARCHAR )
public class GUID72TypeHandler extends BaseTypeHandler<GUID72> {
    @Override
    public void setNonNullParameter( PreparedStatement ps, int i, GUID72 parameter, JdbcType jdbcType ) throws SQLException {
        ps.setString( i, parameter.toString() );
    }

    @Override
    public GUID72 getNullableResult( ResultSet rs, String columnName ) throws SQLException {
        String value = rs.getString( columnName );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new GUID72( value );
    }

    @Override
    public GUID72 getNullableResult( ResultSet rs, int columnIndex ) throws SQLException {
        String value = rs.getString( columnIndex );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new GUID72( value );
    }

    @Override
    public GUID72 getNullableResult( CallableStatement cs, int columnIndex ) throws SQLException {
        String value = cs.getString( columnIndex );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new GUID72( value );
    }
}
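The type handlers in this package translate GUID-like value objects to and from VARCHAR columns. Registration is programmatic, as ArchAtlasMappingDriver shows at start-up; the same calls in isolation (`configuration` is a MyBatis org.apache.ibatis.session.Configuration):

    // Programmatic registration against a MyBatis Configuration.
    configuration.getTypeHandlerRegistry().register( GUID72TypeHandler.class );
    configuration.getTypeHandlerRegistry().register( GUIDTypeHandler.class );
    configuration.getTypeHandlerRegistry().register( UOITypeHandler.class );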
================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/GUIDTypeHandler.java
================================================
package com.walnut.odin.atlas.mapper;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.ulf.util.guid.i128.UUID128;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

@MappedTypes( GUID.class )
@MappedJdbcTypes( JdbcType.VARCHAR )
public class GUIDTypeHandler extends BaseTypeHandler<GUID> {
    @Override
    public void setNonNullParameter( PreparedStatement ps, int i, GUID parameter, JdbcType jdbcType ) throws SQLException {
        ps.setString( i, parameter.toString() );
    }

    @Override
    public GUID getNullableResult( ResultSet rs, String columnName ) throws SQLException {
        String value = rs.getString( columnName );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new UUID128( value );
    }

    @Override
    public GUID getNullableResult( ResultSet rs, int columnIndex ) throws SQLException {
        String value = rs.getString( columnIndex );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new UUID128( value );
    }

    @Override
    public GUID getNullableResult( CallableStatement cs, int columnIndex ) throws SQLException {
        String value = cs.getString( columnIndex );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new UUID128( value );
    }
}

================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/OdinAtlasMappingDriver.java
================================================
package com.walnut.odin.atlas.mapper;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;
import com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;

public class OdinAtlasMappingDriver extends ArchAtlasMappingDriver implements AtlasMappingDriver {
    protected AtlasMasterManipulator mVectorGraphMasterManipulator;

    public OdinAtlasMappingDriver( Processum superiorProcess ) {
        super( superiorProcess );
    }

    public OdinAtlasMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {
        super( superiorProcess, ibatisClient, dispenserCenter, OdinAtlasMappingDriver.class.getPackageName() );
        this.mVectorGraphMasterManipulator = new OdinAtlasMasterManipulatorImpl( this );
    }

    @Override
    public AtlasMasterManipulator getMasterManipulator() { return this.mVectorGraphMasterManipulator; }
}

================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/OdinAtlasMasterGraphManipulatorImpl.java
================================================
package com.walnut.odin.atlas.mapper;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class OdinAtlasMasterGraphManipulatorImpl implements VectorGraphMasterManipulator {
    @Resource
    @Structure( type = RuntimeVGraphMapper.class )
    VectorGraphManipulator mVectorGraphManipulator;

    @Resource
    @Structure( type = RuntimeVectorGraphPathCacheMapper.class )
    VectorGraphPathCacheManipulator mVectorGraphPathCacheManipulator;

    public OdinAtlasMasterGraphManipulatorImpl() {}

    public OdinAtlasMasterGraphManipulatorImpl( AtlasMappingDriver driver ) {
        driver.autoConstruct( OdinAtlasMasterGraphManipulatorImpl.class, Map.of(), this );
    }

    @Override
    public VectorGraphManipulator getVectorGraphManipulator() { return this.mVectorGraphManipulator; }

    @Override
    public VectorGraphPathCacheManipulator getVectorGraphPathCacheManipulator() { return this.mVectorGraphPathCacheManipulator; }
}
================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/OdinAtlasMasterManipulatorImpl.java
================================================
package com.walnut.odin.atlas.mapper;

import javax.annotation.Resource;
import java.util.Map;

import org.springframework.stereotype.Component;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;
import com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;

@Component
public class OdinAtlasMasterManipulatorImpl implements RunAtlasMasterManipulator {
    @Resource
    @Structure( type = OdinAtlasMasterGraphManipulatorImpl.class )
    VectorGraphMasterManipulator mVectorGraphMasterManipulator;

    @Resource
    @Structure( type = QueueStratumMapper.class )
    QueueStratumManipulator mQueueStratumManipulator;

    public OdinAtlasMasterManipulatorImpl() {}

    public OdinAtlasMasterManipulatorImpl( AtlasMappingDriver driver ) {
        driver.autoConstruct( OdinAtlasMasterManipulatorImpl.class, Map.of(), this );
        this.mVectorGraphMasterManipulator = new OdinAtlasMasterGraphManipulatorImpl( driver );
    }

    @Override
    public VectorGraphMasterManipulator getVectorGraphMasterManipulator() { return this.mVectorGraphMasterManipulator; }

    @Override
    public QueueStratumManipulator getQueueStratumManipulator() { return this.mQueueStratumManipulator; }
}

================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/UOITypeHandler.java
================================================
package com.walnut.odin.atlas.mapper;

import com.pinecone.framework.util.uoi.UOI;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

@MappedTypes( UOI.class )
@MappedJdbcTypes( JdbcType.VARCHAR )
public class UOITypeHandler extends BaseTypeHandler<UOI> {
    @Override
    public void setNonNullParameter( PreparedStatement ps, int i, UOI parameter, JdbcType jdbcType ) throws SQLException {
        ps.setString( i, parameter.toString() );
    }

    @Override
    public UOI getNullableResult( ResultSet rs, String columnName ) throws SQLException {
        String value = rs.getString( columnName );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new UOI( value );
    }

    @Override
    public UOI getNullableResult( ResultSet rs, int columnIndex ) throws SQLException {
        String value = rs.getString( columnIndex );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new UOI( value );
    }

    @Override
    public UOI getNullableResult( CallableStatement cs, int columnIndex ) throws SQLException {
        String value = cs.getString( columnIndex );
        if ( value == null ) {
            return null; // return null directly when the column value is null
        }
        return new UOI( value );
    }
}
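The manipulator impls above rely on the driver's autoConstruct to populate their @Resource/@Structure fields from the ResourceDispenserCenter. A rough illustration of that injection contract, stated as comments because UnifyStructureInjector's internals are not part of this extract (the steps are an inference, not the verified implementation):

    // Illustrative only: what autoConstruct is expected to do for a field like
    //   @Resource @Structure( type = CategoryTypeMapper.class )
    //   protected CategoryTypeManipulator categoryTypeManipulator;
    // 1. read the @Structure type ( CategoryTypeMapper.class );
    // 2. look up the pooled mapper instance registered for that class
    //    ( see the ProxySessionMapperPool registration in ArchAtlasMappingDriver );
    // 3. assign it to the annotated field of the instance being constructed.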
================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/task/mapper/OdinUniformTaskMappingDriver.java
================================================
package com.walnut.odin.task.mapper;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;
import com.pinecone.hydra.system.component.ResourceDispenserCenter;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;
import com.pinecone.hydra.task.ibatis.hydranium.TaskMappingDriver;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;

public class OdinUniformTaskMappingDriver extends ArchMappingDriver implements OdinTaskMappingDriver {
    protected KOIMasterManipulator mKOIMasterManipulator;
    protected KOIMappingDriver     mParentDriver;

    public OdinUniformTaskMappingDriver( Processum superiorProcess ) {
        super( superiorProcess );
    }

    public OdinUniformTaskMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {
        super( superiorProcess, ibatisClient, dispenserCenter, OdinUniformTaskMappingDriver.class.getPackageName().replace( "hydranium", "" ) );
        this.mParentDriver = new TaskMappingDriver( superiorProcess, ibatisClient, dispenserCenter );
        this.mKOIMasterManipulator = new RavenTaskMasterManipulatorImpl( this, (TaskMappingDriver) this.getParentDriver() );
    }

    @Override
    public KOIMasterManipulator getMasterManipulator() { return this.mKOIMasterManipulator; }

    @Override
    public KOIMappingDriver getParentDriver() { return this.mParentDriver; }
}

================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/task/mapper/RavenTaskMasterManipulatorImpl.java
================================================
package com.walnut.odin.task.mapper;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;
import com.pinecone.hydra.task.ibatis.hydranium.TaskMappingDriver;
import com.pinecone.hydra.task.kom.source.TaskMasterManipulator;
import com.walnut.odin.task.source.ScheduleManipulator;
import com.walnut.odin.task.source.CategoryMappingManipulator;
import com.walnut.odin.task.source.CategoryTypeManipulator;
import com.walnut.odin.task.source.RavenTaskMasterManipulator;
import com.walnut.odin.task.source.TaskCategoryManipulator;
import com.walnut.odin.task.source.TaskProcessorManipulator;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class RavenTaskMasterManipulatorImpl implements RavenTaskMasterManipulator {
    protected KOISkeletonMasterManipulator skeletonMasterManipulator;
    protected TaskMappingDriver            taskMappingDriver;
    protected TaskMasterManipulator        taskMasterManipulator;

    @Resource
    @Structure( type = CategoryTypeMapper.class )
    protected CategoryTypeManipulator categoryTypeManipulator;

    @Resource
    @Structure( type = TaskCategoryMapper.class )
    protected TaskCategoryManipulator taskCategoryManipulator;

    @Resource
    @Structure( type = CategoryMappingMapper.class )
    protected CategoryMappingManipulator categoryMappingManipulator;

    @Resource
    @Structure( type = TaskProcessorMapper.class )
    protected TaskProcessorManipulator taskProcessorManipulator;

    protected ScheduleManipulator scheduleManipulator;

    public RavenTaskMasterManipulatorImpl( KOIMappingDriver driver, TaskMappingDriver taskMappingDriver ) {
        driver.autoConstruct( RavenTaskMasterManipulatorImpl.class, Map.of(), this );
        this.taskMappingDriver         = taskMappingDriver;
        this.taskMasterManipulator     = (TaskMasterManipulator) taskMappingDriver.getMasterManipulator();
        this.skeletonMasterManipulator = this.taskMasterManipulator.getSkeletonMasterManipulator();
        this.scheduleManipulator       = new ScheduleManipulatorImpl( driver );
    }

    @Override
    public TaskMasterManipulator getTaskMasterManipulator() { return this.taskMasterManipulator; }

    @Override
    public TaskMappingDriver getTaskMappingDriver() { return this.taskMappingDriver; }

    @Override
    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; }

    @Override
    public CategoryTypeManipulator getCategoryTypeManipulator() { return this.categoryTypeManipulator; }

    @Override
    public TaskCategoryManipulator getTaskCategoryManipulator() { return this.taskCategoryManipulator; }

    @Override
    public CategoryMappingManipulator getCategoryMappingManipulator() { return this.categoryMappingManipulator; }

    @Override
    public TaskProcessorManipulator getTaskProcessorManipulator() { return this.taskProcessorManipulator; }

    @Override
    public ScheduleManipulator getScheduleManipulator() { return this.scheduleManipulator; }
}
================================================
FILE: Odin/odin-mapper-driver/src/main/java/com/walnut/odin/task/mapper/ScheduleManipulatorImpl.java
================================================
package com.walnut.odin.task.mapper;

import com.pinecone.framework.system.construction.Structure;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.walnut.odin.task.source.ScheduleManipulator;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Map;

@Component
public class ScheduleManipulatorImpl implements ScheduleManipulator {
    @Resource
    @Structure( type = InstanceEventMapper.class )
    private InstanceEventMapper mInstanceEventMapper;

    @Resource
    @Structure( type = InstanceAtlasAdjacentMapper.class )
    private InstanceAtlasAdjacentMapper mInstanceAtlasAdjacentMapper;

    @Resource
    @Structure( type = InstanceAtlasNodeMapper.class )
    private InstanceAtlasNodeMapper mInstanceAtlasNodeMapper;

    @Resource
    @Structure( type = InstanceExecMapper.class )
    private InstanceExecMapper mInstanceExecMapper;

    public ScheduleManipulatorImpl() {
    }

    public ScheduleManipulatorImpl( KOIMappingDriver driver ) {
        driver.autoConstruct( ScheduleManipulatorImpl.class, Map.of(), this );
    }

    public ScheduleManipulatorImpl( InstanceEventMapper instanceEventMapper, InstanceAtlasAdjacentMapper instanceAtlasAdjacentMapper, InstanceAtlasNodeMapper instanceAtlasNodeMapper, InstanceExecMapper instanceExecMapper ) {
        this.mInstanceEventMapper         = instanceEventMapper;
        this.mInstanceAtlasAdjacentMapper = instanceAtlasAdjacentMapper;
        this.mInstanceAtlasNodeMapper     = instanceAtlasNodeMapper;
        this.mInstanceExecMapper          = instanceExecMapper;
    }

    @Override
    public InstanceEventMapper getInstanceEventMapper() { return this.mInstanceEventMapper; }

    @Override
    public InstanceAtlasAdjacentMapper getInstanceAtlasAdjacentMapper() { return this.mInstanceAtlasAdjacentMapper; }

    @Override
    public InstanceAtlasNodeMapper getInstanceAtlasNodeMapper() { return this.mInstanceAtlasNodeMapper; }

    @Override
    public InstanceExecMapper getInstanceExecMapper() { return this.mInstanceExecMapper; }
}

================================================
FILE: Odin/odin-system/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>odin</artifactId>
        <groupId>com.walnut.odin</groupId>
        <version>2.5.1</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.walnut.odin</groupId>
    <artifactId>odin-system</artifactId>
    <version>2.5.1</version>
    <packaging>jar</packaging>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.ulf</groupId>
            <artifactId>ulfhedinn</artifactId>
            <version>1.2.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-framework-runtime</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-message-control</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.slime.jelly</groupId>
            <artifactId>jelly</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.walnut.odin</groupId>
            <artifactId>odin-architecture</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.walnut.odin</groupId>
            <artifactId>odin-framework-atlas</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.walnut.odin</groupId>
            <artifactId>odin-mapper-driver</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>

================================================
FILE: Odin/odin-system/src/main/java/com/walnut/odin/system/Odin.java
================================================
package com.walnut.odin.system;

import com.pinecone.framework.util.config.PatriarchalConfig;
import com.pinecone.framework.util.io.Tracer;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.json.homotype.MapStructure;
import com.pinecone.hydra.layer.ibatis.hydranium.LayerMappingDriver;
import com.pinecone.hydra.proc.ProcessManager;
import com.pinecone.hydra.proc.ProcessManagerSystema;
import com.pinecone.hydra.system.ArchModularizedSubsystem;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.component.LogStatuses;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.umc.msg.MessageNode;
import com.pinecone.hydra.umc.wolf.server.UlfServer;
import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;
import com.pinecone.hydra.unit.vgraph.layer.VLayerInstrument;
import com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.tritium.system.TritiumSystem;
import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;
import com.walnut.odin.atlas.graph.UniformRuntimeAtlas;
import com.walnut.odin.atlas.mapper.OdinAtlasMappingDriver;
import com.walnut.odin.conduct.CollectiveTaskRegiment;
import com.walnut.odin.conduct.RavenCollectiveTaskRegiment;
import com.walnut.odin.conduct.schedule.RavenTaskScheduler;
import com.walnut.odin.conduct.schedule.UniformTaskScheduler;
import com.walnut.odin.proc.server.RavenRemoteProcessManagerServer;
import com.walnut.odin.proc.server.RemoteProcessManagerServer;
import com.walnut.odin.task.CentralizedTaskInstrument;
import com.walnut.odin.task.GenericRavenTaskConfig;
import com.walnut.odin.task.RavenTaskInstrument;
import com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver;

public class Odin extends ArchModularizedSubsystem implements TaskCentralControl {
    private CollectiveTaskRegiment mTaskRegiment;
    private LayerInstrument        mLayerInstrument;
    private RuntimeAtlasInstrument mAtlasInstrument;
    private UniformTaskScheduler   mTaskScheduler;

    @MapStructure( "metaDependent.atlasDatabase" )
    private String mszAtlasDatabaseKey;

    @MapStructure( "metaDependent.taskInstrument" )
    private String mszTaskInstrumentKey;

    @MapStructure( "metaDependent.controlRPCDriver" )
    private String mszControlRPCDriverKey;

    @MapStructure( "metaDependent.processManager" )
    private String mszProcessManagerKey;

    public Odin( Hydrogen primarySystem, String name, PatriarchalConfig config ) {
        super( primarySystem, name, config );
        TritiumSystem sys = (TritiumSystem) this.parentSystem();
        sys.getPrimaryConfigScope().autoInject( Odin.class, config, this );
    }

    @Override
    protected void traceWelcomeInfo() {
        Tracer console = this.mPrimarySystem.console();
        console.getOut().print( "---------------------------------------------------------------\n" );
        console.getOut().print( "\u001B[31mBean Nuts Acorn Odin\u001B[0m\n" );
        console.getOut().print( "\u001B[31mMassive Task Orchestration System \u001B[0m\n" );
        console.getOut().print( "\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\u001B[0m\n" );
        console.getOut().print( "---------------------------------------------------------------\n" );
    }
    protected void init() {
        this.getLogger().info( " >>> System Booting..." );
        this.infoLifecycle( " Domain Subsystem Initialization", LogStatuses.StatusStart );
        this.traceWelcomeInfo();
        this.prepare_system_skeleton();
        this.infoLifecycle( " Welcome to the Odin task central control!", LogStatuses.StatusReady );
        this.infoLifecycle( " Domain Subsystem Initialization", LogStatuses.StatusReady );
    }

    protected void prepare_instrumentation() {
        this.infoLifecycle( " Constructing components `Instrumentation`.", LogStatuses.StatusStart );
        TritiumSystem sys = (TritiumSystem) this.parentSystem();

        KOIMappingDriver layerMappingDriver = new LayerMappingDriver(
            sys, (IbatisClient) sys.getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszAtlasDatabaseKey ), sys.getDispenserCenter()
        );
        AtlasMappingDriver atlasMappingDriver = new OdinAtlasMappingDriver(
            sys, (IbatisClient) sys.getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszAtlasDatabaseKey ), sys.getDispenserCenter()
        );
        KOIMappingDriver taskDriver = new OdinUniformTaskMappingDriver(
            sys, (IbatisClient) sys.getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszTaskInstrumentKey ), sys.getDispenserCenter()
        );
        CentralizedTaskInstrument taskInstrument = new RavenTaskInstrument( taskDriver, new GenericRavenTaskConfig( (JSONObject) this.mSubsystemConfig ) );
        this.infoLifecycle( " Constructing component `TaskInstrument`.", LogStatuses.StatusDone );

        this.mLayerInstrument = new VLayerInstrument( layerMappingDriver );
        this.mAtlasInstrument = new UniformRuntimeAtlas( atlasMappingDriver, taskInstrument, this.mLayerInstrument );
        this.infoLifecycle( " Constructing component `AtlasInstrument`.", LogStatuses.StatusDone );

        MessageNode messageNode = sys.getMiddlewareDirector().getMessagersManager().getMessageNodeByName( this.mszControlRPCDriverKey );
        if ( messageNode == null ) {
            messageNode = (MessageNode) sys.getDispenserCenter().getInstanceDispenser().getRegisteredInstance( this.mszControlRPCDriverKey );
        }
        UlfServer rpcServer = (UlfServer) messageNode;
        if ( rpcServer != null ) {
            ProcessManager pm = (ProcessManager) sys.getDispenserCenter().getInstanceDispenser().getRegisteredInstance( this.mszProcessManagerKey );
            RemoteProcessManagerServer server = new RavenRemoteProcessManagerServer( pm, rpcServer );
            this.mTaskRegiment = new RavenCollectiveTaskRegiment( (ProcessManagerSystema) sys, taskInstrument, server );
        }
        this.infoLifecycle( " Constructing component `TaskRegiment`.", LogStatuses.StatusDone );
        this.infoLifecycle( " Constructing components `Instrumentation`.", LogStatuses.StatusDone );
    }

    protected void prepare_scheduler() {
        this.infoLifecycle( " Constructing component `TaskScheduler`.", LogStatuses.StatusStart );
        this.mTaskScheduler = new RavenTaskScheduler( this.mTaskRegiment.taskInstrument(), this.mAtlasInstrument, this.mTaskRegiment.taskDispatcher() );
        this.infoLifecycle( " Constructing component `TaskScheduler`.", LogStatuses.StatusDone );
    }

    protected void prepare_system_skeleton() {
        this.infoLifecycle( " Preparing system skeleton.", LogStatuses.StatusStart );
        this.prepare_instrumentation();
        this.prepare_scheduler();
        this.infoLifecycle( " Preparing system skeleton.", LogStatuses.StatusDone );
    }

    @Override
    public void vitalize() { this.init(); }

    @Override
    public void terminate() { }

    public LayerInstrument layerInstrument() { return this.mLayerInstrument; }

    public RuntimeAtlasInstrument atlasInstrument() { return this.mAtlasInstrument; }

    public CollectiveTaskRegiment taskRegiment() { return this.mTaskRegiment; }

    public UniformTaskScheduler taskScheduler() { return this.mTaskScheduler; }
}
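Odin's @MapStructure bindings imply a subsystem config shape like the following sketch; the key names come from the annotations above, while the values are hypothetical placeholders for middleware registrations:

    {
        "metaDependent": {
            "atlasDatabase":    "odin_atlas_db",
            "taskInstrument":   "odin_task_db",
            "controlRPCDriver": "odin_control_rpc",
            "processManager":   "odin_process_manager"
        }
    }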
================================================ FILE: Odin/pom.xml ================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>sauron</artifactId>
        <groupId>com.sauron</groupId>
        <version>1.2.7</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.walnut.odin</groupId>
    <artifactId>odin</artifactId>
    <packaging>pom</packaging>
    <version>2.5.1</version>

    <modules>
        <module>odin-architecture</module>
        <module>odin-framework-atlas</module>
        <module>odin-framework-runtime</module>
        <module>odin-framework-conduct</module>
        <module>odin-mapper-driver</module>
        <module>odin-system</module>
    </modules>
</project>
================================================ FILE: Pinecones/Jelly/pom.xml ================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>pinecones</artifactId>
        <groupId>com.pinecones</groupId>
        <version>2.5.1</version>
    </parent>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>9</source>
                    <target>9</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.slime.jelly</groupId>
    <artifactId>jelly</artifactId>
    <version>2.1.0</version>

    <dependencies>
        <dependency>
            <groupId>com.pinecone.hydra.kernel</groupId>
            <artifactId>hydra-framework-runtime</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.pinecone.slime</groupId>
            <artifactId>slime</artifactId>
            <version>2.1.0</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>org.mybatis</groupId>
            <artifactId>mybatis</artifactId>
            <version>3.5.9</version>
        </dependency>
        <dependency>
            <groupId>org.mybatis</groupId>
            <artifactId>mybatis-spring</artifactId>
            <version>2.0.6</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.26</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.30</version>
        </dependency>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>3.3.0</version>
        </dependency>
        <dependency>
            <groupId>net.spy</groupId>
            <artifactId>spymemcached</artifactId>
            <version>2.12.3</version>
        </dependency>
    </dependencies>
</project>
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/map/SS.java ================================================
package com.pinecone.slime.jelly.map;

public class SS {
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/NamespacedKey.java ================================================
package com.pinecone.slime.jelly.source;

import com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;

public final class NamespacedKey {
    public static String getFullKey( IndexableTargetScopeMeta meta, String szNameSeparator, String szNamespace, Object key ) {
        String ns = null;
        if ( szNamespace != null ) {
            ns = szNamespace;
        }
        else if ( meta.getIndexKey() != null ) {
            // Index as namespace
            ns = meta.getIndexKey();
        }
        if( ns != null && !ns.isEmpty() ) {
            return ns + szNameSeparator + key.toString();
        }
        return key.toString();
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/CannotAcquireLockException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class CannotAcquireLockException extends PessimisticLockingFailureException {
    public CannotAcquireLockException( String msg ) {
        super(msg);
    }

    public CannotAcquireLockException( String msg, Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/CannotSerializeTransactionException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class CannotSerializeTransactionException extends PessimisticLockingFailureException {
    public CannotSerializeTransactionException( String msg ) {
        super(msg);
    }

    public CannotSerializeTransactionException( String msg, Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/ConcurrencyFailureException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

import com.pinecone.framework.system.Nullable;

public class ConcurrencyFailureException extends TransientDataAccessException {
    public ConcurrencyFailureException( String msg ) {
        super(msg);
    }

    public ConcurrencyFailureException( String msg, @Nullable Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DataAccessException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

import com.pinecone.framework.system.NestedRuntimeException;
import com.pinecone.framework.system.Nullable;

public abstract class DataAccessException extends NestedRuntimeException {
    public DataAccessException( String msg ) {
        super(msg);
    }

    public DataAccessException( @Nullable String msg, @Nullable Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DataAccessResourceFailureException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

import com.pinecone.framework.system.Nullable;

public class DataAccessResourceFailureException extends NonTransientDataAccessResourceException {
    public DataAccessResourceFailureException(String msg) {
        super(msg);
    }

    public DataAccessResourceFailureException(String msg, @Nullable Throwable cause) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DataIntegrityViolationException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class DataIntegrityViolationException extends NonTransientDataAccessException {
    public DataIntegrityViolationException( String msg ) {
        super(msg);
    }

    public DataIntegrityViolationException( String msg, Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DeadlockLoserDataAccessException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class DeadlockLoserDataAccessException extends PessimisticLockingFailureException {
    public DeadlockLoserDataAccessException( String msg, Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DuplicateKeyException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class DuplicateKeyException extends DataIntegrityViolationException {
    public DuplicateKeyException( String msg ) {
        super(msg);
    }

    public DuplicateKeyException( String msg, Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/InvalidDataAccessApiUsageException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class InvalidDataAccessApiUsageException extends NonTransientDataAccessException {
    public InvalidDataAccessApiUsageException(String msg) {
        super(msg);
    }

    public InvalidDataAccessApiUsageException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/InvalidDataAccessResourceUsageException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class InvalidDataAccessResourceUsageException extends NonTransientDataAccessException {
    public InvalidDataAccessResourceUsageException( String msg ) {
        super(msg);
    }

    public InvalidDataAccessResourceUsageException( String msg, Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE:
Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/NonTransientDataAccessException.java ================================================ package com.pinecone.slime.jelly.source.ds.dao; import com.pinecone.framework.system.Nullable; public abstract class NonTransientDataAccessException extends DataAccessException { public NonTransientDataAccessException( String msg ) { super(msg); } public NonTransientDataAccessException( @Nullable String msg, @Nullable Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/NonTransientDataAccessResourceException.java ================================================ package com.pinecone.slime.jelly.source.ds.dao; import com.pinecone.framework.system.Nullable; public class NonTransientDataAccessResourceException extends NonTransientDataAccessException { public NonTransientDataAccessResourceException( String msg ) { super(msg); } public NonTransientDataAccessResourceException( String msg, @Nullable Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/PermissionDeniedDataAccessException.java ================================================ package com.pinecone.slime.jelly.source.ds.dao; public class PermissionDeniedDataAccessException extends NonTransientDataAccessException { public PermissionDeniedDataAccessException( String msg, Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/PersistenceExceptionTranslator.java ================================================ package com.pinecone.slime.jelly.source.ds.dao; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; @FunctionalInterface public interface PersistenceExceptionTranslator extends Pinenut { @Nullable DataAccessException translateExceptionIfPossible(RuntimeException e ); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/PessimisticLockingFailureException.java ================================================ package com.pinecone.slime.jelly.source.ds.dao; public class PessimisticLockingFailureException extends ConcurrencyFailureException { public PessimisticLockingFailureException( String msg ) { super(msg); } public PessimisticLockingFailureException( String msg, Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/QueryTimeoutException.java ================================================ package com.pinecone.slime.jelly.source.ds.dao; public class QueryTimeoutException extends TransientDataAccessException { public QueryTimeoutException( String msg ) { super(msg); } public QueryTimeoutException( String msg, Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/RecoverableDataAccessException.java ================================================ package com.pinecone.slime.jelly.source.ds.dao; public class RecoverableDataAccessException extends DataAccessException { public RecoverableDataAccessException( String msg ) { super(msg); } public RecoverableDataAccessException( String msg, Throwable cause ) { 
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/TransientDataAccessException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

import com.pinecone.framework.system.Nullable;

public abstract class TransientDataAccessException extends DataAccessException {
    public TransientDataAccessException( String msg ) {
        super(msg);
    }

    public TransientDataAccessException( String msg, @Nullable Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/TransientDataAccessResourceException.java ================================================
package com.pinecone.slime.jelly.source.ds.dao;

public class TransientDataAccessResourceException extends TransientDataAccessException {
    public TransientDataAccessResourceException( String msg ) {
        super(msg);
    }

    public TransientDataAccessResourceException( String msg, Throwable cause ) {
        super(msg, cause);
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/AbstractFallbackSQLExceptionTranslator.java ================================================
package com.pinecone.slime.jelly.source.ds.jdbc;

import java.sql.SQLException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.Assert;
import com.pinecone.slime.jelly.source.ds.dao.DataAccessException;

public abstract class AbstractFallbackSQLExceptionTranslator implements SQLExceptionTranslator {
    protected final Log logger = LogFactory.getLog(this.getClass());

    @Nullable
    private SQLExceptionTranslator fallbackTranslator;

    public AbstractFallbackSQLExceptionTranslator() {
    }

    public void setFallbackTranslator(@Nullable SQLExceptionTranslator fallback) {
        this.fallbackTranslator = fallback;
    }

    @Nullable
    public SQLExceptionTranslator getFallbackTranslator() {
        return this.fallbackTranslator;
    }

    @Nullable
    @Override
    public DataAccessException translate(String task, @Nullable String sql, SQLException ex) {
        Assert.notNull(ex, "Cannot translate a null SQLException");
        DataAccessException dae = this.doTranslate(task, sql, ex);
        if (dae != null) {
            return dae;
        } else {
            SQLExceptionTranslator fallback = this.getFallbackTranslator();
            return fallback != null ? fallback.translate(task, sql, ex) : null;
        }
    }

    @Nullable
    protected abstract DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex);

    protected String buildMessage(String task, @Nullable String sql, SQLException ex) {
        return task + "; " + (sql != null ? "SQL [" + sql + "]; " : "") + ex.getMessage();
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/BadSqlGrammarException.java ================================================
package com.pinecone.slime.jelly.source.ds.jdbc;

import java.sql.SQLException;

import com.pinecone.slime.jelly.source.ds.dao.InvalidDataAccessResourceUsageException;

public class BadSqlGrammarException extends InvalidDataAccessResourceUsageException {
    private final String sql;

    public BadSqlGrammarException( String task, String sql, SQLException ex ) {
        super( task + "; bad SQL grammar [" + sql + "]", ex );
        this.sql = sql;
    }

    public SQLException getSQLException() {
        return (SQLException) this.getCause();
    }

    public String getSql() {
        return this.sql;
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/CustomSQLErrorCodesTranslation.java ================================================
package com.pinecone.slime.jelly.source.ds.jdbc;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.StringUtils;
import com.pinecone.slime.jelly.source.ds.dao.DataAccessException;

public class CustomSQLErrorCodesTranslation {
    private String[] errorCodes = new String[0];

    @Nullable
    private Class exceptionClass;

    public CustomSQLErrorCodesTranslation() {
    }

    public void setErrorCodes(String... errorCodes) {
        this.errorCodes = StringUtils.sortStringArray(errorCodes);
    }

    public String[] getErrorCodes() {
        return this.errorCodes;
    }

    public void setExceptionClass(@Nullable Class exceptionClass) {
        if ( exceptionClass != null && !DataAccessException.class.isAssignableFrom(exceptionClass) ) {
            throw new IllegalArgumentException("Invalid exception class [" + exceptionClass + "]: needs to be a subclass of [com.pinecone.slime.jelly.source.ds.dao.DataAccessException]");
        } else {
            this.exceptionClass = exceptionClass;
        }
    }

    @Nullable
    public Class getExceptionClass() {
        return this.exceptionClass;
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/DatabaseMetaDataCallback.java ================================================
package com.pinecone.slime.jelly.source.ds.jdbc;

import java.sql.DatabaseMetaData;
import java.sql.SQLException;

import com.pinecone.framework.system.prototype.Pinenut;

@FunctionalInterface
public interface DatabaseMetaDataCallback<T> extends Pinenut {
    T processMetaData( DatabaseMetaData metaData ) throws SQLException, MetaDataAccessException;
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/InvalidResultSetAccessException.java ================================================
package com.pinecone.slime.jelly.source.ds.jdbc;

import java.sql.SQLException;

import com.pinecone.framework.system.Nullable;
import com.pinecone.slime.jelly.source.ds.dao.InvalidDataAccessResourceUsageException;

public class InvalidResultSetAccessException extends InvalidDataAccessResourceUsageException {
    @Nullable
    private final String sql;

    public InvalidResultSetAccessException(String task, String sql, SQLException ex) {
        super( task + "; invalid ResultSet access for SQL [" + sql + "]", ex );
        this.sql = sql;
    }

    public InvalidResultSetAccessException(SQLException ex) {
        super( ex.getMessage(), ex );
        this.sql = null;
    }

    public SQLException getSQLException() {
        return (SQLException) this.getCause();
    }

    @Nullable
    public String getSql() {
        return this.sql;
    }
}
================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/MetaDataAccessException.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import com.pinecone.framework.system.NestedCheckedException; public class MetaDataAccessException extends NestedCheckedException { public MetaDataAccessException( String msg ) { super(msg); } public MetaDataAccessException( String msg, Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLErrorCodeSQLExceptionTranslator.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import java.lang.reflect.Constructor; import java.sql.BatchUpdateException; import java.sql.SQLException; import java.util.Arrays; import javax.sql.DataSource; import com.pinecone.framework.unit.SingletonSupplier; import com.pinecone.framework.util.SupplierUtils; import com.pinecone.slime.jelly.source.ds.dao.CannotAcquireLockException; import com.pinecone.slime.jelly.source.ds.dao.CannotSerializeTransactionException; import com.pinecone.slime.jelly.source.ds.dao.DataAccessException; import com.pinecone.slime.jelly.source.ds.dao.DataAccessResourceFailureException; import com.pinecone.slime.jelly.source.ds.dao.DataIntegrityViolationException; import com.pinecone.slime.jelly.source.ds.dao.DeadlockLoserDataAccessException; import com.pinecone.slime.jelly.source.ds.dao.DuplicateKeyException; import com.pinecone.slime.jelly.source.ds.dao.PermissionDeniedDataAccessException; import com.pinecone.slime.jelly.source.ds.dao.TransientDataAccessResourceException; import com.pinecone.framework.system.Nullable; public class SQLErrorCodeSQLExceptionTranslator extends AbstractFallbackSQLExceptionTranslator { private static final int MESSAGE_ONLY_CONSTRUCTOR = 1; private static final int MESSAGE_THROWABLE_CONSTRUCTOR = 2; private static final int MESSAGE_SQLEX_CONSTRUCTOR = 3; private static final int MESSAGE_SQL_THROWABLE_CONSTRUCTOR = 4; private static final int MESSAGE_SQL_SQLEX_CONSTRUCTOR = 5; @Nullable private SingletonSupplier sqlErrorCodes; public SQLErrorCodeSQLExceptionTranslator() { this.setFallbackTranslator(new SQLExceptionSubclassTranslator()); } public SQLErrorCodeSQLExceptionTranslator( DataSource dataSource) { this(); this.setDataSource(dataSource); } public SQLErrorCodeSQLExceptionTranslator(String dbName) { this(); this.setDatabaseProductName(dbName); } public SQLErrorCodeSQLExceptionTranslator(SQLErrorCodes sec) { this(); this.sqlErrorCodes = SingletonSupplier.of(sec); } public void setDataSource(DataSource dataSource) { this.sqlErrorCodes = SingletonSupplier.of(() -> { return SQLErrorCodesFactory.getInstance().resolveErrorCodes(dataSource); }); this.sqlErrorCodes.get(); } public void setDatabaseProductName(String dbName) { this.sqlErrorCodes = SingletonSupplier.of(SQLErrorCodesFactory.getInstance().getErrorCodes(dbName)); } public void setSqlErrorCodes(@Nullable SQLErrorCodes sec) { this.sqlErrorCodes = SingletonSupplier.ofNullable(sec); } @Nullable public SQLErrorCodes getSqlErrorCodes() { return (SQLErrorCodes) SupplierUtils.resolve(this.sqlErrorCodes); } @Nullable protected DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex) { SQLException sqlEx = ex; if (ex instanceof BatchUpdateException && ex.getNextException() != null) { SQLException nestedSqlEx = ex.getNextException(); 
if (nestedSqlEx.getErrorCode() > 0 || nestedSqlEx.getSQLState() != null) { sqlEx = nestedSqlEx; } } DataAccessException dae = this.customTranslate(task, sql, sqlEx); if (dae != null) { return dae; } else { SQLErrorCodes sqlErrorCodes = this.getSqlErrorCodes(); if (sqlErrorCodes != null) { SQLExceptionTranslator customTranslator = sqlErrorCodes.getCustomSqlExceptionTranslator(); if (customTranslator != null) { DataAccessException customDex = customTranslator.translate(task, sql, sqlEx); if (customDex != null) { return customDex; } } } String errorCode; if (sqlErrorCodes != null) { if (sqlErrorCodes.isUseSqlStateForTranslation()) { errorCode = sqlEx.getSQLState(); } else { SQLException current; for(current = sqlEx; current.getErrorCode() == 0 && current.getCause() instanceof SQLException; current = (SQLException)current.getCause()) { } errorCode = Integer.toString(current.getErrorCode()); } if (errorCode != null) { CustomSQLErrorCodesTranslation[] customTranslations = sqlErrorCodes.getCustomTranslations(); if (customTranslations != null) { CustomSQLErrorCodesTranslation[] var9 = customTranslations; int var10 = customTranslations.length; for(int var11 = 0; var11 < var10; ++var11) { CustomSQLErrorCodesTranslation customTranslation = var9[var11]; if (Arrays.binarySearch(customTranslation.getErrorCodes(), errorCode) >= 0 && customTranslation.getExceptionClass() != null) { DataAccessException customException = this.createCustomException(task, sql, sqlEx, customTranslation.getExceptionClass()); if (customException != null) { this.logTranslation(task, sql, sqlEx, true); return customException; } } } } if (Arrays.binarySearch(sqlErrorCodes.getBadSqlGrammarCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new BadSqlGrammarException(task, sql != null ? sql : "", sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getInvalidResultSetAccessCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new InvalidResultSetAccessException(task, sql != null ? 
sql : "", sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getDuplicateKeyCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new DuplicateKeyException(this.buildMessage(task, sql, sqlEx), sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getDataIntegrityViolationCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new DataIntegrityViolationException(this.buildMessage(task, sql, sqlEx), sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getPermissionDeniedCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new PermissionDeniedDataAccessException(this.buildMessage(task, sql, sqlEx), sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getDataAccessResourceFailureCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new DataAccessResourceFailureException(this.buildMessage(task, sql, sqlEx), sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getTransientDataAccessResourceCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new TransientDataAccessResourceException(this.buildMessage(task, sql, sqlEx), sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getCannotAcquireLockCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new CannotAcquireLockException(this.buildMessage(task, sql, sqlEx), sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getDeadlockLoserCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new DeadlockLoserDataAccessException(this.buildMessage(task, sql, sqlEx), sqlEx); } if (Arrays.binarySearch(sqlErrorCodes.getCannotSerializeTransactionCodes(), errorCode) >= 0) { this.logTranslation(task, sql, sqlEx, false); return new CannotSerializeTransactionException(this.buildMessage(task, sql, sqlEx), sqlEx); } } } if (this.logger.isDebugEnabled()) { if (sqlErrorCodes != null && sqlErrorCodes.isUseSqlStateForTranslation()) { errorCode = "SQL state '" + sqlEx.getSQLState() + "', error code '" + sqlEx.getErrorCode(); } else { errorCode = "Error code '" + sqlEx.getErrorCode() + "'"; } this.logger.debug("Unable to translate SQLException with " + errorCode + ", will now try the fallback translator"); } return null; } } @Nullable protected DataAccessException customTranslate(String task, @Nullable String sql, SQLException sqlEx) { return null; } @Nullable protected DataAccessException createCustomException(String task, @Nullable String sql, SQLException sqlEx, Class exceptionClass) { try { int constructorType = 0; Constructor[] constructors = exceptionClass.getConstructors(); Constructor[] var7 = constructors; int var8 = constructors.length; for(int var9 = 0; var9 < var8; ++var9) { Constructor constructor = var7[var9]; Class[] parameterTypes = constructor.getParameterTypes(); if (parameterTypes.length == 1 && String.class == parameterTypes[0] && constructorType < 1) { constructorType = 1; } if (parameterTypes.length == 2 && String.class == parameterTypes[0] && Throwable.class == parameterTypes[1] && constructorType < 2) { constructorType = 2; } if (parameterTypes.length == 2 && String.class == parameterTypes[0] && SQLException.class == parameterTypes[1] && constructorType < 3) { constructorType = 3; } if (parameterTypes.length == 3 && String.class == parameterTypes[0] && String.class == parameterTypes[1] && Throwable.class == parameterTypes[2] && constructorType < 4) { constructorType = 4; } if (parameterTypes.length == 3 && String.class == parameterTypes[0] && String.class == parameterTypes[1] && SQLException.class 
== parameterTypes[2] && constructorType < 5) { constructorType = 5; } } Constructor exceptionConstructor; switch(constructorType) { case 1: Class[] messageOnlyArgsClass = new Class[]{String.class}; Object[] messageOnlyArgs = new Object[]{task + ": " + sqlEx.getMessage()}; exceptionConstructor = exceptionClass.getConstructor(messageOnlyArgsClass); return (DataAccessException)exceptionConstructor.newInstance(messageOnlyArgs); case 2: Class[] messageAndThrowableArgsClass = new Class[]{String.class, Throwable.class}; Object[] messageAndThrowableArgs = new Object[]{task + ": " + sqlEx.getMessage(), sqlEx}; exceptionConstructor = exceptionClass.getConstructor(messageAndThrowableArgsClass); return (DataAccessException)exceptionConstructor.newInstance(messageAndThrowableArgs); case 3: Class[] messageAndSqlExArgsClass = new Class[]{String.class, SQLException.class}; Object[] messageAndSqlExArgs = new Object[]{task + ": " + sqlEx.getMessage(), sqlEx}; exceptionConstructor = exceptionClass.getConstructor(messageAndSqlExArgsClass); return (DataAccessException)exceptionConstructor.newInstance(messageAndSqlExArgs); case 4: Class[] messageAndSqlAndThrowableArgsClass = new Class[]{String.class, String.class, Throwable.class}; Object[] messageAndSqlAndThrowableArgs = new Object[]{task, sql, sqlEx}; exceptionConstructor = exceptionClass.getConstructor(messageAndSqlAndThrowableArgsClass); return (DataAccessException)exceptionConstructor.newInstance(messageAndSqlAndThrowableArgs); case 5: Class[] messageAndSqlAndSqlExArgsClass = new Class[]{String.class, String.class, SQLException.class}; Object[] messageAndSqlAndSqlExArgs = new Object[]{task, sql, sqlEx}; exceptionConstructor = exceptionClass.getConstructor(messageAndSqlAndSqlExArgsClass); return (DataAccessException)exceptionConstructor.newInstance(messageAndSqlAndSqlExArgs); default: if (this.logger.isWarnEnabled()) { this.logger.warn("Unable to find appropriate constructor of custom exception class [" + exceptionClass.getName() + "]"); } return null; } } catch ( Throwable e ) { if (this.logger.isWarnEnabled()) { this.logger.warn("Unable to instantiate custom exception class [" + exceptionClass.getName() + "]", e); } return null; } } private void logTranslation(String task, @Nullable String sql, SQLException sqlEx, boolean custom) { if (this.logger.isDebugEnabled()) { String intro = custom ? "Custom translation of" : "Translating"; this.logger.debug(intro + " SQLException with SQL state '" + sqlEx.getSQLState() + "', error code '" + sqlEx.getErrorCode() + "', message [" + sqlEx.getMessage() + "]" + (sql != null ? 
"; SQL was [" + sql + "]" : "") + " for task [" + task + "]"); } } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLErrorCodes.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.ReflectionUtils; public class SQLErrorCodes implements Pinenut { @Nullable private String[] databaseProductNames; private boolean useSqlStateForTranslation = false; private String[] badSqlGrammarCodes = new String[0]; private String[] invalidResultSetAccessCodes = new String[0]; private String[] duplicateKeyCodes = new String[0]; private String[] dataIntegrityViolationCodes = new String[0]; private String[] permissionDeniedCodes = new String[0]; private String[] dataAccessResourceFailureCodes = new String[0]; private String[] transientDataAccessResourceCodes = new String[0]; private String[] cannotAcquireLockCodes = new String[0]; private String[] deadlockLoserCodes = new String[0]; private String[] cannotSerializeTransactionCodes = new String[0]; @Nullable private CustomSQLErrorCodesTranslation[] customTranslations; @Nullable private SQLExceptionTranslator customSqlExceptionTranslator; public SQLErrorCodes() { } public void setDatabaseProductName(@Nullable String databaseProductName) { this.databaseProductNames = new String[]{databaseProductName}; } @Nullable public String getDatabaseProductName() { return this.databaseProductNames != null && this.databaseProductNames.length > 0 ? this.databaseProductNames[0] : null; } public void setDatabaseProductNames(@Nullable String... databaseProductNames) { this.databaseProductNames = databaseProductNames; } @Nullable public String[] getDatabaseProductNames() { return this.databaseProductNames; } public void setUseSqlStateForTranslation(boolean useStateCodeForTranslation) { this.useSqlStateForTranslation = useStateCodeForTranslation; } public boolean isUseSqlStateForTranslation() { return this.useSqlStateForTranslation; } public void setBadSqlGrammarCodes(String... badSqlGrammarCodes) { this.badSqlGrammarCodes = StringUtils.sortStringArray(badSqlGrammarCodes); } public String[] getBadSqlGrammarCodes() { return this.badSqlGrammarCodes; } public void setInvalidResultSetAccessCodes(String... invalidResultSetAccessCodes) { this.invalidResultSetAccessCodes = StringUtils.sortStringArray(invalidResultSetAccessCodes); } public String[] getInvalidResultSetAccessCodes() { return this.invalidResultSetAccessCodes; } public String[] getDuplicateKeyCodes() { return this.duplicateKeyCodes; } public void setDuplicateKeyCodes(String... duplicateKeyCodes) { this.duplicateKeyCodes = duplicateKeyCodes; } public void setDataIntegrityViolationCodes(String... dataIntegrityViolationCodes) { this.dataIntegrityViolationCodes = StringUtils.sortStringArray(dataIntegrityViolationCodes); } public String[] getDataIntegrityViolationCodes() { return this.dataIntegrityViolationCodes; } public void setPermissionDeniedCodes(String... permissionDeniedCodes) { this.permissionDeniedCodes = StringUtils.sortStringArray(permissionDeniedCodes); } public String[] getPermissionDeniedCodes() { return this.permissionDeniedCodes; } public void setDataAccessResourceFailureCodes(String... 
dataAccessResourceFailureCodes) { this.dataAccessResourceFailureCodes = StringUtils.sortStringArray(dataAccessResourceFailureCodes); } public String[] getDataAccessResourceFailureCodes() { return this.dataAccessResourceFailureCodes; } public void setTransientDataAccessResourceCodes(String... transientDataAccessResourceCodes) { this.transientDataAccessResourceCodes = StringUtils.sortStringArray(transientDataAccessResourceCodes); } public String[] getTransientDataAccessResourceCodes() { return this.transientDataAccessResourceCodes; } public void setCannotAcquireLockCodes(String... cannotAcquireLockCodes) { this.cannotAcquireLockCodes = StringUtils.sortStringArray(cannotAcquireLockCodes); } public String[] getCannotAcquireLockCodes() { return this.cannotAcquireLockCodes; } public void setDeadlockLoserCodes(String... deadlockLoserCodes) { this.deadlockLoserCodes = StringUtils.sortStringArray(deadlockLoserCodes); } public String[] getDeadlockLoserCodes() { return this.deadlockLoserCodes; } public void setCannotSerializeTransactionCodes(String... cannotSerializeTransactionCodes) { this.cannotSerializeTransactionCodes = StringUtils.sortStringArray(cannotSerializeTransactionCodes); } public String[] getCannotSerializeTransactionCodes() { return this.cannotSerializeTransactionCodes; } public void setCustomTranslations(CustomSQLErrorCodesTranslation... customTranslations) { this.customTranslations = customTranslations; } @Nullable public CustomSQLErrorCodesTranslation[] getCustomTranslations() { return this.customTranslations; } public void setCustomSqlExceptionTranslatorClass( @Nullable Class customTranslatorClass ) { if (customTranslatorClass != null) { try { this.customSqlExceptionTranslator = (SQLExceptionTranslator) ReflectionUtils.accessibleConstructor(customTranslatorClass, new Class[0]).newInstance(); } catch (Throwable e) { throw new IllegalStateException("Unable to instantiate custom translator", e); } } else { this.customSqlExceptionTranslator = null; } } public void setCustomSqlExceptionTranslator( @Nullable SQLExceptionTranslator customSqlExceptionTranslator ) { this.customSqlExceptionTranslator = customSqlExceptionTranslator; } @Nullable public SQLExceptionTranslator getCustomSqlExceptionTranslator() { return this.customSqlExceptionTranslator; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLErrorCodesFactory.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import java.util.Collections; import java.util.Iterator; import java.util.Map; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.ConcurrentReferenceHashMap; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.PatternMatchUtils; public class SQLErrorCodesFactory implements Pinenut { private static final Log logger = LogFactory.getLog(SQLErrorCodesFactory.class); private static final SQLErrorCodesFactory instance = new SQLErrorCodesFactory(); private final Map errorCodesMap; private final Map dataSourceCache = new ConcurrentReferenceHashMap<>(16); public static SQLErrorCodesFactory getInstance() { return instance; } protected SQLErrorCodesFactory() { Map errorCodes = Collections.emptyMap(); /*try { DefaultListableBeanFactory lbf = new DefaultListableBeanFactory(); 
lbf.setBeanClassLoader(this.getClass().getClassLoader()); XmlBeanDefinitionReader bdr = new XmlBeanDefinitionReader(lbf); Resource resource = this.loadResource("org/springframework/jdbc/support/sql-error-codes.xml"); if (resource != null && resource.exists()) { bdr.loadBeanDefinitions(resource); } else { logger.info("Default sql-error-codes.xml not found (should be included in spring-jdbc jar)"); } resource = this.loadResource("sql-error-codes.xml"); if (resource != null && resource.exists()) { bdr.loadBeanDefinitions(resource); logger.debug("Found custom sql-error-codes.xml file at the root of the classpath"); } errorCodes = lbf.getBeansOfType(SQLErrorCodes.class, true, false); if (logger.isTraceEnabled()) { logger.trace("SQLErrorCodes loaded: " + errorCodes.keySet()); } } catch (BeansException var5) { logger.warn("Error loading SQL error codes from config file", var5); errorCodes = Collections.emptyMap(); }*/ this.errorCodesMap = errorCodes; } public SQLErrorCodes getErrorCodes(String databaseName) { Assert.notNull(databaseName, "Database product name must not be null"); SQLErrorCodes sec = (SQLErrorCodes)this.errorCodesMap.get(databaseName); if (sec == null) { Iterator iter = this.errorCodesMap.values().iterator(); while(iter.hasNext()) { SQLErrorCodes candidate = (SQLErrorCodes)iter.next(); if ( PatternMatchUtils.simpleMatch(candidate.getDatabaseProductNames(), databaseName) ) { sec = candidate; break; } } } if (sec != null) { //this.checkCustomTranslatorRegistry(databaseName, sec); if (logger.isDebugEnabled()) { logger.debug("SQL error codes for '" + databaseName + "' found"); } return sec; } else { if (logger.isDebugEnabled()) { logger.debug("SQL error codes for '" + databaseName + "' not found"); } return new SQLErrorCodes(); } } public SQLErrorCodes getErrorCodes(DataSource dataSource) { SQLErrorCodes sec = this.resolveErrorCodes(dataSource); return sec != null ? sec : new SQLErrorCodes(); } @Nullable public SQLErrorCodes resolveErrorCodes( DataSource dataSource ) { Assert.notNull(dataSource, "DataSource must not be null"); if (logger.isDebugEnabled()) { logger.debug("Looking up default SQLErrorCodes for DataSource [" + this.identify(dataSource) + "]"); } SQLErrorCodes sec = (SQLErrorCodes)this.dataSourceCache.get(dataSource); if (sec == null) { synchronized(this.dataSourceCache) { sec = (SQLErrorCodes)this.dataSourceCache.get(dataSource); if (sec == null) { // TODO, implement JDBC support. 
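// A plain-JDBC sketch of what this TODO could do, using only standard
// java.sql APIs (the commented-out lines below show the original Spring
// flow this class was adapted from; registerDatabase is defined later in
// this class):
//
//     try ( java.sql.Connection con = dataSource.getConnection() ) {
//         String name = con.getMetaData().getDatabaseProductName();
//         if ( name != null && !name.isEmpty() ) {
//             return this.registerDatabase( dataSource, name );
//         }
//     }
//     catch ( java.sql.SQLException e ) {
//         logger.warn( "Error while extracting database name", e );
//     }
//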
// try { // String name = (String)JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseProductName); // if ( StringUtils.hasLength( name ) ) { // return this.registerDatabase(dataSource, name); // } // } // catch ( MetaDataAccessException e ) { // logger.warn("Error while extracting database name", e); // } return null; } } } if (logger.isDebugEnabled()) { logger.debug("SQLErrorCodes found in cache for DataSource [" + this.identify(dataSource) + "]"); } return sec; } public SQLErrorCodes registerDatabase(DataSource dataSource, String databaseName) { SQLErrorCodes sec = this.getErrorCodes(databaseName); if (logger.isDebugEnabled()) { logger.debug("Caching SQL error codes for DataSource [" + this.identify(dataSource) + "]: database product name is '" + databaseName + "'"); } this.dataSourceCache.put(dataSource, sec); return sec; } @Nullable public SQLErrorCodes unregisterDatabase(DataSource dataSource) { return (SQLErrorCodes)this.dataSourceCache.remove(dataSource); } private String identify(DataSource dataSource) { return dataSource.getClass().getName() + '@' + Integer.toHexString(dataSource.hashCode()); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLExceptionSubclassTranslator.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import java.sql.SQLDataException; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLIntegrityConstraintViolationException; import java.sql.SQLInvalidAuthorizationSpecException; import java.sql.SQLNonTransientConnectionException; import java.sql.SQLNonTransientException; import java.sql.SQLRecoverableException; import java.sql.SQLSyntaxErrorException; import java.sql.SQLTimeoutException; import java.sql.SQLTransactionRollbackException; import java.sql.SQLTransientConnectionException; import java.sql.SQLTransientException; import com.pinecone.slime.jelly.source.ds.dao.ConcurrencyFailureException; import com.pinecone.slime.jelly.source.ds.dao.DataAccessException; import com.pinecone.slime.jelly.source.ds.dao.DataAccessResourceFailureException; import com.pinecone.slime.jelly.source.ds.dao.DataIntegrityViolationException; import com.pinecone.slime.jelly.source.ds.dao.InvalidDataAccessApiUsageException; import com.pinecone.slime.jelly.source.ds.dao.PermissionDeniedDataAccessException; import com.pinecone.slime.jelly.source.ds.dao.QueryTimeoutException; import com.pinecone.slime.jelly.source.ds.dao.RecoverableDataAccessException; import com.pinecone.slime.jelly.source.ds.dao.TransientDataAccessResourceException; import com.pinecone.framework.system.Nullable; public class SQLExceptionSubclassTranslator extends AbstractFallbackSQLExceptionTranslator { public SQLExceptionSubclassTranslator() { this.setFallbackTranslator( new SQLStateSQLExceptionTranslator() ); } @Nullable @Override protected DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex) { if (ex instanceof SQLTransientException) { if (ex instanceof SQLTransientConnectionException) { return new TransientDataAccessResourceException(this.buildMessage(task, sql, ex), ex); } if (ex instanceof SQLTransactionRollbackException) { return new ConcurrencyFailureException(this.buildMessage(task, sql, ex), ex); } if (ex instanceof SQLTimeoutException) { return new QueryTimeoutException(this.buildMessage(task, sql, ex), ex); } } else if (ex instanceof SQLNonTransientException) { if (ex instanceof 
SQLNonTransientConnectionException) { return new DataAccessResourceFailureException(this.buildMessage(task, sql, ex), ex); } if (ex instanceof SQLDataException) { return new DataIntegrityViolationException(this.buildMessage(task, sql, ex), ex); } if (ex instanceof SQLIntegrityConstraintViolationException) { return new DataIntegrityViolationException(this.buildMessage(task, sql, ex), ex); } if (ex instanceof SQLInvalidAuthorizationSpecException) { return new PermissionDeniedDataAccessException(this.buildMessage(task, sql, ex), ex); } if (ex instanceof SQLSyntaxErrorException) { return new BadSqlGrammarException(task, sql != null ? sql : "", ex); } if (ex instanceof SQLFeatureNotSupportedException) { return new InvalidDataAccessApiUsageException(this.buildMessage(task, sql, ex), ex); } } else if (ex instanceof SQLRecoverableException) { return new RecoverableDataAccessException(this.buildMessage(task, sql, ex), ex); } return null; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLExceptionTranslator.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import java.sql.SQLException; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.slime.jelly.source.ds.dao.DataAccessException; @FunctionalInterface public interface SQLExceptionTranslator extends Pinenut { @Nullable DataAccessException translate(String task, @Nullable String sql, SQLException e ); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLStateSQLExceptionTranslator.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import java.sql.SQLException; import java.util.HashSet; import java.util.Set; import com.pinecone.framework.system.Nullable; import com.pinecone.slime.jelly.source.ds.dao.ConcurrencyFailureException; import com.pinecone.slime.jelly.source.ds.dao.DataAccessException; import com.pinecone.slime.jelly.source.ds.dao.DataAccessResourceFailureException; import com.pinecone.slime.jelly.source.ds.dao.DataIntegrityViolationException; import com.pinecone.slime.jelly.source.ds.dao.QueryTimeoutException; import com.pinecone.slime.jelly.source.ds.dao.TransientDataAccessResourceException; public class SQLStateSQLExceptionTranslator extends AbstractFallbackSQLExceptionTranslator { private static final Set BAD_SQL_GRAMMAR_CODES = new HashSet<>(8); private static final Set DATA_INTEGRITY_VIOLATION_CODES = new HashSet<>(8); private static final Set DATA_ACCESS_RESOURCE_FAILURE_CODES = new HashSet<>(8); private static final Set TRANSIENT_DATA_ACCESS_RESOURCE_CODES = new HashSet<>(8); private static final Set CONCURRENCY_FAILURE_CODES = new HashSet<>(4); public SQLStateSQLExceptionTranslator() { } @Nullable @Override protected DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex) { String sqlState = this.getSqlState(ex); if (sqlState != null && sqlState.length() >= 2) { String classCode = sqlState.substring(0, 2); if (this.logger.isDebugEnabled()) { this.logger.debug("Extracted SQL state class '" + classCode + "' from value '" + sqlState + "'"); } if (BAD_SQL_GRAMMAR_CODES.contains(classCode)) { return new BadSqlGrammarException(task, sql != null ? 
sql : "", ex); } if (DATA_INTEGRITY_VIOLATION_CODES.contains(classCode)) { return new DataIntegrityViolationException(this.buildMessage(task, sql, ex), ex); } if (DATA_ACCESS_RESOURCE_FAILURE_CODES.contains(classCode)) { return new DataAccessResourceFailureException(this.buildMessage(task, sql, ex), ex); } if (TRANSIENT_DATA_ACCESS_RESOURCE_CODES.contains(classCode)) { return new TransientDataAccessResourceException(this.buildMessage(task, sql, ex), ex); } if (CONCURRENCY_FAILURE_CODES.contains(classCode)) { return new ConcurrencyFailureException(this.buildMessage(task, sql, ex), ex); } } return ex.getClass().getName().contains("Timeout") ? new QueryTimeoutException(this.buildMessage(task, sql, ex), ex) : null; } @Nullable private String getSqlState(SQLException ex) { String sqlState = ex.getSQLState(); if (sqlState == null) { SQLException nestedEx = ex.getNextException(); if (nestedEx != null) { sqlState = nestedEx.getSQLState(); } } return sqlState; } static { BAD_SQL_GRAMMAR_CODES.add("07"); BAD_SQL_GRAMMAR_CODES.add("21"); BAD_SQL_GRAMMAR_CODES.add("2A"); BAD_SQL_GRAMMAR_CODES.add("37"); BAD_SQL_GRAMMAR_CODES.add("42"); BAD_SQL_GRAMMAR_CODES.add("65"); DATA_INTEGRITY_VIOLATION_CODES.add("01"); DATA_INTEGRITY_VIOLATION_CODES.add("02"); DATA_INTEGRITY_VIOLATION_CODES.add("22"); DATA_INTEGRITY_VIOLATION_CODES.add("23"); DATA_INTEGRITY_VIOLATION_CODES.add("27"); DATA_INTEGRITY_VIOLATION_CODES.add("44"); DATA_ACCESS_RESOURCE_FAILURE_CODES.add("08"); DATA_ACCESS_RESOURCE_FAILURE_CODES.add("53"); DATA_ACCESS_RESOURCE_FAILURE_CODES.add("54"); DATA_ACCESS_RESOURCE_FAILURE_CODES.add("57"); DATA_ACCESS_RESOURCE_FAILURE_CODES.add("58"); TRANSIENT_DATA_ACCESS_RESOURCE_CODES.add("JW"); TRANSIENT_DATA_ACCESS_RESOURCE_CODES.add("JZ"); TRANSIENT_DATA_ACCESS_RESOURCE_CODES.add("S1"); CONCURRENCY_FAILURE_CODES.add("40"); CONCURRENCY_FAILURE_CODES.add("61"); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/TransientDataAccessException.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import com.pinecone.framework.system.Nullable; import com.pinecone.slime.jelly.source.ds.dao.DataAccessException; public abstract class TransientDataAccessException extends DataAccessException { public TransientDataAccessException( String msg ) { super(msg); } public TransientDataAccessException( String msg, @Nullable Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/TransientDataAccessResourceException.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; public class TransientDataAccessResourceException extends TransientDataAccessException { public TransientDataAccessResourceException(String msg) { super(msg); } public TransientDataAccessResourceException(String msg, Throwable cause) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/UncategorizedDataAccessException.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import com.pinecone.framework.system.Nullable; import com.pinecone.slime.jelly.source.ds.dao.NonTransientDataAccessException; public abstract class UncategorizedDataAccessException extends NonTransientDataAccessException { public UncategorizedDataAccessException( 
@Nullable String msg, @Nullable Throwable cause ) { super( msg, cause ); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/UncategorizedSQLException.java ================================================ package com.pinecone.slime.jelly.source.ds.jdbc; import java.sql.SQLException; import com.pinecone.framework.system.Nullable; public class UncategorizedSQLException extends UncategorizedDataAccessException { @Nullable private final String sql; public UncategorizedSQLException( String task, @Nullable String sql, SQLException ex ) { super(task + "; uncategorized SQLException" + (sql != null ? " for SQL [" + sql + "]" : "") + "; SQL state [" + ex.getSQLState() + "]; error code [" + ex.getErrorCode() + "]; " + ex.getMessage(), ex); this.sql = sql; } public SQLException getSQLException() { return (SQLException)this.getCause(); } @Nullable public String getSql() { return this.sql; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/PlatformTransactionManager.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.system.Nullable; public interface PlatformTransactionManager extends TransactionManager { TransactionStatus getTransaction( @Nullable TransactionDefinition definition ) throws TransactionException; void commit( TransactionStatus status ) throws TransactionException; void rollback( TransactionStatus status ) throws TransactionException; } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/ResourceHolder.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.system.prototype.Pinenut; public interface ResourceHolder extends Pinenut { void reset(); void unbound(); boolean isVoid(); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/ResourceHolderSupport.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import java.util.Date; import com.pinecone.framework.system.Nullable; public abstract class ResourceHolderSupport implements ResourceHolder { private boolean synchronizedWithTransaction = false; private boolean rollbackOnly = false; @Nullable private Date deadline; private int referenceCount = 0; private boolean isVoid = false; public ResourceHolderSupport() { } public void setSynchronizedWithTransaction(boolean synchronizedWithTransaction) { this.synchronizedWithTransaction = synchronizedWithTransaction; } public boolean isSynchronizedWithTransaction() { return this.synchronizedWithTransaction; } public void setRollbackOnly() { this.rollbackOnly = true; } public void resetRollbackOnly() { this.rollbackOnly = false; } public boolean isRollbackOnly() { return this.rollbackOnly; } public void setTimeoutInSeconds(int seconds) { this.setTimeoutInMillis((long)seconds * 1000L); } public void setTimeoutInMillis(long millis) { this.deadline = new Date(System.currentTimeMillis() + millis); } public boolean hasTimeout() { return this.deadline != null; } @Nullable public Date getDeadline() { return this.deadline; } public int getTimeToLiveInSeconds() { double diff = (double)this.getTimeToLiveInMillis() / 1000.0D; int secs = (int)Math.ceil(diff); 
this.checkTransactionTimeout(secs <= 0); return secs; } public long getTimeToLiveInMillis() throws TransactionTimedOutException { if (this.deadline == null) { throw new IllegalStateException("No timeout specified for this resource holder"); } else { long timeToLive = this.deadline.getTime() - System.currentTimeMillis(); this.checkTransactionTimeout(timeToLive <= 0L); return timeToLive; } } private void checkTransactionTimeout(boolean deadlineReached) throws TransactionTimedOutException { if (deadlineReached) { this.setRollbackOnly(); throw new TransactionTimedOutException("Transaction timed out: deadline was " + this.deadline); } } public void requested() { ++this.referenceCount; } public void released() { --this.referenceCount; } public boolean isOpen() { return this.referenceCount > 0; } public void clear() { this.synchronizedWithTransaction = false; this.rollbackOnly = false; this.deadline = null; } public void reset() { this.clear(); this.referenceCount = 0; } public void unbound() { this.isVoid = true; } public boolean isVoid() { return this.isVoid; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/ResourceTransactionManager.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; public interface ResourceTransactionManager extends PlatformTransactionManager { Object getResourceFactory(); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/SavepointManager.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.system.prototype.Pinenut; public interface SavepointManager extends Pinenut { Object createSavepoint() throws TransactionException; void rollbackToSavepoint(Object point) throws TransactionException; void releaseSavepoint(Object point) throws TransactionException; } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/StaticTransactionDefinition.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; final class StaticTransactionDefinition implements TransactionDefinition { static final StaticTransactionDefinition INSTANCE = new StaticTransactionDefinition(); private StaticTransactionDefinition() { } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionDefinition.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; public interface TransactionDefinition extends Pinenut { int PROPAGATION_REQUIRED = 0; int PROPAGATION_SUPPORTS = 1; int PROPAGATION_MANDATORY = 2; int PROPAGATION_REQUIRES_NEW = 3; int PROPAGATION_NOT_SUPPORTED = 4; int PROPAGATION_NEVER = 5; int PROPAGATION_NESTED = 6; int ISOLATION_DEFAULT = -1; int ISOLATION_READ_UNCOMMITTED = 1; int ISOLATION_READ_COMMITTED = 2; int ISOLATION_REPEATABLE_READ = 4; int ISOLATION_SERIALIZABLE = 8; int TIMEOUT_DEFAULT = -1; default int getPropagationBehavior() { return 0; } default int getIsolationLevel() { return -1; } default int getTimeout() { return -1; } default boolean isReadOnly() { return false; } @Nullable default String getName() { return null; } 
static TransactionDefinition withDefaults() { return StaticTransactionDefinition.INSTANCE; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionException.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.system.NestedRuntimeException; public abstract class TransactionException extends NestedRuntimeException { public TransactionException( String msg ) { super(msg); } public TransactionException( String msg, Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionExecution.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.system.prototype.Pinenut; public interface TransactionExecution extends Pinenut { boolean isNewTransaction(); void setRollbackOnly(); boolean isRollbackOnly(); boolean isCompleted(); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionManager.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.system.prototype.Pinenut; public interface TransactionManager extends Pinenut { } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionStatus.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import java.io.Flushable; public interface TransactionStatus extends TransactionExecution, SavepointManager, Flushable { boolean hasSavepoint(); @Override void flush(); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronization.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import java.io.Flushable; import com.pinecone.framework.util.comparator.Ordered; public interface TransactionSynchronization extends Ordered, Flushable { int STATUS_COMMITTED = 0; int STATUS_ROLLED_BACK = 1; int STATUS_UNKNOWN = 2; default int getOrder() { return 2147483647; } default void suspend() { } default void resume() { } default void flush() { } default void beforeCommit(boolean readOnly) { } default void beforeCompletion() { } default void afterCommit() { } default void afterCompletion(int status) { } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronizationAdapter.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import com.pinecone.framework.util.comparator.Ordered; public abstract class TransactionSynchronizationAdapter implements TransactionSynchronization, Ordered { public TransactionSynchronizationAdapter() { } public int getOrder() { return 2147483647; } public void suspend() { } public void resume() { } public void flush() { } public void beforeCommit(boolean readOnly) { } public void beforeCompletion() { } public void afterCommit() { } public void afterCompletion(int status) { } } ================================================ FILE: 
Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronizationManager.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.comparator.OrderComparator; import com.pinecone.framework.lang.NamedThreadLocal; public final class TransactionSynchronizationManager { private static final Log logger = LogFactory.getLog(TransactionSynchronizationManager.class); private static final ThreadLocal<Map<Object, Object>> resources = new NamedThreadLocal<>( "Transactional resources" ); private static final ThreadLocal<Set<TransactionSynchronization>> synchronizations = new NamedThreadLocal<>( "Transaction synchronizations" ); private static final ThreadLocal<String> currentTransactionName = new NamedThreadLocal<>( "Current transaction name" ); private static final ThreadLocal<Boolean> currentTransactionReadOnly = new NamedThreadLocal<>( "Current transaction read-only status" ); private static final ThreadLocal<Integer> currentTransactionIsolationLevel = new NamedThreadLocal<>( "Current transaction isolation level" ); private static final ThreadLocal<Boolean> actualTransactionActive = new NamedThreadLocal<>( "Actual transaction active" ); public TransactionSynchronizationManager() { } public static Map<Object, Object> getResourceMap() { Map<Object, Object> map = resources.get(); return map != null ? Collections.unmodifiableMap(map) : Collections.emptyMap(); } public static boolean hasResource(Object key) { Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key); Object value = doGetResource(actualKey); return value != null; } @Nullable public static Object getResource(Object key) { Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key); Object value = doGetResource(actualKey); if ( value != null && logger.isTraceEnabled() ) { logger.trace("Retrieved value [" + value + "] for key [" + actualKey + "] bound to thread [" + Thread.currentThread().getName() + "]"); } return value; } @Nullable private static Object doGetResource(Object actualKey) { Map<Object, Object> map = resources.get(); if ( map == null ) { return null; } else { Object value = map.get(actualKey); if ( value instanceof ResourceHolder && ((ResourceHolder)value).isVoid() ) { map.remove( actualKey ); if ( map.isEmpty() ) { TransactionSynchronizationManager.resources.remove(); } value = null; } return value; } } public static void bindResource(Object key, Object value) throws IllegalStateException { Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key); Assert.notNull(value, "Value must not be null"); Map<Object, Object> map = TransactionSynchronizationManager.resources.get(); if ( map == null ) { map = new HashMap<>(); TransactionSynchronizationManager.resources.set(map); } Object oldValue = map.put(actualKey, value); if ( oldValue instanceof ResourceHolder && ((ResourceHolder)oldValue).isVoid() ) { oldValue = null; } if ( oldValue != null ) { throw new IllegalStateException("Already value [" + oldValue + "] for key [" + actualKey + "] bound to thread [" + Thread.currentThread().getName() + "]"); } else { if (logger.isTraceEnabled()) { logger.trace("Bound value [" + value + "] for key [" + actualKey + "] to thread 
[" + Thread.currentThread().getName() + "]"); } } } public static Object unbindResource(Object key) throws IllegalStateException { Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key); Object value = doUnbindResource(actualKey); if (value == null) { throw new IllegalStateException("No value for key [" + actualKey + "] bound to thread [" + Thread.currentThread().getName() + "]"); } else { return value; } } @Nullable public static Object unbindResourceIfPossible(Object key) { Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key); return doUnbindResource(actualKey); } @Nullable private static Object doUnbindResource(Object actualKey) { Map map = (Map)TransactionSynchronizationManager.resources.get(); if ( map == null ) { return null; } else { Object value = map.remove(actualKey); if ( map.isEmpty() ) { TransactionSynchronizationManager.resources.remove(); } if ( value instanceof ResourceHolder && ((ResourceHolder)value).isVoid() ) { value = null; } if ( value != null && TransactionSynchronizationManager.logger.isTraceEnabled() ) { TransactionSynchronizationManager.logger.trace("Removed value [" + value + "] for key [" + actualKey + "] from thread [" + Thread.currentThread().getName() + "]"); } return value; } } public static boolean isSynchronizationActive() { return TransactionSynchronizationManager.synchronizations.get() != null; } public static void initSynchronization() throws IllegalStateException { if ( isSynchronizationActive() ) { throw new IllegalStateException("Cannot activate transaction synchronization - already active"); } else { TransactionSynchronizationManager.logger.trace("Initializing transaction synchronization"); TransactionSynchronizationManager.synchronizations.set( new LinkedHashSet<>() ); } } public static void registerSynchronization(TransactionSynchronization synchronization) throws IllegalStateException { Assert.notNull(synchronization, "TransactionSynchronization must not be null"); Set synchs = (Set)TransactionSynchronizationManager.synchronizations.get(); if ( synchs == null ) { throw new IllegalStateException("Transaction synchronization is not active"); } else { synchs.add(synchronization); } } public static List getSynchronizations() throws IllegalStateException { Set synchs = (Set)TransactionSynchronizationManager.synchronizations.get(); if (synchs == null) { throw new IllegalStateException("Transaction synchronization is not active"); } else if (synchs.isEmpty()) { return Collections.emptyList(); } else { List sortedSynchs = new ArrayList<>(synchs); OrderComparator.sort( sortedSynchs ); return Collections.unmodifiableList(sortedSynchs); } } public static void clearSynchronization() throws IllegalStateException { if (!isSynchronizationActive()) { throw new IllegalStateException("Cannot deactivate transaction synchronization - not active"); } else { TransactionSynchronizationManager.logger.trace("Clearing transaction synchronization"); TransactionSynchronizationManager.synchronizations.remove(); } } public static void setCurrentTransactionName( @Nullable String name ) { TransactionSynchronizationManager.currentTransactionName.set(name); } @Nullable public static String getCurrentTransactionName() { return (String)TransactionSynchronizationManager.currentTransactionName.get(); } public static void setCurrentTransactionReadOnly(boolean readOnly) { TransactionSynchronizationManager.currentTransactionReadOnly.set(readOnly ? 
Boolean.TRUE : null); } public static boolean isCurrentTransactionReadOnly() { return TransactionSynchronizationManager.currentTransactionReadOnly.get() != null; } public static void setCurrentTransactionIsolationLevel( @Nullable Integer isolationLevel ) { TransactionSynchronizationManager.currentTransactionIsolationLevel.set(isolationLevel); } @Nullable public static Integer getCurrentTransactionIsolationLevel() { return TransactionSynchronizationManager.currentTransactionIsolationLevel.get(); } public static void setActualTransactionActive(boolean active) { TransactionSynchronizationManager.actualTransactionActive.set(active ? Boolean.TRUE : null); } public static boolean isActualTransactionActive() { return TransactionSynchronizationManager.actualTransactionActive.get() != null; } public static void clear() { TransactionSynchronizationManager.synchronizations.remove(); TransactionSynchronizationManager.currentTransactionName.remove(); TransactionSynchronizationManager.currentTransactionReadOnly.remove(); TransactionSynchronizationManager.currentTransactionIsolationLevel.remove(); TransactionSynchronizationManager.actualTransactionActive.remove(); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronizationUtils.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; import java.util.Iterator; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.ClassUtils; import com.pinecone.framework.system.aop.InfrastructureProxy; import com.pinecone.framework.system.aop.ScopedObject; public final class TransactionSynchronizationUtils { private static final Log logger = LogFactory.getLog( TransactionSynchronizationUtils.class ); private static final boolean aopAvailable = ClassUtils.isPresent("com.pinecone.framework.system.aop.ScopedObject", TransactionSynchronizationUtils.class.getClassLoader()); public TransactionSynchronizationUtils() { } public static boolean sameResourceFactory( ResourceTransactionManager tm, Object resourceFactory ) { return unwrapResourceIfNecessary( tm.getResourceFactory()).equals(unwrapResourceIfNecessary(resourceFactory) ); } static Object unwrapResourceIfNecessary( Object resource ) { Assert.notNull( resource, "Resource must not be null" ); Object resourceRef = resource; if ( resource instanceof InfrastructureProxy ) { resourceRef = ((InfrastructureProxy)resource).getWrappedObject(); } if ( TransactionSynchronizationUtils.aopAvailable ) { resourceRef = TransactionSynchronizationUtils.ScopedProxyUnwrapper.unwrapIfNecessary( resourceRef ); } return resourceRef; } public static void triggerFlush() { Iterator<TransactionSynchronization> iter = TransactionSynchronizationManager.getSynchronizations().iterator(); while( iter.hasNext() ) { TransactionSynchronization synchronization = iter.next(); synchronization.flush(); } } public static void triggerBeforeCommit(boolean readOnly) { Iterator<TransactionSynchronization> iter = TransactionSynchronizationManager.getSynchronizations().iterator(); while( iter.hasNext() ) { TransactionSynchronization synchronization = iter.next(); synchronization.beforeCommit(readOnly); } }
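// Note on callback semantics (as implemented below): beforeCompletion and afterCompletion
// swallow any Throwable thrown by an individual synchronization and log it at debug level,
// so one failing callback cannot block the others; by contrast, flush and beforeCommit above,
// and afterCommit below, let exceptions propagate to the caller.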
public static void triggerBeforeCompletion() { Iterator<TransactionSynchronization> iter = TransactionSynchronizationManager.getSynchronizations().iterator(); while( iter.hasNext() ) { TransactionSynchronization synchronization = iter.next(); try { synchronization.beforeCompletion(); } catch ( Throwable e ) { logger.debug( "TransactionSynchronization.beforeCompletion threw exception", e ); } } } public static void triggerAfterCommit() { invokeAfterCommit(TransactionSynchronizationManager.getSynchronizations()); } public static void invokeAfterCommit( @Nullable List<TransactionSynchronization> synchronizations ) { if ( synchronizations != null ) { Iterator<TransactionSynchronization> iter = synchronizations.iterator(); while( iter.hasNext() ) { TransactionSynchronization synchronization = iter.next(); synchronization.afterCommit(); } } } public static void triggerAfterCompletion( int completionStatus ) { List<TransactionSynchronization> synchronizations = TransactionSynchronizationManager.getSynchronizations(); TransactionSynchronizationUtils.invokeAfterCompletion(synchronizations, completionStatus); } public static void invokeAfterCompletion( @Nullable List<TransactionSynchronization> synchronizations, int completionStatus ) { if ( synchronizations != null ) { Iterator<TransactionSynchronization> iter = synchronizations.iterator(); while( iter.hasNext() ) { TransactionSynchronization synchronization = iter.next(); try { synchronization.afterCompletion(completionStatus); } catch ( Throwable e ) { logger.debug("TransactionSynchronization.afterCompletion threw exception", e); } } } } private static class ScopedProxyUnwrapper { private ScopedProxyUnwrapper() { } public static Object unwrapIfNecessary(Object resource) { return resource instanceof ScopedObject ? ((ScopedObject)resource).getTargetObject() : resource; } } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionTimedOutException.java ================================================ package com.pinecone.slime.jelly.source.ds.transaction; public class TransactionTimedOutException extends TransactionException { public TransactionTimedOutException( String msg ) { super(msg); } public TransactionTimedOutException( String msg, Throwable cause ) { super(msg, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/ArchDynamicQuerierResultHandler.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.slime.source.GenericResultConverter; import com.pinecone.slime.source.rdb.RDBTargetTableMeta; import com.pinecone.slime.source.ResultConverter; import org.apache.ibatis.session.ResultHandler; import java.util.Map; public abstract class ArchDynamicQuerierResultHandler<V> implements ResultHandler<Map<String, Object>> { protected ResultConverter<V> mConverter ; protected RDBTargetTableMeta<V> mRDBTargetTableMeta; public ArchDynamicQuerierResultHandler( RDBTargetTableMeta<V> meta ) { this.mRDBTargetTableMeta = meta; if( this.mRDBTargetTableMeta.getResultConverter() == null ) { this.mRDBTargetTableMeta.setResultConverter( new GenericResultConverter<>( this.mRDBTargetTableMeta.getValueType(), this.mRDBTargetTableMeta.getValueMetaKeys() ) ); } this.mConverter = this.mRDBTargetTableMeta.getResultConverter(); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/DynamicQuerierEntityResultHandler.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import 
com.pinecone.slime.source.rdb.RDBTargetTableMeta; import org.apache.ibatis.session.ResultContext; import java.util.ArrayList; import java.util.List; import java.util.Map; public class DynamicQuerierEntityResultHandler<V> extends ArchDynamicQuerierResultHandler<V> { private List<V> mResults = new ArrayList<>(); public DynamicQuerierEntityResultHandler( RDBTargetTableMeta<V> meta ) { super( meta ); } @Override public void handleResult( ResultContext<? extends Map<String, Object>> context ) { Map<String, Object> resultObject = context.getResultObject(); this.mResults.add( this.mConverter.convert( resultObject ) ); } public List<V> getResults() { return this.mResults; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/DynamicQuerierMappedResultHandler.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.slime.map.QueryRange; import com.pinecone.slime.source.rdb.RDBTargetTableMeta; import org.apache.ibatis.session.ResultContext; import java.util.LinkedHashMap; import java.util.Map; public class DynamicQuerierMappedResultHandler<V> extends ArchDynamicQuerierResultHandler<V> { private Map<Object, V> mResults = new LinkedHashMap<>(); private QueryRange mRange; public DynamicQuerierMappedResultHandler( RDBTargetTableMeta<V> meta, QueryRange range ) { super( meta ); this.mRange = range; } @Override public void handleResult( ResultContext<? extends Map<String, Object>> context ) { Map<String, Object> resultObject = context.getResultObject(); String szRangeKey = this.mRange.getRangeKey(); Object keyVal = resultObject.get( szRangeKey ); resultObject.remove( szRangeKey ); this.mResults.put( keyVal, this.mConverter.convert( resultObject ) ); } public Map<Object, V> getResults() { return this.mResults; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/DynamicQuerierSqlBuilder.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.slime.map.QueryRange; import com.pinecone.slime.source.rdb.RDBTargetTableMeta; import com.pinecone.slime.source.ResultConverter; import org.apache.ibatis.jdbc.SQL; import java.util.Map; import java.util.Set; import java.util.TreeSet; public final class DynamicQuerierSqlBuilder { public static SQL assembleSelectSQL( RDBTargetTableMeta<?> meta, Set<String> keys ) { return new SQL() {{ if ( keys.isEmpty() ) { SELECT("*"); } else { SELECT( String.join( ", ", keys ) ); } FROM( meta.getTableName() ); }}; } public static String selectList ( Map<String, Object> params ) { RDBTargetTableMeta<?> meta = (RDBTargetTableMeta<?>) params.get( "meta" ); Set<String> keys = meta.getValueMetaKeys(); SQL sql = DynamicQuerierSqlBuilder.assembleSelectSQL( meta, keys ); return sql.toString() + "${exSafeSQL}"; } public static String selectObjectByRange( Map<String, Object> params, boolean bWithRangeKey ) { RDBTargetTableMeta<?> meta = (RDBTargetTableMeta<?>) params.get("meta"); QueryRange range = (QueryRange) params.get("range"); Set<String> keys = meta.getValueMetaKeys(); if( range != null && bWithRangeKey ) { keys = new TreeSet<>( keys ); keys.add( range.getRangeKey() ); } SQL sql = DynamicQuerierSqlBuilder.assembleSelectSQL( meta, keys ); if ( range != null ) { sql.WHERE( range.getRangeKey() + " >= #{range.min}" ); sql.WHERE( range.getRangeKey() + " <= #{range.max}" ); } return sql.toString(); } public static String selectListByRange( Map<String, Object> params ) { return DynamicQuerierSqlBuilder.selectObjectByRange( params, false ); } public static String selectMappedByRange( Map<String, Object> params ) { return 
DynamicQuerierSqlBuilder.selectObjectByRange( params, true ); } public static String selectListByColumn ( Map<String, Object> params ) { RDBTargetTableMeta<?> meta = (RDBTargetTableMeta<?>) params.get( "meta" ); Set<String> keys = meta.getValueMetaKeys(); SQL sql = new SQL() {{ if ( keys.isEmpty() ) { SELECT("*"); } else { SELECT( String.join( ", ", keys ) ); } FROM( meta.getTableName() ); WHERE( "#{columnKey} = #{key}" ); }}; return sql.toString(); } public static String insert ( Map<String, Object> params ) { RDBTargetTableMeta<?> meta = (RDBTargetTableMeta<?>) params.get("meta"); Object key = params.get("key"); Object entity = params.get("entity"); Set<String> keys = meta.getValueMetaKeys(); SQL sql = new SQL() {{ INSERT_INTO( meta.getTableName() ); if ( key != null ) { String szIdxKey = meta.getIndexKey(); VALUES( szIdxKey, "#{key}" ); keys.remove( szIdxKey ); } if( ResultConverter.isPrimitiveOrSpecialType( entity.getClass() ) ) { keys.forEach( k -> VALUES( k, "#{entity}" )); } else { keys.forEach( k -> VALUES( k, "#{entity." + k + "}" )); } }}; return sql.toString(); } public static String updateByEntity ( Map<String, Object> params ) { RDBTargetTableMeta<?> meta = (RDBTargetTableMeta<?>) params.get("meta"); Object entity = params.get("entity"); Set<String> keys = meta.getValueMetaKeys(); Object key = params.get("key"); SQL sql = new SQL() {{ UPDATE(meta.getTableName()); if( ResultConverter.isPrimitiveOrSpecialType( entity.getClass() ) ) { keys.forEach(k -> SET(k + " = #{entity}")); if( key != null ) { WHERE(key + " = #{entity}"); } else { WHERE(meta.getIndexKey() + " = #{entity}"); } } else { keys.forEach(k -> SET(k + " = #{entity." + k + "}")); WHERE(meta.getIndexKey() + " = #{entity." + meta.getIndexKey() + "}"); } }}; return sql.toString(); } public static String deleteByKey ( Map<String, Object> params ) { RDBTargetTableMeta<?> meta = (RDBTargetTableMeta<?>) params.get("meta"); Object key = params.get("key"); SQL sql = new SQL() {{ DELETE_FROM(meta.getTableName()); WHERE(meta.getIndexKey() + " = #{key}"); }}; return sql.toString(); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/GenericMybatisQuerierDataManipulator.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.rdb.SQLStrings; import com.pinecone.slime.map.QueryRange; import com.pinecone.slime.source.GenericResultConverter; import com.pinecone.slime.source.ResultConverter; import com.pinecone.slime.source.rdb.RDBTargetTableMeta; import com.pinecone.slime.source.rdb.RangedRDBQuerierDataManipulator; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.ResultType; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.SelectProvider; import org.apache.ibatis.annotations.InsertProvider; import org.apache.ibatis.annotations.UpdateProvider; import org.apache.ibatis.annotations.DeleteProvider; import org.apache.ibatis.annotations.Update; import org.apache.ibatis.session.ResultHandler; import java.util.ArrayList; import java.util.List; import java.util.Map; public interface GenericMybatisQuerierDataManipulator<K, V> extends RangedRDBQuerierDataManipulator<K, V> { @Override @Select( "SELECT COUNT(*) FROM ${meta.tableName} ${exSafeSQL}" ) long counts( @Param("meta") RDBTargetTableMeta<V> meta, @Param("exSafeSQL") String szExSafeSQL ); @Override @Select( "SELECT COUNT(*) FROM ${meta.tableName} WHERE ${keyName} = ${key}" ) long countsByColumn( @Param("meta") RDBTargetTableMeta<V> meta, 
@Param("keyName") String szSpecificColumnKeyName, @Param("key") Object key ); @Override @Select( "SELECT COUNT(*) FROM ${meta.tableName} WHERE ${range.rangeKey} >= ${range.min} AND ${range.rangeKey} <= ${range.max}" ) long countsByRange( @Param("meta") RDBTargetTableMeta<V> meta, @Param("range") QueryRange range ); @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = "selectList" ) @ResultType( LinkedTreeMap.class ) void selectList0( @Param("meta") RDBTargetTableMeta<V> meta, @Param("handler") ResultHandler<Map<String, Object>> handler, @Param("exSafeSQL") String szExSafeSQL ); @Override @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = "selectListByRange" ) List<V> selectListByRange( @Param("meta") RDBTargetTableMeta<V> meta, @Param("range") QueryRange range ); @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = "selectMappedByRange" ) @ResultType( LinkedTreeMap.class ) void selectMappedByRange0( @Param("meta") RDBTargetTableMeta<V> meta, @Param("handler") ResultHandler<Map<String, Object>> handler, @Param("range") QueryRange range ); @Override default Map<Object, V> selectMappedByRange( RDBTargetTableMeta<V> meta, QueryRange range ) { DynamicQuerierMappedResultHandler<V> handler = new DynamicQuerierMappedResultHandler<>( meta, range ); this.selectMappedByRange0( meta, handler, range ); return handler.getResults(); } @Override default List<V> selectList( RDBTargetTableMeta<V> meta, String szExSafeSQL ) { DynamicQuerierEntityResultHandler<V> handler = new DynamicQuerierEntityResultHandler<>( meta ); this.selectList0( meta, handler, szExSafeSQL ); return handler.getResults(); } @Override @Select( "${statement}" ) List<Map<String, Object>> query ( @Param("meta") RDBTargetTableMeta<V> meta, @Param("statement") String szStatementSQL ); default List<V> queryVal ( RDBTargetTableMeta<V> meta, String szStatementSQL ) { if( meta.getResultConverter() == null ) { meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() ) ); } ResultConverter<V> converter = meta.getResultConverter(); List<V> results = new ArrayList<>(); List<Map<String, Object>> raw = this.query( meta, szStatementSQL ); for( Map<String, Object> map : raw ) { results.add( converter.convert( map ) ); } return results; } @Override @Select( "SELECT MAX(${rangeKeyName}) FROM ${meta.tableName}" ) Object getMaximumRangeVal( @Param("meta") RDBTargetTableMeta<V> meta, @Param("rangeKeyName") String szRangeKeyName ); @Override @Select( "SELECT MIN(${rangeKeyName}) FROM ${meta.tableName}" ) Object getMinimumRangeVal( @Param("meta") RDBTargetTableMeta<V> meta, @Param("rangeKeyName") String szRangeKeyName ); @Override @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = "selectListByColumn" ) List<V> selectListByColumn( @Param("meta") RDBTargetTableMeta<V> meta, @Param("columnKey") String szSpecificColumnKeyName, @Param("key") Object key ); @Override default V selectByKey( RDBTargetTableMeta<V> meta, Object key ) { DynamicQuerierEntityResultHandler<V> handler = new DynamicQuerierEntityResultHandler<>( meta ); this.selectList0( meta, handler, String.format( " WHERE `%s` = %s", meta.getIndexKey(), SQLStrings.format( key )) ); List<V> list = handler.getResults(); if( list != null && !list.isEmpty() ) { return list.get(0); } return null; } @InsertProvider( type = DynamicQuerierSqlBuilder.class, method = "insert" ) void insert( @Param("meta") RDBTargetTableMeta<V> meta, @Param("key") K key, @Param("entity") V entity ); @Override @UpdateProvider( type = DynamicQuerierSqlBuilder.class, method = "updateByEntity" ) void update( @Param("meta") RDBTargetTableMeta<V> meta, @Param("key") K key, @Param("entity") V entity ); @Override @DeleteProvider( type = 
DynamicQuerierSqlBuilder.class, method = "deleteByKey" ) void deleteByKey( @Param("meta") RDBTargetTableMeta<V> meta, @Param("key") Object key ); @Override @Update( "TRUNCATE TABLE ${meta.tableName}" ) void truncate( @Param("meta") RDBTargetTableMeta<V> meta ); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisClient.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.slime.source.DAOScanner; import com.pinecone.slime.source.rdb.RDBClient; import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.session.ExecutorType; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.SqlSessionFactory; import org.apache.ibatis.session.TransactionIsolationLevel; import javax.sql.DataSource; import java.sql.Connection; import java.util.List; public interface IbatisClient extends RDBClient { Configuration getConfiguration(); DataSource getDataSource(); Environment getEnvironment(); JSONObject getIbatisConf(); String getJDBCDriverName(); JSONObject getClientConf(); DAOScanner getDAOScanner(); void addMapper( Class<?> type ); SqlSessionFactory getSqlSessionFactory(); SqlSession openSession(); SqlSession openSession( boolean autoCommit ); SqlSession openSession( Connection connection ); SqlSession openSession( TransactionIsolationLevel level ); SqlSession openSession( ExecutorType execType ); SqlSession openSession( ExecutorType execType, boolean autoCommit ); SqlSession openSession( ExecutorType execType, TransactionIsolationLevel level ); SqlSession openSession( ExecutorType execType, Connection connection ); void addXMLObjectScope( String szPacketName ); void addXMLObjectScopeNoneSync( String szPacketName ); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisDAOScanner.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.ClassScopeNSProtocolIteratorsFactory; import com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter; import com.pinecone.slime.source.DAOScanner; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import com.pinecone.ulf.util.lang.PooledClassCandidateScanner; import com.pinecone.ulf.util.lang.SimpleAnnotationExcludeFilter; import javassist.ClassPool; public class IbatisDAOScanner extends PooledClassCandidateScanner implements DAOScanner { protected HierarchyClassInspector mClassInspector ; public IbatisDAOScanner ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) { super( searchScope, classLoader, iteratorsFactory, classPool ); this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool ); this.addExcludeFilter( new SimpleAnnotationExcludeFilter( this.mClassInspector, IbatisDataAccessObject.class ) ); } public IbatisDAOScanner ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) { this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool ); } public IbatisDAOScanner ( ClassScope searchScope, ClassLoader classLoader ) { this( searchScope, classLoader, new 
ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() ); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisDataAccessObject.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.annotation.ElementType; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface IbatisDataAccessObject { String value() default ""; // Which database or data-manipulator this DAO has affinity to. // For multi-database scenarios. String scope() default ""; } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisManipulatorProxyMapperFactory.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.slime.source.rdb.RDBQuerierDataManipulator; import org.apache.ibatis.session.SqlSession; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; public class IbatisManipulatorProxyMapperFactory implements Pinenut { static class ManipulatorProxyHandler<T> implements InvocationHandler { private final T original; private final SqlSession sqlSession; public ManipulatorProxyHandler( T original, SqlSession sqlSession ) { this.original = original; this.sqlSession = sqlSession; } @Override public Object invoke( Object proxy, Method method, Object[] args ) throws Throwable { if ( "commit".equals( method.getName() ) ) { this.sqlSession.commit(); return null; } return method.invoke( this.original, args ); } } @SuppressWarnings("unchecked") public static <T> T getMapper( SqlSession sqlSession, Class<T> clazz ) { T original = sqlSession.getMapper(clazz); Class<?>[] interfaces = original.getClass().getInterfaces(); return (T) Proxy.newProxyInstance( original.getClass().getClassLoader(), interfaces, new ManipulatorProxyHandler<>(original, sqlSession) ); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisXMLResourceScanner.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.GenericScopeNSProtocolIteratorsFactory; import com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter; import com.pinecone.framework.util.lang.ObjectCandidateScanner; import com.pinecone.slime.source.XMLResourceScanner; public class IbatisXMLResourceScanner extends ObjectCandidateScanner implements XMLResourceScanner { public IbatisXMLResourceScanner ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory ) { super( searchScope, classLoader, iteratorsFactory ); } public IbatisXMLResourceScanner ( ClassScope searchScope, ClassLoader classLoader ) { this( searchScope, classLoader, new GenericScopeNSProtocolIteratorsFactory( classLoader, searchScope, ".xml" ) ); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/ProxySessionMapperPool.java ================================================ package 
com.pinecone.slime.jelly.source.ibatis; import org.apache.ibatis.session.ExecutorType; import com.pinecone.framework.system.construction.InstancePool; import com.pinecone.slime.jelly.source.ibatis.proxy.SqlSessionTemplate; public class ProxySessionMapperPool implements InstancePool { protected IbatisClient mIbatisClient; protected Class<?> mType; protected SqlSessionTemplate mSqlSessionTemplate; protected ProxySessionMapperPool( IbatisClient ibatisClient, Class<?> type, Void dummy ) { this.mIbatisClient = ibatisClient; this.mType = type; } public ProxySessionMapperPool( IbatisClient ibatisClient, Class<?> type, ExecutorType executorType ) { this( ibatisClient, type, (Void) null ); this.mSqlSessionTemplate = new SqlSessionTemplate( ibatisClient.getSqlSessionFactory(), executorType ); } public ProxySessionMapperPool( IbatisClient ibatisClient, Class<?> type ) { this( ibatisClient, type, (Void) null ); this.mSqlSessionTemplate = new SqlSessionTemplate( ibatisClient.getSqlSessionFactory() ); } @Override public Object allocate() { return this.mSqlSessionTemplate.getMapper( this.mType ); } @Override public void free( Object obj ) { } @Override public int freeSize() { return Integer.MAX_VALUE; } @Override public int pooledSize() { return 0; } @Override public boolean isEmpty() { return false; } @Override public void preAllocate( int count ) { } @Override public void setCapacity( int capacity ) { } @Override public int getCapacity() { return 0; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/SoloSessionMapperPool.java ================================================ package com.pinecone.slime.jelly.source.ibatis; import com.pinecone.framework.system.construction.InstancePool; import org.apache.ibatis.session.SqlSession; public class SoloSessionMapperPool implements InstancePool { protected SqlSession mSqlSession; protected Class<?> mType; public SoloSessionMapperPool( SqlSession sqlSession, Class<?> type ) { this.mSqlSession = sqlSession; this.mType = type; } @Override public Object allocate() { return this.mSqlSession.getMapper( this.mType ); } @Override public void free( Object obj ) { } @Override public int freeSize() { return Integer.MAX_VALUE; } @Override public int pooledSize() { return 0; } @Override public boolean isEmpty() { return false; } @Override public void preAllocate( int count ) { } @Override public void setCapacity( int capacity ) { } @Override public int getCapacity() { return 0; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/MyBatisExceptionTranslator.java ================================================ package com.pinecone.slime.jelly.source.ibatis.proxy; import java.sql.SQLException; import java.util.function.Supplier; import javax.sql.DataSource; import org.apache.ibatis.exceptions.PersistenceException; //import org.springframework.dao.DataAccessException; //import org.springframework.dao.support.PersistenceExceptionTranslator; //import org.springframework.jdbc.UncategorizedSQLException; import com.pinecone.slime.jelly.source.ds.dao.DataAccessException; import com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator; import com.pinecone.slime.jelly.source.ds.jdbc.SQLErrorCodeSQLExceptionTranslator; import com.pinecone.slime.jelly.source.ds.jdbc.SQLExceptionTranslator; import com.pinecone.slime.jelly.source.ds.jdbc.UncategorizedSQLException; import 
com.pinecone.slime.jelly.source.ds.transaction.TransactionException; //import org.springframework.jdbc.support.SQLExceptionTranslator; //import org.springframework.transaction.TransactionException; public class MyBatisExceptionTranslator implements PersistenceExceptionTranslator { private final Supplier<SQLExceptionTranslator> exceptionTranslatorSupplier; private SQLExceptionTranslator exceptionTranslator; public MyBatisExceptionTranslator(DataSource dataSource, boolean exceptionTranslatorLazyInit) { this(() -> { return new SQLErrorCodeSQLExceptionTranslator(dataSource); }, exceptionTranslatorLazyInit); } public MyBatisExceptionTranslator(Supplier<SQLExceptionTranslator> exceptionTranslatorSupplier, boolean exceptionTranslatorLazyInit) { this.exceptionTranslatorSupplier = exceptionTranslatorSupplier; if (!exceptionTranslatorLazyInit) { this.initExceptionTranslator(); } } @Override public DataAccessException translateExceptionIfPossible(RuntimeException e) { if (e instanceof PersistenceException) { if (e.getCause() instanceof PersistenceException) { e = (PersistenceException) e.getCause(); } if (e.getCause() instanceof SQLException) { this.initExceptionTranslator(); String task = e.getMessage() + "\n"; SQLException se = (SQLException) e.getCause(); DataAccessException dae = this.exceptionTranslator.translate(task, (String)null, se); return dae != null ? dae : new UncategorizedSQLException(task, (String)null, se); } else if (e.getCause() instanceof TransactionException) { throw (TransactionException) e.getCause(); } else { return new MyBatisSystemException(e); } } else { return null; } } private synchronized void initExceptionTranslator() { if (this.exceptionTranslator == null) { this.exceptionTranslator = this.exceptionTranslatorSupplier.get(); } } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/MyBatisSystemException.java ================================================ package com.pinecone.slime.jelly.source.ibatis.proxy; import com.pinecone.slime.jelly.source.ds.jdbc.UncategorizedDataAccessException; public class MyBatisSystemException extends UncategorizedDataAccessException { private static final long serialVersionUID = 1284728621670758938L; public MyBatisSystemException( Throwable cause ) { super((String)null, cause); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/SqlSessionHolder.java ================================================ package com.pinecone.slime.jelly.source.ibatis.proxy; import org.apache.ibatis.session.ExecutorType; import org.apache.ibatis.session.SqlSession; import com.pinecone.framework.util.Assert; import com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator; import com.pinecone.slime.jelly.source.ds.transaction.ResourceHolderSupport; public final class SqlSessionHolder extends ResourceHolderSupport { private final SqlSession sqlSession; private final ExecutorType executorType; private final PersistenceExceptionTranslator exceptionTranslator; public SqlSessionHolder( SqlSession sqlSession, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator ) { Assert.notNull(sqlSession, "SqlSession must not be null"); Assert.notNull(executorType, "ExecutorType must not be null"); this.sqlSession = sqlSession; this.executorType 
= executorType; this.exceptionTranslator = exceptionTranslator; } public SqlSession getSqlSession() { return this.sqlSession; } public ExecutorType getExecutorType() { return this.executorType; } public PersistenceExceptionTranslator getPersistenceExceptionTranslator() { return this.exceptionTranslator; } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/SqlSessionTemplate.java ================================================ package com.pinecone.slime.jelly.source.ibatis.proxy; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.sql.Connection; import java.util.List; import java.util.Map; import org.apache.ibatis.cursor.Cursor; import org.apache.ibatis.exceptions.PersistenceException; import org.apache.ibatis.executor.BatchResult; import org.apache.ibatis.reflection.ExceptionUtil; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.ExecutorType; import org.apache.ibatis.session.ResultHandler; import org.apache.ibatis.session.RowBounds; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.Assert; import com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator; public class SqlSessionTemplate implements SqlSession, Pinenut { private final SqlSessionFactory sqlSessionFactory; private final ExecutorType executorType; private final SqlSession sqlSessionProxy; private final PersistenceExceptionTranslator exceptionTranslator; public SqlSessionTemplate( SqlSessionFactory sqlSessionFactory ) { this(sqlSessionFactory, sqlSessionFactory.getConfiguration().getDefaultExecutorType() ); } public SqlSessionTemplate( SqlSessionFactory sqlSessionFactory, ExecutorType executorType ) { this(sqlSessionFactory, executorType, new MyBatisExceptionTranslator( sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(), true) ); } public SqlSessionTemplate( SqlSessionFactory sqlSessionFactory, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator ) { Assert.notNull(sqlSessionFactory, "Property 'sqlSessionFactory' is required"); Assert.notNull(executorType, "Property 'executorType' is required"); this.sqlSessionFactory = sqlSessionFactory; this.executorType = executorType; this.exceptionTranslator = exceptionTranslator; this.sqlSessionProxy = (SqlSession) Proxy.newProxyInstance(SqlSessionFactory.class.getClassLoader(), new Class<?>[]{SqlSession.class}, new SqlSessionTemplate.SqlSessionInterceptor()); } public SqlSessionFactory getSqlSessionFactory() { return this.sqlSessionFactory; } public ExecutorType getExecutorType() { return this.executorType; } public PersistenceExceptionTranslator getPersistenceExceptionTranslator() { return this.exceptionTranslator; } @Override public <T> T selectOne(String statement) { return this.sqlSessionProxy.selectOne(statement); } @Override public <T> T selectOne(String statement, Object parameter) { return this.sqlSessionProxy.selectOne(statement, parameter); } @Override public <K, V> Map<K, V> selectMap(String statement, String mapKey) { return this.sqlSessionProxy.selectMap(statement, mapKey); } @Override public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey) { return this.sqlSessionProxy.selectMap(statement, parameter, mapKey); } @Override public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey, RowBounds rowBounds) { 
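// Like every query method on this template, this call is routed through sqlSessionProxy:
// SqlSessionInterceptor (defined at the bottom of this class) acquires the real SqlSession per
// invocation via SqlSessionUtils, auto-commits non-transactional sessions, translates
// PersistenceExceptions through the configured translator, and always closes/releases the session.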
return this.sqlSessionProxy.selectMap(statement, parameter, mapKey, rowBounds); } @Override public <T> Cursor<T> selectCursor(String statement) { return this.sqlSessionProxy.selectCursor(statement); } @Override public <T> Cursor<T> selectCursor(String statement, Object parameter) { return this.sqlSessionProxy.selectCursor(statement, parameter); } @Override public <T> Cursor<T> selectCursor(String statement, Object parameter, RowBounds rowBounds) { return this.sqlSessionProxy.selectCursor(statement, parameter, rowBounds); } @Override public <E> List<E> selectList(String statement) { return this.sqlSessionProxy.selectList(statement); } @Override public <E> List<E> selectList(String statement, Object parameter) { return this.sqlSessionProxy.selectList(statement, parameter); } @Override public <E> List<E> selectList(String statement, Object parameter, RowBounds rowBounds) { return this.sqlSessionProxy.selectList(statement, parameter, rowBounds); } @Override public void select(String statement, ResultHandler handler) { this.sqlSessionProxy.select(statement, handler); } @Override public void select(String statement, Object parameter, ResultHandler handler) { this.sqlSessionProxy.select(statement, parameter, handler); } @Override public void select(String statement, Object parameter, RowBounds rowBounds, ResultHandler handler) { this.sqlSessionProxy.select(statement, parameter, rowBounds, handler); } @Override public int insert(String statement) { return this.sqlSessionProxy.insert(statement); } @Override public int insert(String statement, Object parameter) { return this.sqlSessionProxy.insert(statement, parameter); } @Override public int update(String statement) { return this.sqlSessionProxy.update(statement); } @Override public int update(String statement, Object parameter) { return this.sqlSessionProxy.update(statement, parameter); } @Override public int delete(String statement) { return this.sqlSessionProxy.delete(statement); } @Override public int delete(String statement, Object parameter) { return this.sqlSessionProxy.delete(statement, parameter); } @Override public <T> T getMapper(Class<T> type) { return this.getConfiguration().getMapper(type, this); } @Override public void commit() { throw new UnsupportedOperationException("Manual commit is not allowed over a Hydra managed SqlSession"); } @Override public void commit(boolean force) { throw new UnsupportedOperationException("Manual commit is not allowed over a Hydra managed SqlSession"); } @Override public void rollback() { throw new UnsupportedOperationException("Manual rollback is not allowed over a Hydra managed SqlSession"); } @Override public void rollback(boolean force) { throw new UnsupportedOperationException("Manual rollback is not allowed over a Hydra managed SqlSession"); } @Override public void close() { throw new UnsupportedOperationException("Manual close is not allowed over a Hydra managed SqlSession"); } @Override public void clearCache() { this.sqlSessionProxy.clearCache(); } @Override public Configuration getConfiguration() { return this.sqlSessionFactory.getConfiguration(); } @Override public Connection getConnection() { return this.sqlSessionProxy.getConnection(); } @Override public List<BatchResult> flushStatements() { return this.sqlSessionProxy.flushStatements(); } //@Override public void destroy() throws Exception { } private class SqlSessionInterceptor implements InvocationHandler { private SqlSessionInterceptor() { } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { SqlSession sqlSession = 
SqlSessionUtils.getSqlSession(SqlSessionTemplate.this.sqlSessionFactory, SqlSessionTemplate.this.executorType, SqlSessionTemplate.this.exceptionTranslator); Object unwrapped; try { Object result = method.invoke(sqlSession, args); if (!SqlSessionUtils.isSqlSessionTransactional(sqlSession, SqlSessionTemplate.this.sqlSessionFactory)) { sqlSession.commit(true); } unwrapped = result; } catch ( Throwable e ) { unwrapped = ExceptionUtil.unwrapThrowable( e ); if ( SqlSessionTemplate.this.exceptionTranslator != null && unwrapped instanceof PersistenceException ) { SqlSessionUtils.closeSqlSession(sqlSession, SqlSessionTemplate.this.sqlSessionFactory); sqlSession = null; Throwable translated = SqlSessionTemplate.this.exceptionTranslator.translateExceptionIfPossible((PersistenceException)unwrapped); if ( translated != null ) { unwrapped = translated; } } throw (Throwable) unwrapped; } finally { if (sqlSession != null) { SqlSessionUtils.closeSqlSession( sqlSession, SqlSessionTemplate.this.sqlSessionFactory ); } } return unwrapped; } } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/SqlSessionUtils.java ================================================ package com.pinecone.slime.jelly.source.ibatis.proxy; import org.apache.ibatis.exceptions.PersistenceException; import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.session.ExecutorType; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; import org.mybatis.logging.Logger; import org.mybatis.logging.LoggerFactory; //import org.mybatis.spring.SqlSessionHolder; import org.mybatis.spring.transaction.SpringManagedTransactionFactory; //import org.springframework.dao.DataAccessException; //import org.springframework.dao.TransientDataAccessResourceException; //import org.springframework.dao.support.PersistenceExceptionTranslator; //import org.springframework.transaction.support.TransactionSynchronizationAdapter; //import org.springframework.transaction.support.TransactionSynchronizationManager; //import org.springframework.util.Assert; import com.pinecone.framework.util.Assert; import com.pinecone.slime.jelly.source.ds.dao.DataAccessException; import com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator; import com.pinecone.slime.jelly.source.ds.jdbc.TransientDataAccessResourceException; import com.pinecone.slime.jelly.source.ds.transaction.TransactionSynchronizationAdapter; import com.pinecone.slime.jelly.source.ds.transaction.TransactionSynchronizationManager; public final class SqlSessionUtils { private static final Logger LOGGER = LoggerFactory.getLogger(SqlSessionUtils.class); private static final String NO_EXECUTOR_TYPE_SPECIFIED = "No ExecutorType specified"; private static final String NO_SQL_SESSION_FACTORY_SPECIFIED = "No SqlSessionFactory specified"; private static final String NO_SQL_SESSION_SPECIFIED = "No SqlSession specified"; private SqlSessionUtils() { } public static SqlSession getSqlSession(SqlSessionFactory sessionFactory) { ExecutorType executorType = sessionFactory.getConfiguration().getDefaultExecutorType(); return getSqlSession(sessionFactory, executorType, (PersistenceExceptionTranslator)null); } public static SqlSession getSqlSession(SqlSessionFactory sessionFactory, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator) { Assert.notNull(sessionFactory, "No SqlSessionFactory specified"); Assert.notNull(executorType, "No ExecutorType specified"); 
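// Resolution order (descriptive note): first look up a SqlSessionHolder already bound to the
// current thread via TransactionSynchronizationManager and reuse its session when it is
// synchronized with the ongoing transaction; otherwise open a fresh session and, when
// synchronization is active and the environment uses a SpringManagedTransactionFactory,
// register a holder so commit/close follow the transaction lifecycle.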
SqlSessionHolder holder = (SqlSessionHolder) TransactionSynchronizationManager.getResource(sessionFactory); SqlSession session = sessionHolder(executorType, holder); if (session != null) { return session; } else { LOGGER.debug(() -> { return "Creating a new SqlSession"; }); session = sessionFactory.openSession(executorType); registerSessionHolder(sessionFactory, executorType, exceptionTranslator, session); return session; } } private static void registerSessionHolder(SqlSessionFactory sessionFactory, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator, SqlSession session) { if (TransactionSynchronizationManager.isSynchronizationActive()) { Environment environment = sessionFactory.getConfiguration().getEnvironment(); if (environment.getTransactionFactory() instanceof SpringManagedTransactionFactory) { LOGGER.debug(() -> { return "Registering transaction synchronization for SqlSession [" + session + "]"; }); SqlSessionHolder holder = new SqlSessionHolder(session, executorType, exceptionTranslator); TransactionSynchronizationManager.bindResource(sessionFactory, holder); TransactionSynchronizationManager.registerSynchronization(new SqlSessionUtils.SqlSessionSynchronization(holder, sessionFactory)); holder.setSynchronizedWithTransaction(true); holder.requested(); } else { if (TransactionSynchronizationManager.getResource(environment.getDataSource()) != null) { throw new TransientDataAccessResourceException("SqlSessionFactory must be using a SpringManagedTransactionFactory in order to use Hydra transaction synchronization"); } LOGGER.debug(() -> { return "SqlSession [" + session + "] was not registered for synchronization because DataSource is not transactional"; }); } } else { LOGGER.debug(() -> { return "SqlSession [" + session + "] was not registered for synchronization because synchronization is not active"; }); } } private static SqlSession sessionHolder(ExecutorType executorType, SqlSessionHolder holder) { SqlSession session = null; if ( holder != null && holder.isSynchronizedWithTransaction() ) { if (holder.getExecutorType() != executorType) { throw new TransientDataAccessResourceException("Cannot change the ExecutorType when there is an existing transaction"); } holder.requested(); LOGGER.debug(() -> { return "Fetched SqlSession [" + holder.getSqlSession() + "] from current transaction"; }); session = holder.getSqlSession(); } return session; } public static void closeSqlSession(SqlSession session, SqlSessionFactory sessionFactory) { Assert.notNull(session, "No SqlSession specified"); Assert.notNull(sessionFactory, "No SqlSessionFactory specified"); SqlSessionHolder holder = (SqlSessionHolder)TransactionSynchronizationManager.getResource(sessionFactory); if (holder != null && holder.getSqlSession() == session) { LOGGER.debug(() -> { return "Releasing transactional SqlSession [" + session + "]"; }); holder.released(); } else { LOGGER.debug(() -> { return "Closing non transactional SqlSession [" + session + "]"; }); session.close(); } } public static boolean isSqlSessionTransactional(SqlSession session, SqlSessionFactory sessionFactory) { Assert.notNull(session, "No SqlSession specified"); Assert.notNull(sessionFactory, "No SqlSessionFactory specified"); SqlSessionHolder holder = (SqlSessionHolder)TransactionSynchronizationManager.getResource(sessionFactory); return holder != null && holder.getSqlSession() == session; } private static final class SqlSessionSynchronization extends TransactionSynchronizationAdapter { private final SqlSessionHolder holder; private 
final SqlSessionFactory sessionFactory; private boolean holderActive = true; public SqlSessionSynchronization(SqlSessionHolder holder, SqlSessionFactory sessionFactory) { Assert.notNull(holder, "Parameter 'holder' must not be null"); Assert.notNull(sessionFactory, "Parameter 'sessionFactory' must not be null"); this.holder = holder; this.sessionFactory = sessionFactory; } public int getOrder() { return 999; } public void suspend() { if (this.holderActive) { SqlSessionUtils.LOGGER.debug(() -> { return "Transaction synchronization suspending SqlSession [" + this.holder.getSqlSession() + "]"; }); TransactionSynchronizationManager.unbindResource(this.sessionFactory); } } public void resume() { if (this.holderActive) { SqlSessionUtils.LOGGER.debug(() -> { return "Transaction synchronization resuming SqlSession [" + this.holder.getSqlSession() + "]"; }); TransactionSynchronizationManager.bindResource(this.sessionFactory, this.holder); } } public void beforeCommit(boolean readOnly) { if (TransactionSynchronizationManager.isActualTransactionActive()) { try { SqlSessionUtils.LOGGER.debug(() -> { return "Transaction synchronization committing SqlSession [" + this.holder.getSqlSession() + "]"; }); this.holder.getSqlSession().commit(); } catch (PersistenceException var4) { if (this.holder.getPersistenceExceptionTranslator() != null) { DataAccessException translated = this.holder.getPersistenceExceptionTranslator().translateExceptionIfPossible(var4); if (translated != null) { throw translated; } } throw var4; } } } public void beforeCompletion() { if (!this.holder.isOpen()) { SqlSessionUtils.LOGGER.debug(() -> { return "Transaction synchronization deregistering SqlSession [" + this.holder.getSqlSession() + "]"; }); TransactionSynchronizationManager.unbindResource(this.sessionFactory); this.holderActive = false; SqlSessionUtils.LOGGER.debug(() -> { return "Transaction synchronization closing SqlSession [" + this.holder.getSqlSession() + "]"; }); this.holder.getSqlSession().close(); } } public void afterCompletion(int status) { if (this.holderActive) { SqlSessionUtils.LOGGER.debug(() -> { return "Transaction synchronization deregistering SqlSession [" + this.holder.getSqlSession() + "]"; }); TransactionSynchronizationManager.unbindResourceIfPossible(this.sessionFactory); this.holderActive = false; SqlSessionUtils.LOGGER.debug(() -> { return "Transaction synchronization closing SqlSession [" + this.holder.getSqlSession() + "]"; }); this.holder.getSqlSession().close(); } this.holder.reset(); } } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/memcached/GenericMemcachedManipulator.java ================================================ package com.pinecone.slime.jelly.source.memcached; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.StringUtils; import com.pinecone.slime.jelly.source.NamespacedKey; import com.pinecone.slime.source.GenericResultConverter; import com.pinecone.slime.source.indexable.IndexableTargetScopeMeta; import net.spy.memcached.MemcachedClient; import java.io.Serializable; import java.net.SocketAddress; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Collection; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; public class 
GenericMemcachedManipulator<V extends Serializable> implements MemcachedManipulator<V> { private final MemcachedClient mMemClient; private final String mszNameSeparator; protected int mnExpireTime; public GenericMemcachedManipulator( MemcachedClient client, String szSeparator, int expire ) { this.mMemClient = client; this.mszNameSeparator = szSeparator; this.mnExpireTime = expire; } public GenericMemcachedManipulator( MemcachedClient client, String szSeparator ) { this( client, szSeparator, 0 ); } public GenericMemcachedManipulator( MemcachedClient client ) { this( client, ":" ); } @Override public MemcachedClient getClient() { return this.mMemClient; } private String getFullKey( IndexableTargetScopeMeta<V> meta, String szNamespace, Object key ) { return NamespacedKey.getFullKey( meta, this.mszNameSeparator, szNamespace, key ); } @Override public long counts( IndexableTargetScopeMeta<V> meta, String szScopeKey ) { return this.countsNS( meta, szScopeKey ); } @Override public long countsByNS( IndexableTargetScopeMeta<V> meta, String szNamespace, Object key ) { if( this.selectByNS( meta, szNamespace, key ) != null ){ return 1; } return 0; } @Override public long countsNS( IndexableTargetScopeMeta<V> meta, String szNamespace ) { boolean bEN = StringUtils.isEmpty( szNamespace ); long count = 0; Map<SocketAddress, Map<String, String>> items = this.mMemClient.getStats( "items" ); for ( Map.Entry<SocketAddress, Map<String, String>> entry : items.entrySet() ) { Map<String, String> itemMap = entry.getValue(); for ( String key : itemMap.keySet() ) { if ( key.startsWith("items:") ) { String[] parts = key.split(":"); if ( parts.length > 2 && "number".equals(parts[2]) ) { int slabNumber = Integer.parseInt(parts[1]); int limit = Integer.parseInt( itemMap.get(key) ); Map<SocketAddress, Map<String, String>> dump = this.mMemClient.getStats( "cachedump " + slabNumber + " " + limit ); for ( Map<String, String> dumpMap : dump.values() ) { for( String k : dumpMap.keySet() ){ if( bEN || k.startsWith( szNamespace ) ) { ++count; } } } } } } } return count; } @Override public List<Map<String, Object>> query( IndexableTargetScopeMeta<V> meta, String szStatement ) { throw new UnsupportedOperationException( "Query method not supported for GenericMemcachedManipulator." ); } @Override public List<V> queryVal( IndexableTargetScopeMeta<V> meta, String szStatement ) { throw new UnsupportedOperationException( "QueryVal method not supported for GenericMemcachedManipulator." 
); } @Override public Object selectAllByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { if( key != null && szNamespace != null ) { return this.mMemClient.get( this.getFullKey( meta, szNamespace, key ) ); } Map map = new LinkedHashMap<>(); if( szNamespace == null ) { Collection keys = this.keys(); for( String k : keys ) { map.put( k, this.mMemClient.get( k ) ); } } else { Collection keys = this.keys(); for( String k : keys ) { if( k.startsWith( szNamespace ) ) { map.put( k, this.mMemClient.get( k ) ); } } } return map; } @Override public List selectsByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { return List.of( this.selectByNS( meta, szNamespace, key ) ); } @Override public V selectByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { return selectByKey( meta, this.getFullKey( meta, szNamespace, key ) ); } @Override public V selectByKey( IndexableTargetScopeMeta meta, Object key ) { if ( meta.getResultConverter() == null ) { meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() ) ); } return meta.getResultConverter().convert( this.mMemClient.get( key.toString() ) ); } protected void insert0( IndexableTargetScopeMeta meta, String szKey, V entity ) { Future setFuture = this.mMemClient.set( szKey, this.mnExpireTime, entity ); try{ if( !setFuture.get( 5, TimeUnit.SECONDS ) ){ throw new IllegalStateException( "Failed to set key: " + szKey ); } } catch ( TimeoutException | ExecutionException | InterruptedException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public void insert( IndexableTargetScopeMeta meta, String key, V entity, long expireMill ) { int expireSeconds = (int) (expireMill / 1000); Future setFuture = this.mMemClient.set(key, expireSeconds, entity); try { if ( !setFuture.get( 5, TimeUnit.SECONDS ) ) { throw new IllegalStateException("Failed to insert key: " + key); } } catch ( TimeoutException | ExecutionException | InterruptedException e ) { throw new ProxyProvokeHandleException(e); } } @Override public void insertByNS( IndexableTargetScopeMeta meta, String szNamespace, String key, V entity ) { String scopeKey = this.getFullKey( meta, szNamespace, key ); this.insert0( meta, scopeKey, entity ); } @Override public void insert( IndexableTargetScopeMeta meta, String key, V entity ) { this.insert0( meta, key.toString(), entity ); } @Override public void updateByNS( IndexableTargetScopeMeta meta, String szNamespace, String key, V entity ) { this.insertByNS( meta, szNamespace, key, entity ); } @Override public void update( IndexableTargetScopeMeta meta, String key, V entity ) { this.insert( meta, key, entity ); } @Override public void deleteByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { String scopeKey = this.getFullKey( meta, szNamespace,key ); this.deleteByKey( meta, scopeKey ); } @Override public void deleteByKey( IndexableTargetScopeMeta meta, Object key ) { Future setFuture = this.mMemClient.delete( key.toString() ); try{ if( !setFuture.get( 5, TimeUnit.SECONDS ) ){ throw new IllegalStateException( "Deletion compromised, with key: " + key ); } } catch ( TimeoutException | ExecutionException | InterruptedException e ) { throw new ProxyProvokeHandleException( e ); } } @Override public void purge( IndexableTargetScopeMeta meta ) { this.purgeByNS( meta, meta.getScopeNS() ); } @Override public void purgeByNS( IndexableTargetScopeMeta meta, String szNamespace ) { if( szNamespace != null && !szNamespace.isEmpty() ) { Collection keys = this.keys(); for(
String k : keys ) { if( k.startsWith( szNamespace ) ) { this.deleteByKey( meta, k ); } } } else { this.mMemClient.flush(); } } @Override public void commit() { // Memcached operations are atomic, no explicit commit needed. } @Override public Iterator keysIterator( IndexableTargetScopeMeta meta ) { return this.keySet().iterator(); } @Override public Iterator > iterator( IndexableTargetScopeMeta meta ) { return new EntryIterator( meta ); } protected final class EntryIterator implements Iterator > { Iterator keyIterator; IndexableTargetScopeMeta meta; EntryIterator( IndexableTargetScopeMeta meta ) { this.meta = meta; this.keyIterator = GenericMemcachedManipulator.this.keysIterator( meta ); } @Override public final boolean hasNext() { return this.keyIterator.hasNext(); } @Override public final Map.Entry next() { String k = this.keyIterator.next(); return new KeyValue<>( k, GenericMemcachedManipulator.this.selectByKey( this.meta, k ) ); } } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/memcached/MemcachedManipulator.java ================================================ package com.pinecone.slime.jelly.source.memcached; import com.pinecone.framework.unit.Units; import com.pinecone.slime.source.indexable.IndexableIterableManipulator; import net.spy.memcached.MemcachedClient; import java.io.Serializable; import java.net.SocketAddress; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; import java.util.Map; public interface MemcachedManipulator extends IndexableIterableManipulator { MemcachedClient getClient(); default Collection keys( Class stereo ) { try{ MemcachedClient client = this.getClient(); Collection allKeys = Units.newInstance( stereo ); Map<SocketAddress, Map<String, String>> items = client.getStats( "items" ); for ( Map.Entry<SocketAddress, Map<String, String>> entry : items.entrySet() ) { Map<String, String> itemMap = entry.getValue(); for ( String key : itemMap.keySet() ) { if ( key.startsWith("items:") ) { String[] parts = key.split(":"); if ( parts.length > 2 && "number".equals(parts[2]) ) { int slabNumber = Integer.parseInt(parts[1]); int limit = Integer.parseInt( itemMap.get(key) ); Map<SocketAddress, Map<String, String>> dump = client.getStats( "cachedump " + slabNumber + " " + limit ); for ( Map<String, String> dumpMap : dump.values() ) { allKeys.addAll( dumpMap.keySet() ); } } } } } return allKeys; } catch ( IllegalArgumentException e ) { return this.keys(); } } default Collection keys() { return this.keys( ArrayList.class ); } default Collection keySet() { return this.keys( LinkedHashSet.class ); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/GenericRedisHashManipulator.java ================================================ package com.pinecone.slime.jelly.source.redis; import com.pinecone.slime.source.GenericResultConverter; import com.pinecone.slime.source.indexable.IndexableIterableManipulator; import com.pinecone.slime.source.indexable.IndexableTargetScopeMeta; import redis.clients.jedis.Jedis; import redis.clients.jedis.ScanParams; import redis.clients.jedis.ScanResult; import java.util.Iterator; import java.util.List; import java.util.Map; public class GenericRedisHashManipulator implements IndexableIterableManipulator { private final Jedis mJedis; public GenericRedisHashManipulator( Jedis jedis ) { this.mJedis = jedis; } private String getScopeKey( IndexableTargetScopeMeta meta, Object namespace ) { if ( namespace != null && !"".equals( namespace ) ) { return namespace.toString(); } else if ( meta.getIndexKey() !=
null && !meta.getIndexKey().isEmpty() ) { return meta.getIndexKey(); } else { throw new IllegalArgumentException( "Both namespace and meta's index key are empty." ); } } @Override public long counts( IndexableTargetScopeMeta meta, String szParentIndexKey ) { return this.mJedis.hlen( this.getScopeKey( meta, szParentIndexKey ) ); } @Override public long countsByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key) { String scopeKey = this.getScopeKey( meta, szParentIndexKey ); if ( this.mJedis.hexists( scopeKey, key.toString()) ) { return 1; } return 0; } @Override public long countsNS( IndexableTargetScopeMeta meta, String szNamespace ) { long fieldCount = 0; String cursor = ScanParams.SCAN_POINTER_START; ScanParams scanParams = new ScanParams().match( szNamespace + "*" ).count( 1000 ); do { ScanResult> scanResult = this.mJedis.hscan( meta.getIndexKey(), cursor, scanParams ); fieldCount += scanResult.getResult().size(); cursor = scanResult.getCursor(); } while (!cursor.equals(ScanParams.SCAN_POINTER_START)); return fieldCount; } @Override public List query( IndexableTargetScopeMeta meta, String szStatement ) { throw new UnsupportedOperationException( "Query method not supported for Redis Hash manipulator." ); } @Override public List queryVal( IndexableTargetScopeMeta meta, String szStatement ) { throw new UnsupportedOperationException( "QueryVal method not supported for Redis Hash manipulator." ); } @Override public Object selectAllByNS ( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) { if( key == null ) { String scopeKey = this.getScopeKey( meta, szParentIndexKey ); return this.mJedis.hgetAll( scopeKey ); } else { return this.selectsByNS( meta, szParentIndexKey, key ); } } @Override public List selectsByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) { return List.of( this.selectByNS( meta, szParentIndexKey, key ) ); } @Override public V selectByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) { String scopeKey = this.getScopeKey( meta, szParentIndexKey ); if( meta.getResultConverter() == null ) { meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() )); } Object val = this.mJedis.hget( scopeKey, key.toString() ); if( val == null ) { return null; } return meta.getResultConverter().convert( val ) ; } @Override public V selectByKey( IndexableTargetScopeMeta meta, Object key ) { return this.selectByNS( meta, meta.getIndexKey(), key ); } @Override public void insertByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, K key, V entity ) { this.mJedis.hset( this.getScopeKey( meta, szParentIndexKey ), key.toString() , entity.toString() ); } @Override public void insert( IndexableTargetScopeMeta meta, K key, V entity ) { this.insertByNS( meta, meta.getIndexKey(), key, entity ); } @Override public void insert( IndexableTargetScopeMeta meta, K key, V entity, long expireMill ) { this.insertByNS( meta, meta.getIndexKey(), key, entity ); // Not supported. 
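// Note on the "Not supported" above (editorial comment, not from the original source): classic
// Redis offers no per-field TTL inside a hash, so expireMill cannot be honored at field
// granularity here. If expiring the whole hash key were acceptable, a hypothetical variant
// could follow the hset with something like:
//     this.mJedis.pexpire( this.getScopeKey( meta, meta.getIndexKey() ), expireMill );
// Newer Redis versions (7.4+) also add HEXPIRE-style per-field TTLs, which this code does not use.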
} @Override public void updateByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, K key, V entity ) { this.insertByNS( meta, szParentIndexKey, key, entity ); } @Override public void update( IndexableTargetScopeMeta meta, K key, V entity ) { this.insert( meta, key, entity ); } @Override public void deleteByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) { this.mJedis.hdel( this.getScopeKey( meta, szParentIndexKey ), key.toString() ); } @Override public void deleteByKey( IndexableTargetScopeMeta meta, Object key ) { this.deleteByNS( meta, meta.getIndexKey(), key ); } @Override public void purge( IndexableTargetScopeMeta meta ) { this.purgeByNS( meta, meta.getIndexKey() ); } @Override public void purgeByNS( IndexableTargetScopeMeta meta, String szParentIndexKey ) { String ns = this.getScopeKey( meta, szParentIndexKey ); this.mJedis.del( ns ); } @Override public void commit() { // Redis operations are atomic, no explicit commit needed. } @Override @SuppressWarnings( "unchecked" ) public Iterator keysIterator( IndexableTargetScopeMeta meta ) { return (Iterator) new RedisKeysIterator( this.mJedis, "", new IteratorSourceAdapter() { @Override public ScanResult > scan( String cursor, ScanParams params ) { return GenericRedisHashManipulator.this.mJedis.hscan( meta.getIndexKey(), cursor, params ); } }); } @Override @SuppressWarnings( "unchecked" ) public Iterator > iterator( IndexableTargetScopeMeta meta ) { return (Iterator) new RedisEntryIterator( this.mJedis, "", new IteratorSourceAdapter() { @Override public ScanResult > scan( String cursor, ScanParams params ) { return GenericRedisHashManipulator.this.mJedis.hscan( meta.getIndexKey(), cursor, params ); } }); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/GenericRedisMasterManipulator.java ================================================ package com.pinecone.slime.jelly.source.redis; import com.pinecone.framework.system.prototype.ObjectiveBean; import com.pinecone.framework.unit.KeyValue; import com.pinecone.slime.jelly.source.NamespacedKey; import com.pinecone.slime.source.GenericResultConverter; import com.pinecone.slime.source.indexable.IndexableIterableManipulator; import com.pinecone.slime.source.indexable.IndexableTargetScopeMeta; import redis.clients.jedis.Jedis; import redis.clients.jedis.ScanParams; import redis.clients.jedis.ScanResult; import redis.clients.jedis.exceptions.JedisException; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.LinkedHashMap; public class GenericRedisMasterManipulator implements IndexableIterableManipulator { private final Jedis mJedis; private final String mszNameSeparator; public GenericRedisMasterManipulator( Jedis jedis, String szSeparator ) { this.mJedis = jedis; this.mszNameSeparator = szSeparator; } public GenericRedisMasterManipulator( Jedis jedis ) { this( jedis, ":" ); } private String getFullKey( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { return NamespacedKey.getFullKey( meta, this.mszNameSeparator, szNamespace, key ); } private String getKeyType( String key ) { try { return mJedis.type( key ); } catch ( JedisException e ) { // Handle exception (log, throw, etc.) 
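// Added note (assumption based on the surrounding code): because the JedisException is
// swallowed here, a connectivity failure is reported to callers as a null type; their type
// dispatch then rejects it as an unsupported data type (IllegalArgumentException) rather
// than as a connection error.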
return null; // Return null or throw exception to indicate failure } } @Override public long counts( IndexableTargetScopeMeta meta, String szScopeKey ) { if( szScopeKey == null || szScopeKey.isEmpty() ) { return this.mJedis.dbSize(); } try { String type = this.getKeyType( szScopeKey ); if ( "list".equals(type) ) { return this.mJedis.llen( szScopeKey ); } else if ( "set".equals(type) ) { return this.mJedis.scard( szScopeKey ); } else if ( "zset".equals(type) ) { return this.mJedis.zcard( szScopeKey ); } else if ( "hash".equals(type) ) { return this.mJedis.hlen( szScopeKey ); } else { throw new IllegalArgumentException( "Unsupported data type[ " + type + " ] for counts operation." ); } } catch ( JedisException e ) { return -1; } } @Override public long countsByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { String scopeKey = this.getFullKey( meta, szNamespace, key ); if( this.mJedis.exists( scopeKey ) ){ return 1; } return 0; } @Override public long countsNS( IndexableTargetScopeMeta meta, String szNamespace ) { long count = 0; String cursor = ScanParams.SCAN_POINTER_START; ScanParams scanParams = new ScanParams().match( szNamespace + "*" ).count( 1000 ); do { ScanResult scanResult = this.mJedis.scan( cursor, scanParams ); count += scanResult.getResult().size(); cursor = scanResult.getCursor(); } while ( !cursor.equals(ScanParams.SCAN_POINTER_START) ); return count; } @Override public List query( IndexableTargetScopeMeta meta, String szStatement ) { throw new UnsupportedOperationException("Query method not supported for GenericRedisMasterManipulator."); } @Override public List queryVal( IndexableTargetScopeMeta meta, String szStatement ) { throw new UnsupportedOperationException("QueryVal method not supported for GenericRedisMasterManipulator."); } protected Object selectElementByKey( IndexableTargetScopeMeta meta, Object key ) { String szKey = key.toString(); try { String type = this.getKeyType( szKey ); if ( "string".equals( type ) ) { String value = this.mJedis.get( szKey ); if ( value == null ) { return null; } return value; } else if ( "hash".equals( type ) ) { Map map = this.mJedis.hgetAll( szKey ); if ( map == null ) { return null; } return map; } else if ( "list".equals( type ) ) { List list = this.mJedis.lrange( szKey, 0, -1 ); if ( list == null || list.isEmpty() ) { return null; } return list; } else if ( "set".equals( type ) ) { Set set = this.mJedis.smembers( szKey ); if ( set == null || set.isEmpty() ) { return null; } return set; } else if ( "zset".equals( type ) ) { Set zset = this.mJedis.zrange( szKey, 0, -1 ); if ( zset == null || zset.isEmpty() ) { return null; } return zset; } else if ( "none".equals( type ) ) { return null; } else { throw new IllegalArgumentException( "Unsupported data type[" + type + "] for selectByNS operation." ); } } catch ( JedisException | ClassCastException e ) { // Handle exceptions (log, throw, etc.) 
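// Added note: returning null here makes a Jedis or type failure indistinguishable from a
// missing key ("none" type); selectByKey() passes either case straight into the result converter.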
return null; } } @Override public Object selectAllByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { if( key != null && szNamespace != null ) { return this.selectElementByKey( meta, this.getFullKey( meta, szNamespace, key ) ); } if( szNamespace == null ) { szNamespace = ""; } String cursor = ScanParams.SCAN_POINTER_START; ScanParams scanParams = new ScanParams().match( szNamespace + "*" ).count( 1000 ); Map map = new LinkedHashMap<>(); do { ScanResult scanResult = this.mJedis.scan( cursor, scanParams ); for( String k : scanResult.getResult() ) { map.put( k, this.selectElementByKey( meta, this.getFullKey( meta, szNamespace, k ) ) ); } cursor = scanResult.getCursor(); } while ( !cursor.equals(ScanParams.SCAN_POINTER_START) ); return map; } @Override public List selectsByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { return List.of( this.selectByNS( meta, szNamespace, key ) ); } @Override public V selectByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { return selectByKey( meta, this.getFullKey( meta, szNamespace, key ) ); } @Override public V selectByKey( IndexableTargetScopeMeta meta, Object key ) { if ( meta.getResultConverter() == null ) { meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() ) ); } return meta.getResultConverter().convert( this.selectElementByKey( meta, key ) ); } protected void insert0( IndexableTargetScopeMeta meta, String szKey, V entity, long expireMill ) { try { if( entity instanceof String ) { this.mJedis.set( szKey, (String)entity ); } else if( entity instanceof Map ) { Map map = (Map) entity; if( map.get( szKey ) instanceof String ) { this.mJedis.hset( szKey, (Map) map ); // Check once. } else { for( Map.Entry kv : map.entrySet() ) { this.mJedis.hset( szKey, kv.getKey().toString(), kv.getValue().toString() ); } } } else if( entity instanceof List ) { List list = (List) entity; int i = 0; for( Object e : list ) { this.mJedis.lset( szKey, i, e.toString() ); ++i; } } else if( entity instanceof Set) { Set list = (Set) entity; for( Object e : list ) { this.mJedis.sadd( szKey, e.toString() ); } } else if( entity != null ){ ObjectiveBean bean = new ObjectiveBean( entity ); String[] keys = bean.keys(); for( String k : keys ) { this.mJedis.hset( szKey, k, bean.get( k ).toString() ); } } if( expireMill > 0 ) { this.mJedis.pexpire( szKey, expireMill ); } } catch ( JedisException | ClassCastException e ) { // Handle exceptions (log, throw, etc.) 
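// Added note: the write failure is swallowed silently, so callers cannot observe a lost
// insert. A stricter alternative, as the memcached manipulator in this module does, would
// be to rethrow, e.g.:
//     throw new ProxyProvokeHandleException( e );
// (that would require importing com.pinecone.framework.system.ProxyProvokeHandleException).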
} } protected void insert0( IndexableTargetScopeMeta meta, String szKey, V entity ) { this.insert0( meta, szKey, entity, -1 ); } @Override public void insertByNS( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity ) { String scopeKey = this.getFullKey( meta, szNamespace, key ); this.insert0( meta, scopeKey, entity ); } @Override public void insert( IndexableTargetScopeMeta meta, K key, V entity ) { this.insert0( meta, key.toString(), entity ); } @Override public void insert( IndexableTargetScopeMeta meta, K key, V entity, long expireMill ) { this.insert0( meta, key.toString(), entity, expireMill ); } @Override public void updateByNS( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity ) { this.insertByNS( meta, szNamespace, key, entity ); } @Override public void update( IndexableTargetScopeMeta meta, K key, V entity ) { this.insert( meta, key, entity ); } @Override public void deleteByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) { String scopeKey = this.getFullKey( meta, szNamespace,key ); this.mJedis.unlink( scopeKey ); } @Override public void deleteByKey( IndexableTargetScopeMeta meta, Object key ) { this.mJedis.unlink( key.toString() ); } @Override public void purge( IndexableTargetScopeMeta meta ) { this.purgeByNS( meta, meta.getScopeNS() ); } @Override public void purgeByNS( IndexableTargetScopeMeta meta, String szNamespace ) { this.mJedis.select( Integer.parseInt( szNamespace ) ); this.mJedis.flushDB(); } @Override public void commit() { // Redis operations are atomic, no explicit commit needed. } @Override @SuppressWarnings( "unchecked" ) public Iterator keysIterator( IndexableTargetScopeMeta meta ) { return (Iterator) new RedisKeysIterator( this.mJedis, "", new IteratorSourceAdapter() { @Override public ScanResult scan( String cursor, ScanParams params ) { return GenericRedisMasterManipulator.this.mJedis.scan( cursor, params ); } }); } @Override @SuppressWarnings( "unchecked" ) public Iterator iterator( IndexableTargetScopeMeta meta ) { return new EntryIterator( meta ); } protected final class EntryIterator implements Iterator { Iterator keyIterator; IndexableTargetScopeMeta meta; EntryIterator( IndexableTargetScopeMeta meta ) { this.meta = meta; this.keyIterator = GenericRedisMasterManipulator.this.keysIterator( meta ); } @Override public final boolean hasNext() { return this.keyIterator.hasNext(); } @Override public final Map.Entry next() { K k = this.keyIterator.next(); // WARNING, Unchecked. 
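// The unchecked cast above relies on keysIterator() yielding the raw String keys produced by
// Redis SCAN, so it is only sound when K is String (or a supertype of it); a mismatched K
// would surface later as a ClassCastException at the call site, not here.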
return new KeyValue<>( k, GenericRedisMasterManipulator.this.selectElementByKey( this.meta, k ) ); } } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/IteratorSourceAdapter.java ================================================ package com.pinecone.slime.jelly.source.redis; import com.pinecone.framework.system.prototype.Pinenut; import redis.clients.jedis.ScanParams; import redis.clients.jedis.ScanResult; public interface IteratorSourceAdapter extends Pinenut { ScanResult scan( String cursor, ScanParams params ); } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/RedisEntryIterator.java ================================================ package com.pinecone.slime.jelly.source.redis; import redis.clients.jedis.Jedis; import java.util.Map; public class RedisEntryIterator extends RedisIterator { public RedisEntryIterator( Jedis jedis, String namespace, int batchSize, IteratorSourceAdapter adapter ) { super( jedis, namespace, batchSize, adapter ); } public RedisEntryIterator( Jedis jedis, String namespace, IteratorSourceAdapter adapter ) { this( jedis, namespace, 1000, adapter ); } @Override @SuppressWarnings( "unchecked" ) public Map.Entry next() { return (Map.Entry) super.next(); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/RedisIterator.java ================================================ package com.pinecone.slime.jelly.source.redis; import redis.clients.jedis.Jedis; import redis.clients.jedis.ScanParams; import redis.clients.jedis.ScanResult; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; public class RedisIterator implements Iterator { private final Jedis mJedis; private final ScanParams mScanParams; private String mCursor; private List mCurrentBatch; private int mCurrentIndex; private IteratorSourceAdapter mSourceAdapter; public RedisIterator( Jedis jedis, String namespace, int batchSize, IteratorSourceAdapter adapter ) { this.mJedis = jedis; this.mScanParams = new ScanParams().match( namespace + "*" ).count( batchSize ); this.mCursor = ScanParams.SCAN_POINTER_START; this.mCurrentBatch = null; this.mCurrentIndex = 0; this.mSourceAdapter = adapter; this.fetchNextBatch(); } public RedisIterator(Jedis jedis, String namespace, IteratorSourceAdapter adapter ) { this( jedis, namespace, 1000, adapter ); } private void fetchNextBatch() { ScanResult scanResult = this.mSourceAdapter.scan( this.mCursor, this.mScanParams ); this.mCurrentBatch = scanResult.getResult(); this.mCursor = scanResult.getCursor(); this.mCurrentIndex = 0; } @Override public boolean hasNext() { // SCAN may legitimately return empty batches before the cursor wraps back to SCAN_POINTER_START, so keep fetching. while ( this.mCurrentBatch == null || this.mCurrentIndex >= this.mCurrentBatch.size() ) { if ( this.mCursor.equals( ScanParams.SCAN_POINTER_START ) ) { return false; } this.fetchNextBatch(); } return true; } @Override public Object next() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } return this.mCurrentBatch.get( this.mCurrentIndex++ ); } } ================================================ FILE: Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/RedisKeysIterator.java ================================================ package com.pinecone.slime.jelly.source.redis; import redis.clients.jedis.Jedis; import java.util.Map; public class RedisKeysIterator extends RedisIterator { public
RedisKeysIterator( Jedis jedis, String namespace, int batchSize, IteratorSourceAdapter adapter ) { super( jedis, namespace, batchSize, adapter ); } public RedisKeysIterator( Jedis jedis, String namespace, IteratorSourceAdapter adapter ) { this( jedis, namespace, 1000, adapter ); } @Override public String next() { Object e = super.next(); if( e instanceof String ) { return ( String ) e; } else { Map.Entry entry = (Map.Entry) e; return (String) entry.getKey(); } } } ================================================ FILE: Pinecones/Jelly/src/test/java/com/TestJelly.java ================================================ package com; import com.pinecone.Pinecone; public class TestJelly { public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/pom.xml ================================================ pinecones com.pinecones 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.pinecone pinecone 2.5.1 jar 11 11 UTF-8 ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/PineTrial.java ================================================ package com.pinecone; import java.math.BigDecimal; import com.pinecone.framework.util.Debug; //import opennlp.tools.ml.maxent.DataStream; //import org.glassfish.jersey.server.internal.scanning.FilesScanner; //import sun.misc.FloatingDecimal; //import sun.nio.ch.WindowsSelectorImpl class SS implements Runnable { public int i = 0; @Override public void run() { for ( int j = 0; j < 1e4; j++ ) { //++i; Debug.trace( Thread.currentThread().getName(), j ); } } } public class PineTrial { public static boolean test(int n){ if (n<2){ return false; } int z = (int)Math.sqrt(n); for (int i = 2; i <= z; i++) { if (n%i == 0){ return false; } } return true; } public static Integer pre(int n) { int temp = 0; while (n > 0) { temp = temp * 10 + (n % 10); n = n / 10; } return temp; } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ // for (int i = 11; i < 100_000_000_0; i++) { // if (i == pre(i) && test(i) ){ // System.out.println(i); // } // } // MySQLExecutor mysql = new MySQLExecutor( new MySQLHost( "localhost/predator", "root", "test", "UTF-8" ) ); //JSONArray tables = mysql.fetch( "SELECT tM.en_word, tM.coca_rank FROM( SELECT tW.en_word, tF.coca_rank FROM predator_mutual_words AS tW LEFT JOIN predator_mutual_words_frequency AS tF ON tW.en_word = tF.en_word WHERE LENGTH(tW.en_word) = 3 ) AS tM WHERE tM.coca_rank IS NOT NULL AND tM.coca_rank <= 20000 AND tM.coca_rank != 0 ORDER BY tM.coca_rank;" ); // JSONArray tables = mysql.fetch( "SELECT tM.en_word, tM.coca_rank FROM( SELECT tW.en_word, tF.coca_rank FROM predator_mutual_words AS tW LEFT JOIN predator_mutual_words_frequency AS tF ON tW.en_word = tF.en_word WHERE LENGTH(tW.en_word) >= 3 AND LENGTH(tW.en_word) <= 5 ) AS tM WHERE tM.coca_rank IS NOT NULL AND tM.coca_rank <= 15000 AND tM.coca_rank != 0 ORDER BY tM.coca_rank;" ); // // JSONArray words = new JSONArraytron(); // for ( int i = 0; i < tables.size(); i++ ) { // String szWord = tables.optJSONObject(i).optString( "en_word" ); // //if( szWord.charAt(0) >= 'a' ) { // words.put( szWord ); // //} // } // // Debug.trace( words ); // SS runnable = new SS(); // // Thread t1 = new Thread( runnable ); // 
Thread t2 = new Thread( runnable ); // // // t1.start(); // // t2.start(); // // Thread.sleep( 100 ); // // Debug.trace( runnable.i ); Debug.redfs((new BigDecimal("8031.12")) .multiply(new BigDecimal(1024)) .multiply(new BigDecimal(1024)) .multiply(new BigDecimal(1024)) .multiply(new BigDecimal(1024)) .multiply(new BigDecimal(1024)) .longValue()); // Debug.trace( ( (Framework)Pinecone.sys().getTaskManager().summon( // Framework.class.getName(), // new Class[]{ String[].class, PrimeSystem.class }, // (Object[]) new String[0], Pinecone.sys() // ) ).getName() ); // ReentrantLock lock = new ReentrantLock(); // Runnable runnable = new Runnable() { // @Override // public void run() { //// for ( int i = 0; i < 1e6; i++ ) { //// lock.lock(); //// Debug.trace( i ); //// lock.unlock(); //// } // Thread thread2 = new Thread(()->{ // //ThreadGroup parentThreadGroup = Thread.currentThread().getThreadGroup().getParent(); // // Debug.trace( Thread.currentThread().getId(), parentThreadGroup. ) // Thread.currentThread().getThreadGroup().list(); // // }); // thread2.start(); // } // }; // // Thread thread1 = new Thread(runnable); // thread1.start(); // // Thread thread2 = new Thread(runnable); // thread2.start(); // LinkedTreeMap linkedTreeMap = new LinkedTreeMap<>(); // LinkedHashSet linkedTreeSet = new LinkedHashSet<>(); // for ( int i = 0; i < 1e6; i++ ) { // int j = new Random().nextInt((int)1e6); // linkedTreeMap.put( j, i ); // linkedTreeSet.add(j); // } // // // //// for ( Integer i : linkedTreeSet ) { //// linkedTreeMap.remove(i); //// } // Integer[] arr = linkedTreeSet.toArray( new Integer[0] ); // // int len = linkedTreeMap.size(); // for ( int i = 0; i < len -20; i++ ) { // //linkedTreeMap.remove( arr[i] ); // linkedTreeMap.removeFirst(); // } // // // int i = 0; // for ( Object kv : linkedTreeMap.entrySet() ) { // ++i; // } // // Debug.trace( linkedTreeMap.size(), i, linkedTreeMap ); // Thread.sleep( 100000 ); // for ( Map.Entry kv : treeMap ) { // Debug.trace( kv ); // } // Debug.trace( JSON.parse( FileUtils.readAll("E:\\MyFiles\\CodeScript\\Project\\Hazelnut\\Sauron\\Saurons\\system\\setup\\PubChem.json5") ) ); // String packageName = "Predator.Wizard.Public.undefined"; // // List classNames = getClassName(packageName); // List classNames = PackageUtils.fetchClassName( packageName ); // if (classNames != null) { // for ( String className : classNames ) { // className = className.substring( className.indexOf(packageName) ); // Class pVoid = Thread.currentThread().getContextClassLoader().loadClass( className ); // Debug.trace(pVoid.getAnnotations()); // } // } // Debug.trace( system.getProperty("user.dir") ); // // HostMatrix illuminationSystem = new HostMatrix("E:/MyFiles/CodeScript/Project/Hazelnut/Predator/Predator/src/Resources/","config.json5"); // // //Debug.trace( illuminationSystem.getSystemConfig() ); // String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); // JSONObject jsonShit = new JSONMaptron(szJson); ///predator_en_w_etymon_derived_linguae /*MySQLExecutor mysql = new MySQLExecutor( new MySQLHost( "localhost/predator", "root", "test", "UTF-8" ) ); JSONArray tables = mysql.fetch( "SELECT * FROM predator_en_w_etymon_derived_linguae" ); for ( Object obj : tables) { JSONObject row = (JSONObject)obj; row.put( "nation", new JSONArraytron() ); } FileWriter fileWriter = new FileWriter( "M:/etymon_derived_linguae.json" ); tables.write( fileWriter ); fileWriter.close();*/ /* MySQLExecutor mysql = new MySQLExecutor( new MySQLHost( "localhost/predator", "root", "test", "UTF-8" ) ); 
JSONArray tables = mysql.fetch( "SELECT * FROM predator_mutual_words_frequency" ); JSONObject jMap = new JSONMaptron(); for ( Object obj : tables) { JSONObject row = (JSONObject)obj; String szWord = row.optString( "en_word" ); jMap.affirmArray( szWord ).put( row ); } FileWriter fileWriter = new FileWriter( "M:/dv/mutual_words_frequency.json" ); tables.write( fileWriter ); fileWriter.close();*/ // ArrayList arrayList = new ArrayList<>(); // for ( int i = 0; i < 1e7; i++ ) { // arrayList.add( new String( new char[4] ) ); // } // // system.out.println( arrayList.size() ); // // system.gc(); // system.gc(); // system.gc(); // system.gc(); // // long nMem = (long)( (double)1 * 1024 * 1024 * 1024 ); // byte[][] magnChars = new byte[8][]; // magnChars[0] = new byte[ (int) nMem ]; // magnChars[1] = new byte[ (int) nMem ]; // magnChars[2] = new byte[ (int) nMem ]; // magnChars[3] = new byte[ (int) nMem ]; // // // Debug.trace( nMem,"Done", framework.getRunTime() ); // // Thread.sleep(1000000); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/Pinecone.java ================================================ package com.pinecone; import com.pinecone.framework.system.Framework; import com.pinecone.framework.system.functions.Function; import com.pinecone.framework.util.io.Tracer; import java.io.InputStream; import java.io.PrintStream; /** * Pinecone Framework For Java (Bean Nuts Pinecone Ursus for Java) * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * Open Source licensed under the GPL. * ***************************************************************************************** * Other information about this framework, such as papers, patents, etc -> http://www.rednest.cn * Warning: This source code is protected by copyright law and international treaties. 
* ***************************************************************************************** * www.nutgit.com/ www.xbean.net / www.rednest.cn * Include Almond, C/CPP, JAVA, PHP, Python, JavaScript, ActionScript, GoLang * ***************************************************************************************** * ;) Hope you enjoy this | Dragon King, the undefined */ public class Pinecone { public static final long VER_PINE = 202506L; public static final String VERSION = "2.5.1"; public static final String RELEASE_DATE = "2025/06/06"; public static final String ROOT_SERVER = "http://www.rednest.cn/"; public static final String MY_PROGRAM_NAME = "Pinecone"; public static final String CONTACT_INFO = "E-Mail:arb#rednest.cn"; public static final boolean S_DEBUG_MODE = true; public static final int FLOAT_ACCURACY = 32; public static final int COMMON_ACCURACY_LIMIT = 10000; public static final Framework PRIME_SYSTEM = new Framework(); public static int init ( Function fnInlet, Object...args ) throws Exception { return Pinecone.PRIME_SYSTEM.init( fnInlet, args ); } public static Framework sys(){ return Pinecone.PRIME_SYSTEM; } public static Tracer console() { return Pinecone.sys().console(); } public static PrintStream out() { return Pinecone.console().getOut(); } public static PrintStream err() { return Pinecone.console().getOut(); } public static InputStream in() { return Pinecone.sys().in(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/NamedInheritableThreadLocal.java ================================================ package com.pinecone.framework.lang; import com.pinecone.framework.util.Assert; public class NamedInheritableThreadLocal extends InheritableThreadLocal { private final String name; public NamedInheritableThreadLocal( String name ) { Assert.hasText( name, "Name must not be empty" ); this.name = name; } @Override public String toString() { return this.name; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/NamedThreadLocal.java ================================================ package com.pinecone.framework.lang; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.Assert; public class NamedThreadLocal extends ThreadLocal implements Pinenut { private final String name; public NamedThreadLocal( String name ) { Assert.hasText(name, "Name must not be empty"); this.name = name; } @Override public String toString() { return this.name; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/DataStructureEntity.java ================================================ package com.pinecone.framework.lang.field; public interface DataStructureEntity extends SegmentEntity { String StructureNameKey = "__NAME__"; int getStartOffset(); int getTextOffset(); int getDataOffset(); void setTextOffset( int offset ); void setDataOffset( int offset ); boolean isEmpty(); int size(); int capacity(); void resize( int newSize ); FieldEntity[] getFields(); FieldEntity[] getSegments(); void setTextField( int index, FieldEntity field ) ; void setDataField( int index, FieldEntity field ) ; void setTextField( int index, String key, Object val ); void setDataField( int index, String key, Object val ); void setDataField( int index, String key, Object val, String genericLabel ); void setTextField( int index, String key, Class type ); void setDataField( int index, String key, 
Class type ); void setDataField( int index, String key, Class type, String genericLabel ); FieldEntity getTextField( int index ); FieldEntity getDataField( int index ); FieldEntity findTextField( String key ); FieldEntity findDataField( String key ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/FieldEntity.java ================================================ package com.pinecone.framework.lang.field; import java.util.Arrays; import java.util.Map; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.ReflectionUtils; public interface FieldEntity extends Pinenut { String getName(); Class getType(); Object getValue(); String getGenericTypeLabel(); default String[] getGenericTypeNames() { return ReflectionUtils.extractGenericClassNames( this.getGenericTypeLabel() ); } void applyGenericTypeLabel( String genericTypeLabel ); default boolean hasDeclaredGenericType() { return this.getGenericTypeLabel() != null && this.getGenericTypeLabel().contains( "<" ) && this.getGenericTypeLabel().contains( ">" ); } void setValue( Object value ); static FieldEntity[] typeFrom( Map map ) { FieldEntity[] entities = new FieldEntity[ map.size() ]; int i = 0; for( Object em : map.entrySet() ) { Map.Entry kv = (Map.Entry) em; entities[ i ] = new GenericFieldEntity( kv.getKey().toString(), kv.getValue().getClass() ); ++i; } return entities; } static FieldEntity[] from( Map map ) { FieldEntity[] entities = new FieldEntity[ map.size() ]; int i = 0; for( Object em : map.entrySet() ) { Map.Entry kv = (Map.Entry) em; entities[ i ] = new GenericFieldEntity( kv.getKey().toString(), kv.getValue() ); ++i; } return entities; } static FieldEntity[] from( Class[] parameters ) { FieldEntity[] entities = new FieldEntity[ parameters.length ]; int i = 0; for( Class parameter : parameters ) { entities[ i ] = new GenericFieldEntity( parameter.getName().replace( ".", "_" ) + "_" + i, parameter.getComponentType() ); ++i; } return entities; } default FieldEntity[] copy( FieldEntity[] that ) { return Arrays.copyOf( that, that.length ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/GenericFieldEntity.java ================================================ package com.pinecone.framework.lang.field; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSON; public class GenericFieldEntity implements FieldEntity { protected String mszName; protected Class mType; protected String mszGenericTypeLabel; protected Object mValue; public GenericFieldEntity( String szName, Object value, Class type, String genericTypeLabel ) { this.mszName = szName; this.mType = type; this.mValue = value; this.mszGenericTypeLabel = genericTypeLabel; } public GenericFieldEntity( String szName, Object value, Class type ) { this( szName, value, type, null ); } public GenericFieldEntity( String szName, Object value ) { this( szName, value, value.getClass() ); } public GenericFieldEntity( String szName, Class type ) { this( szName, null, type ); } @Override public String getName() { return this.mszName; } @Override public Class getType() { return this.mType; } @Override public String getGenericTypeLabel() { return this.mszGenericTypeLabel; } @Override public void applyGenericTypeLabel( String genericTypeLabel ) { this.mszGenericTypeLabel = genericTypeLabel; } @Override public Object getValue() { return this.mValue; } @Override public void setValue( 
Object value ) { this.mValue = value; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return "{" + StringUtils.jsonQuote( this.mszName ) + ":" + JSON.stringify( this.mValue ) + "}"; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/GenericStructure.java ================================================ package com.pinecone.framework.lang.field; import java.util.Arrays; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSON; public class GenericStructure implements DataStructureEntity { protected FieldEntity[] mSegments; protected int mnTextOffset; protected int mnDataOffset; public GenericStructure( String szName, int nTextOffset, int nDataOffset ,int nElements ) { if ( nDataOffset < 1 || nTextOffset >= nDataOffset ) { throw new IllegalArgumentException( "DataOffset must be at least 1 and greater than TextOffset." ); } this.mSegments = new FieldEntity[ nDataOffset - nTextOffset + nElements ]; this.mSegments[ 0 ] = new GenericFieldEntity( DataStructureEntity.StructureNameKey, szName, String.class ); this.mnTextOffset = nTextOffset; this.mnDataOffset = nDataOffset; } public GenericStructure( String szName ,int nElements ) { this( szName, 0, 1 , nElements ); } public GenericStructure( FieldEntity[] segments, int nTextOffset, int nDataOffset ) { this.mSegments = segments; this.mnTextOffset = nTextOffset; this.mnDataOffset = nDataOffset; } @Override public String getName() { return (String) this.mSegments[ 0 ].getValue(); } @Override public String getSimpleName() { String sz = this.getName(); String[] debris = sz.split( "\\.|\\/" ); if( debris.length > 1 ) { return debris[ 1 ]; } return sz; } @Override public int getStartOffset() { return 0; } @Override public int getTextOffset() { return this.mnTextOffset; } @Override public int getDataOffset() { return this.mnDataOffset; } @Override public void setTextOffset( int offset ) { if( offset < 0 ) { return; } if ( offset > this.mnTextOffset ) { int legacySize = this.mSegments.length - this.mnTextOffset; this.resize( offset + this.mSegments.length ); System.arraycopy( this.mSegments, this.mnTextOffset, this.mSegments, offset, legacySize ); for( int i = 0; i < offset; i++ ){ this.mSegments[ i ] = null; } } if( offset < this.mnTextOffset ) { int length = this.mnDataOffset - this.mnTextOffset; System.arraycopy( this.mSegments, this.mnTextOffset, this.mSegments, offset, length ); for( int i = offset + length; i < this.mnDataOffset; i++ ){ this.mSegments[ i ] = null; } } this.mnDataOffset = offset - this.mnTextOffset + this.mnDataOffset; this.mnTextOffset = offset; } @Override public void setDataOffset( int offset ) { if( offset <= 1 ) { return; } if ( offset > this.mnDataOffset ) { this.resize( this.size() + offset - this.mnDataOffset ); System.arraycopy( this.mSegments, this.mnDataOffset, this.mSegments, offset, this.mSegments.length - offset ); for ( int i = this.mnDataOffset ; i < offset; ++i ) { this.mSegments[ i ] = null; } } if( offset < this.mnDataOffset ){ this.trimResize( this.mSegments.length - ( this.mnDataOffset - offset ), offset ); } this.mnDataOffset = offset; } @Override public boolean isEmpty() { return this.mnDataOffset <= 0; } @Override public int size() { return this.mSegments.length - this.mnDataOffset; } @Override public int capacity() { return this.mSegments.length; } @Override public void resize( int newSize ) { if ( newSize + this.mnDataOffset <= this.mSegments.length )
{ throw new IllegalArgumentException( "New size must be greater than current size." ); } FieldEntity[] newSegments = new FieldEntity[ newSize + this.mnDataOffset ]; System.arraycopy( this.mSegments, 0, newSegments, 0, this.mSegments.length ); this.mSegments = newSegments; } @Override public FieldEntity[] getFields() { return Arrays.copyOfRange( this.mSegments, this.mnDataOffset, this.mSegments.length ); } @Override public FieldEntity[] getSegments() { return this.mSegments; } @Override public void setTextField( int index, FieldEntity field ) { if ( index < this.mnTextOffset || index >= this.mnDataOffset ) { throw new IndexOutOfBoundsException( "Text segment index out of bounds." ); } this.mSegments[ this.mnTextOffset + index ] = field; } @Override public void setDataField( int index, FieldEntity field ) { int dataEnd = this.mSegments.length; if ( index >= dataEnd - this.mnDataOffset ) { throw new IndexOutOfBoundsException( "Data segment index out of bounds." ); } this.mSegments[ this.mnDataOffset + index ] = field; } @Override public void setTextField( int index, String key, Object val ) { FieldEntity legacy = this.getTextField( index ); FieldEntity neo = null; if( legacy != null ) { if( legacy.getName().equals( key ) ) { legacy.setValue( val ); return; } } neo = new GenericFieldEntity( key, val ); this.setTextField( index, neo ); } @Override public void setDataField( int index, String key, Object val ) { this.setDataField( index, key, val, null ); } @Override public void setDataField( int index, String key, Object val, String genericLabel ) { FieldEntity legacy = this.getDataField( index ); FieldEntity neo = null; if( legacy != null ) { if( key.equals( legacy.getName() ) ) { legacy.setValue( val ); return; } } if ( genericLabel == null ) { neo = new GenericFieldEntity( key, val ); } else { neo = new GenericFieldEntity( key, val, val.getClass(), genericLabel ); } this.setDataField( index, neo ); } @Override public void setTextField( int index, String key, Class type ) { this.setTextField( index, new GenericFieldEntity( key, type ) ); } @Override public void setDataField( int index, String key, Class type ) { this.setDataField( index, new GenericFieldEntity( key, type ) ); } @Override public void setDataField( int index, String key, Class type, String genericLabel ) { this.setDataField( index, new GenericFieldEntity( key, null, type, genericLabel ) ); } @Override public FieldEntity getDataField( int index ) { return this.mSegments[ this.mnDataOffset + index ]; } @Override public FieldEntity getTextField( int index ) { return this.mSegments[ this.mnTextOffset + index ]; } @Override public FieldEntity findTextField( String key ) { return this.findField( key, this.mnTextOffset ); } @Override public FieldEntity findDataField( String key ) { return this.findField( key, this.mnDataOffset ); } protected FieldEntity findField( String key, int offset ) { for ( int i = offset; i < this.mSegments.length; ++i ) { FieldEntity entity = this.mSegments[ i ]; if( entity == null ) { continue; } if( entity.getName() == (Object)key ) { return entity; } if( entity.getName() != null && entity.getName().equals( key ) ) { return entity; } } return null; } @Override public String toJSONString() { StringBuilder sb = new StringBuilder(); sb.append( '{' ); for( int i = this.mnDataOffset; i < this.mSegments.length; ++i ) { FieldEntity entity = this.mSegments[ i ]; if( entity != null ) { sb.append( StringUtils.jsonQuote( entity.getName() ) ); sb.append( ':' ); sb.append( JSON.stringify( entity.getValue() ) ); sb.append( ',' ); } } if( sb.charAt(
sb.length() - 1 ) == ',' ) { sb.deleteCharAt( sb.length() - 1 ); } sb.append( '}' ); return sb.toString(); } protected void trimResize( int newSize, int newDataOffset ){ FieldEntity[] newSegments = new FieldEntity[newSize]; System.arraycopy( this.mSegments, this.mnDataOffset, newSegments, newDataOffset, this.mSegments.length - this.mnDataOffset ); System.arraycopy( this.mSegments, this.mnTextOffset, newSegments, this.mnTextOffset, newDataOffset - this.mnTextOffset ); this.mSegments = newSegments; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/SegmentEntity.java ================================================ package com.pinecone.framework.lang.field; import com.pinecone.framework.system.prototype.Pinenut; public interface SegmentEntity extends Pinenut { String getName(); String getSimpleName(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ApoptosisRejectSignalException.java ================================================ package com.pinecone.framework.system; public class ApoptosisRejectSignalException extends PineRuntimeException { public ApoptosisRejectSignalException() { super(); } public ApoptosisRejectSignalException( String message ) { super( message ); } public ApoptosisRejectSignalException( String message, Throwable cause ) { super( message, cause ); } public ApoptosisRejectSignalException( Throwable cause ) { super(cause); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/AssertionRuntimeException.java ================================================ package com.pinecone.framework.system; public class AssertionRuntimeException extends PineRuntimeException { public AssertionRuntimeException() { super(); } public AssertionRuntimeException( String message ) { super( message ); } public AssertionRuntimeException( String message, Throwable cause ) { super( message, cause ); } public AssertionRuntimeException( Throwable cause ) { super(cause); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/AsynSystem.java ================================================ package com.pinecone.framework.system; public interface AsynSystem extends RuntimeSystem { void handleAsynLiveException( Exception e ) throws ProvokeHandleException; void handleAsynKillException( Exception e ) throws ProvokeHandleException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/CascadeSystem.java ================================================ package com.pinecone.framework.system; public interface CascadeSystem extends RuntimeSystem { CascadeSystem rootSystem(); CascadeSystem getParent(); default long getPrimaryId() { return 0; } default boolean isPrimarySystem() { return this.getPrimaryId() == this.getSystemId(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ConformitySystem.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.util.config.SysConfigson; public interface ConformitySystem extends RuntimeSystem { SysConfigson getGlobalConfig() ; SysConfigson getSystemConfig() ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ErrorStrings.java 
================================================ package com.pinecone.framework.system; public abstract class ErrorStrings { public static final String E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED = "Compromised attempts: included path and its parent context are all invalid."; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Experimental.java ================================================ package com.pinecone.framework.system; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.TYPE, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Experimental { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Framework.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.functions.Function; import com.pinecone.Pinecone; import com.pinecone.framework.util.config.JSONSystemConfig; import com.pinecone.framework.util.config.StartupCommandParser; import com.pinecone.framework.util.io.Tracer; import com.pinecone.framework.util.io.Tracerson; import com.pinecone.framework.util.json.JSONException; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintStream; import java.nio.file.Path; import java.util.Map; import java.util.Set; public class Framework extends ArchProcessum implements Pinecore { public static final String DEFAULT_MAIN_CONFIG_FILE_NAME = "config"; // System properties protected JSONSystemConfig mjoGlobalConfig ; protected JSONSystemConfig mjoSystemConfig ; // Startup & Environment properties protected String mszMajorPackagePath ; // The path of this class file. protected String mszRuntimeContextPath ; // System startup context path, the 'user.dir'. protected String mszRuntimePath ; // System real runtime path. protected Map mStartupCommandMap ; protected Map mEnvironmentVars ; protected String[] mStartupCommand ; protected Thread mMainThread ; protected InputStream mIn = System.in ; protected OutputStream mOut = System.out ; protected Tracer mConsole = new Tracerson(); private long mnBootTime ; private Function mfnAfterGlobalExpCaught = new Function() { @Override public Object invoke( Object... obj ) throws Exception { Framework.this.console().cerr( "Unhandled exception in \"" + Framework.this.getAffiliateThread().getName() + "\" : \n" ); ( ( Throwable ) obj[0] ).printStackTrace(); return null; } }; private ClassLoader mGlobalClassLoader ; protected void setStartupCommand( String[] args ) { if( args == null ) { args = new String[0]; } this.mStartupCommand = args; this.mStartupCommandMap = StartupCommandParser.DefaultParser.parse( args ); } protected void dispatchStartupCommand() { } protected Thread searchMainThread() { Set all = this.fetchAllProcessThreads(); Thread main = null; Thread tid1 = null; for( Thread thread : all ) { if( // The thread name can be modified, so it is hard to believe all those conditions would be mismatched, Jesus! Who would ever do that...
thread.getName().equals( "main" ) && !thread.isDaemon() && Thread.currentThread().getThreadGroup().getName().equals( "main" ) && Thread.currentThread().getThreadGroup().getParent().getName().equals("system") ){ main = thread; } if( thread.getId() == 1 ) { tid1 = thread; } } if( main == null ) { this.console().warn( "[PineconeLifecycle] [WARN] System main thread not found, will use thread[id=1] as main thread." ); main = tid1; } return main; } private File findDefaultConfigFile() { String szDefaultConfMajorPath = Path.of( this.getRuntimePath(), Framework.DEFAULT_MAIN_CONFIG_FILE_NAME ).toString(); String szDefaultConfFilePath = szDefaultConfMajorPath + ".json5"; File f = new File( szDefaultConfFilePath ); if( f.exists() ) { return f; } szDefaultConfFilePath = szDefaultConfMajorPath + ".json"; f = new File( szDefaultConfFilePath ); if( f.exists() ) { return f; } return null; } protected void loadConfig() { if( this.mjoGlobalConfig == null ) { File f = this.findDefaultConfigFile(); if( f != null ) { try{ this.mjoGlobalConfig = new JSONSystemConfig( this ); this.mjoGlobalConfig.apply( f ); this.mjoSystemConfig = this.mjoGlobalConfig.getChild( "System" ); } catch ( IOException | JSONException e ) { this.handleIgnoreException( e ); } } // Fall back to empty configs only when no usable configuration was loaded. if( this.mjoSystemConfig == null ) { if( this.mjoGlobalConfig == null ) { this.mjoGlobalConfig = new JSONSystemConfig( this ); } this.mjoSystemConfig = new JSONSystemConfig( this ); this.mjoGlobalConfig.put( "System", this.mjoSystemConfig ); } } this.mExceptionRestartTime = this.getSystemConfig().optInt( "ExceptionRestartTime", 0 ); } protected void onlyLoadTaskManager() { this.mTaskManager = new GenericMasterTaskManager( this ); } protected void init() { this.traceWelcomeInfo(); this.mszMajorPackagePath = this.getClass().getProtectionDomain().getCodeSource().getLocation().getPath(); this.mszRuntimeContextPath = System.getProperty("user.dir"); this.mszRuntimePath = this.mszRuntimeContextPath; this.mMainThread = this.searchMainThread(); this.mGlobalClassLoader = this.mMainThread.getContextClassLoader(); this.mEnvironmentVars = StartupCommandParser.DefaultParser.parse( System.getenv() ); this.setThreadAffinity( Thread.currentThread() ); this.loadConfig(); this.onlyLoadTaskManager(); } @Override public Map getStartupCommandMap() { return this.mStartupCommandMap; } @Override public Map getEnvironmentVars() { return this.mEnvironmentVars; } public Framework(){ this( new String[0], null, null ); } public Framework( String[] args ){ this( args, null, null ); } public Framework( String[] args, String szName ){ this( args, szName, null ); } public Framework( String[] args, CascadeSystem parent ){ this( args, null, parent ); } public Framework( String[] args, String szName, CascadeSystem parent ){ this( szName, parent ); this.setStartupCommand( args ); this.init(); } public Framework( String szName, CascadeSystem parent ) { super( szName, parent ); } public void registerPineExpCatcher( Function fn ){ this.mfnAfterGlobalExpCaught = fn; } public long getBootTime(){ return this.mnBootTime; } public long getRunTime(){ // This function is used to calculate the program run time. return System.currentTimeMillis() - this.mnBootTime; } public void traceRunTime() { System.out.print( String.format( "\n%s Runtime : %d /ms !\n", Pinecone.MY_PROGRAM_NAME, this.getRunTime() ) ); } private void initCommit() throws Throwable { this.mnBootTime = System.currentTimeMillis(); } protected Object invokeInitHandle( Function fnInlet, Object...args ) throws Exception { this.setStartupCommand( (String[]) (Object[])args ); this.dispatchStartupCommand(); int nRetNum = 0; try {
    @Override
    public Map getStartupCommandMap() { return this.mStartupCommandMap; }

    @Override
    public Map getEnvironmentVars() { return this.mEnvironmentVars; }

    public Framework(){
        this( new String[0], null, null );
    }

    public Framework( String[] args ){
        this( args, null, null );
    }

    public Framework( String[] args, String szName ){
        this( args, szName, null );
    }

    public Framework( String[] args, CascadeSystem parent ){
        this( args, null, parent );
    }

    public Framework( String[] args, String szName, CascadeSystem parent ){
        this( szName, parent );
        this.setStartupCommand( args );
        this.init();
    }

    public Framework( String szName, CascadeSystem parent ) {
        super( szName, parent );
    }

    public void registerPineExpCatcher( Function fn ){
        this.mfnAfterGlobalExpCaught = fn;
    }

    public long getBootTime(){
        return this.mnBootTime;
    }

    // Calculates how long the program has been running.
    public long getRunTime(){
        return System.currentTimeMillis() - this.mnBootTime;
    }

    public void traceRunTime() {
        System.out.print( String.format( "\n%s Runtime : %d /ms !\n", Pinecone.MY_PROGRAM_NAME, this.getRunTime() ) );
    }

    private void initCommit() throws Throwable {
        this.mnBootTime = System.currentTimeMillis();
    }

    protected Object invokeInitHandle( Function fnInlet, Object...args ) throws Exception {
        this.setStartupCommand( (String[]) (Object[])args );
        this.dispatchStartupCommand();
        int nRetNum = 0;
        try {
            this.initCommit();
            nRetNum = (int) fnInlet.invoke( args );
            if( Pinecone.S_DEBUG_MODE ){
                this.traceRunTime();
            }
        }
        catch ( Throwable throwable ){
            try{
                this.handleRootKillException( throwable );
            }
            catch ( RestartSignalException e ) {
                this.handleIgnoreException( e );
                return e;
            }
            nRetNum = -1;
        }
        return nRetNum;
    }

    public int init ( Function fnInlet, Object...args ) throws Exception {
        Object ret = null;
        while ( true ) {
            ret = this.invokeInitHandle( fnInlet, args );
            if( !(ret instanceof RestartSignalException) ) {
                return (int) ret;
            }
            else {
                RestartSignalException e = (RestartSignalException) ret;
                this.console().warn( String.format( "[PineconeLifecycle] [WARN] System restart [Time: %s] [What:<%s>:%s]", this.mExceptionRestartCount, e.getCause().getClass().getSimpleName(), e.getCause().getMessage() ) );
                e.getCause().printStackTrace();
            }
        }
    }
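    // Bootstrap sketch (the MyApp subclass and its inlet are hypothetical; init(...), Function and
    // pout() are from this file): the loop in init(...) re-invokes the inlet each time
    // invokeInitHandle(...) surfaces a RestartSignalException, i.e. while mExceptionRestartCount
    // is still below the configured "ExceptionRestartTime". Note the inlet must return an int,
    // because invokeInitHandle(...) casts its result.
    //
    //   public final class MyApp extends Framework {
    //       public MyApp( String[] args ) { super( args, "my-app", null ); }
    //
    //       public static void main( String[] args ) throws Exception {
    //           final MyApp app = new MyApp( args );
    //           int code = app.init( new Function() {
    //               @Override
    //               public Object invoke( Object... a ) throws Exception {
    //                   app.pout().println( "booted" );
    //                   return 0; // Must be an int: invokeInitHandle(...) casts the result.
    //               }
    //           }, (Object[]) args ); // Pass the String[] itself, so the (String[]) cast inside holds.
    //           System.exit( code );
    //       }
    //   }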
    @Override
    public ClassLoader getGlobalClassLoader() { return this.mGlobalClassLoader; }

    @Override
    public void setGlobalClassLoader( ClassLoader classLoader ) { this.mGlobalClassLoader = classLoader; }

    public InputStream in(){ return this.mIn; }

    public InputStream inSync(){
        this.mResourceLock.readLock().lock();
        try{ return this.in(); }
        finally { this.mResourceLock.readLock().unlock(); }
    }

    public OutputStream out(){ return this.mOut; }

    public PrintStream pout(){
        try{ return (PrintStream) this.mOut; }
        catch ( ClassCastException e ) { return new PrintStream( this.mOut ); }
    }

    public OutputStream outSync(){
        this.mResourceLock.readLock().lock();
        try{ return this.out(); }
        finally { this.mResourceLock.readLock().unlock(); }
    }

    @Override
    public Tracer console() { return this.mConsole; }

    public Tracer consoleSync() {
        this.mResourceLock.readLock().lock();
        Tracer tracer = this.console();
        this.mResourceLock.readLock().unlock();
        return tracer;
    }

    public Tracer setConsole( Tracer tracer ) {
        this.mResourceLock.writeLock().lock();
        this.mConsole = tracer;
        this.mResourceLock.writeLock().unlock();
        return this.mConsole;
    }

    public InputStream setIn( InputStream in ) {
        this.mResourceLock.writeLock().lock();
        this.mIn = in;
        this.mResourceLock.writeLock().unlock();
        return this.mIn;
    }

    public OutputStream setOut( OutputStream out ) {
        this.mResourceLock.writeLock().lock();
        this.mOut = out;
        this.mResourceLock.writeLock().unlock();
        return this.mOut;
    }

    protected void traceWelcomeInfo() {
    }

    @Override
    public CascadeSystem parentExecutum(){ return (CascadeSystem)super.parentExecutum(); }

    @Override
    public String[] getStartupCommand(){ return this.mStartupCommand; }

    @Override
    public String getMajorPackagePath() { return this.mszMajorPackagePath; }

    @Override
    public String getRuntimeContextPath() { return this.mszRuntimeContextPath; }

    @Override
    public String getRuntimePath() { return this.mszRuntimePath; }

    @Override
    public void setRuntimePath( String szRealRuntimePath ){ this.mszRuntimePath = szRealRuntimePath; }

    @Override
    public JSONSystemConfig getGlobalConfig() { return this.mjoGlobalConfig; }

    @Override
    public JSONSystemConfig getSystemConfig() { return this.mjoSystemConfig; }

    @Override
    public CascadeSystem rootSystem(){
        CascadeSystem system = this.getParent();
        CascadeSystem root = system;
        while ( true ) {
            if( system != null ){
                root = system;
                system = system.getParent();
            }
            else {
                break;
            }
        }
        return root;
    }

    @Override
    public long getPrimaryId() {
        if( this.getSystemId() == 0 ) {
            return this.getSystemId();
        }
        CascadeSystem root = this.rootSystem();
        if( root == null ) {
            this.console().warn( "[PineconeLifecycle] [WARN] Id of the primary system should always be 0." );
            return this.getSystemId();
        }
        return root.getPrimaryId();
    }

    @Override
    public CascadeSystem getParent(){ return (CascadeSystem)this.mParentSystem; }

    @Override
    public Thread getProcessMainThread() { return this.mMainThread; }

    @Override
    public void handleLiveException( Exception e ) throws ProvokeHandleException {
        this.console().warn( e.toString() );
    }

    @Override
    public void handleAsynLiveException( Exception e ) throws ProvokeHandleException {
    }

    @Override
    public void handleAsynKillException( Exception e ) throws ProvokeHandleException {
    }

    // Lifecycle
    protected void handleRootKillException( Throwable e ) throws RestartSignalException {
        try{
            this.mfnAfterGlobalExpCaught.invoke( e );
        }
        catch ( Exception e1 ) {
            e = e1;
        }
        if( e instanceof InstantKillError ) {
            this.kill();
        }
        if( e instanceof Error ) {
            this.kill();
        }
        if( e instanceof Exception ) {
            if( this.mExceptionRestartCount < this.mExceptionRestartTime ) {
                ++this.mExceptionRestartCount;
                throw new RestartSignalException( e );
            }
            else {
                this.kill();
            }
        }
    }

    protected void beforeReluctantDeath() {
    }

    @Override
    public void entreatLive() {
        this.beforeReluctantDeath();
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/GenericMasterTaskManager.java
================================================
package com.pinecone.framework.system;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.Phaser;
import java.util.concurrent.TimeUnit;

import com.pinecone.framework.system.executum.EventedTaskManager;
import com.pinecone.framework.system.executum.ExclusiveProcessum;
import com.pinecone.framework.system.executum.Executum;
import com.pinecone.framework.system.executum.Processum;
import com.pinecone.framework.system.executum.VitalResource;

public class GenericMasterTaskManager implements EventedTaskManager {
    protected Processum mParentProcessum ;
    protected ClassLoader mClassLoader ;
    protected RuntimeSystem mSystem;

    protected Map<Object, Executum> mExecutumPool = new ConcurrentHashMap<>();
    protected Map<String, ExclusiveProcessum> mExclusiveTasks = new ConcurrentHashMap<>();
    protected Map<Object, VitalResource> mVitalResourcePool = new ConcurrentHashMap<>();

    protected long mnVitalizeCount = 0;
    protected long mnFatalityCount = 0;
    protected long mnMaxWaitApoptosis = 5000;

    protected final Object mTerminationLock = new Object();
    protected BlockingDeque<Executum> mSyncApoptosisQueue = new LinkedBlockingDeque<>();
    protected Phaser mFinishingPhaser = new Phaser( 1 );

    public GenericMasterTaskManager( Processum parent, ClassLoader classLoader ) {
        this.mParentProcessum = parent;
        if( parent instanceof RuntimeSystem ) {
            this.mSystem = (RuntimeSystem) parent;
        }
        else {
            this.mSystem = parent.parentSystem();
        }
        this.mClassLoader = classLoader;
    }

    public GenericMasterTaskManager( Processum parent ) {
        this( parent, null );
        if( this.mSystem != null ) {
            this.mClassLoader = this.mSystem.getGlobalClassLoader();
        }
        else {
            this.mClassLoader = Thread.currentThread().getContextClassLoader();
        }
    }

    protected BlockingDeque<Executum> getSyncApoptosisQueue(){ return this.mSyncApoptosisQueue; }

    public Map<Object, Executum> getExecutumPool() { return this.mExecutumPool; }

    public Map<String, ExclusiveProcessum> getExclusiveTasks() { return this.mExclusiveTasks; }

    @Override
    public Processum getParentProcessum () { return this.mParentProcessum; }

    @Override
    public RuntimeSystem getSystem() { return this.mSystem; }
    @Override
    public ClassLoader getClassLoader() { return this.mClassLoader; }

    @Override
    public Map<Object, VitalResource> getVitalResources() { return this.mVitalResourcePool; }

    @Override
    public void executeZionSequence() {
        Map<Object, VitalResource> map = this.getVitalResources();
        for ( Map.Entry<Object, VitalResource> kv : map.entrySet() ) {
            VitalResource resource = kv.getValue();
            try{
                resource.store();
            }
            catch ( Throwable e ) {
                System.err.println( String.format( "[FatesCriticalWarn] [VitalResource: %s, Id: %d] [StoreFailed]", resource.getName(), resource.getId() ) );
            }
        }
    }

    @Override
    public void sendApoptosisSignal() {
        for ( Map.Entry<Object, Executum> kv : this.getExecutumPool().entrySet() ) {
            kv.getValue().apoptosis();
        }
    }

    protected void killAll() {
        if( !this.isTerminated() ) {
            for ( Map.Entry<Object, Executum> kv : this.getExecutumPool().entrySet() ) {
                kv.getValue().kill();
            }
            if( !this.mFinishingPhaser.isTerminated() ) {
                this.mFinishingPhaser.forceTermination();
            }
        }
    }

    @Override
    public void terminate() {
        this.executeZionSequence();
        this.killAll();
    }

    @Override
    public void suspendAll() {
        for ( Map.Entry<Object, Executum> kv : this.getExecutumPool().entrySet() ) {
            kv.getValue().suspend();
        }
    }

    @Override
    public void resumeAll() {
        for ( Map.Entry<Object, Executum> kv : this.getExecutumPool().entrySet() ) {
            kv.getValue().resume();
        }
    }

    @Override
    public int size(){ return this.getExecutumPool().size(); }

    @Override
    public boolean isPooled(){ return true; }

    @Override
    public long getVitalizeCount() { return this.mnVitalizeCount; }

    @Override
    public long getFatalityCount() { return this.mnFatalityCount; }

    @Override
    public Executum add( Executum that ){
        this.getExecutumPool().put( that.getExecutumId(), that );
        if( that instanceof ExclusiveProcessum ) {
            this.getExclusiveTasks().put( that.getName(), (ExclusiveProcessum) that );
        }
        return that;
    }

    @Override
    public void erase( Executum that ){
        if( this.autopsy( that ) ) {
            this.getExecutumPool().remove( that.getExecutumId() );
            this.getExclusiveTasks().remove( that.getName() );
            ++this.mnFatalityCount;
        }
        else {
            throw new IllegalStateException( "Executum is still alive." );
        }
    }

    @Override
    public void purge() {
        this.terminate();
        this.getExecutumPool().clear();
        this.getVitalResources().clear();
        this.getExclusiveTasks().clear();
    }

    @Override
    public boolean isTerminated(){
        boolean b = true;
        for ( Map.Entry<Object, Executum> kv : this.getExecutumPool().entrySet() ) {
            Thread primaryAffiliateThread = kv.getValue().getAffiliateThread();
            if( primaryAffiliateThread != null ) { // A null affiliate thread means an uninitialized executum.
                if( !primaryAffiliateThread.isDaemon() ) {
                    b &= kv.getValue().isTerminated();
                }
            }
        }
        return b;
    }

    @Override
    public void syncWaitingTerminated() throws Exception {
        this.mFinishingPhaser.arriveAndAwaitAdvance();
        if( !this.isTerminated() ){
            while ( true ) {
                if( this.isTerminated() ) {
                    break;
                }
                synchronized ( this.mTerminationLock ) {
                    this.mTerminationLock.wait( 30 );
                }
            }
        }
    }
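    // Usage sketch (parentProcessum and "com.example.MyTask" are hypothetical; the calls are from
    // this class): summon(...) keys exclusive tasks by the trailing segment of the class path, so
    // a second summon of the same class returns the cached instance instead of spawning a new one.
    //
    //   GenericMasterTaskManager tm = new GenericMasterTaskManager( parentProcessum );
    //   Executum task = tm.summon( "com.example.MyTask" );
    //   tm.apoptosis( task );         // Graceful: waits up to mnMaxWaitApoptosis ms for the suicide ack, then kills.
    //   tm.syncWaitingTerminated();   // Blocks until every non-daemon executum has finished.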
    protected Executum spawn ( String szClassPath, Object... args ) {
        Executum obj = null;
        try {
            Class[] paramTypes;
            if( args.length > 0 && args[0] instanceof Class[] ) {
                paramTypes = (Class[]) args[0];
                Object[] neoArgs = new Object[ args.length - 1 ];
                for ( int i = 0; i < neoArgs.length; i++ ) {
                    neoArgs[i] = args[i+1];
                }
                args = neoArgs;
            }
            else {
                paramTypes = new Class[ args.length ];
                for ( int i = 0; i < args.length; i++ ) {
                    paramTypes[i] = args[i].getClass();
                }
            }
            Class pVoid = this.getClassLoader().loadClass( szClassPath );
            try{
                Constructor constructor = pVoid.getConstructor( paramTypes );
                obj = (Processum) constructor.newInstance( args );
            }
            catch ( NoSuchMethodException | InvocationTargetException e1 ){
                this.getSystem().handleLiveException( e1 );
            }
        }
        catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){
            this.getSystem().handleLiveException( e );
        }
        return obj;
    }

    @Override
    public Executum summon ( String szClassPath, Object... args ) throws Exception {
        String[] debris = szClassPath.split( "\\." ); // split(...) takes a regex, so the dot must be escaped.
        String szTaskName = debris[ debris.length - 1 ];
        Executum obj = (Executum) this.getExclusiveTasks().get( szTaskName );
        if( obj != null ) {
            return obj;
        }
        obj = this.spawn( szClassPath, args );
        if( obj != null ) { // spawn(...) reports construction failures and may return null.
            this.add( obj );
            ++this.mnVitalizeCount;
        }
        return obj;
    }

    @Override
    public void kill ( Executum that ) {
        that.kill();
        this.erase( that );
    }

    protected boolean isApproveLifeRenewal( ApoptosisRejectSignalException e ) {
        return true; // TODO
    }

    @Override
    public void apoptosis ( Executum that ) {
        try{
            that.apoptosis();
        }
        catch ( ApoptosisRejectSignalException e ) {
            if( this.isApproveLifeRenewal( e ) ) {
                return;
            }
            else {
                try {
                    that.apoptosis();
                }
                catch ( ApoptosisRejectSignalException e1 ) {
                    // No more waiting, just going to die.
                    System.err.println( String.format( "[FatesCriticalWarn] [Executum: %d] [ForceApoptosis]", that.hashCode() ) );
                }
            }
        }
        try{
            Executum suspect = this.getSyncApoptosisQueue().poll( this.mnMaxWaitApoptosis, TimeUnit.MILLISECONDS );
            if( suspect == that ) {
                this.kill( that );
            }
        }
        catch ( InterruptedException e ) {
            this.kill( that );
        }
    }

    @Override
    public void commitSuicide ( Executum that ){
        this.getSyncApoptosisQueue().add( that );
    }

    @Override
    public boolean autopsy ( Executum that ) {
        return true; //TODO
    }

    @Override
    public String nomenclature ( Thread that ) {
        return String.format( "proc-%s-%s", this.getParentProcessum().getName(), that.getName() ).toLowerCase();
    }

    @Override
    public void notifyFinished ( Executum that ){
        this.mFinishingPhaser.arriveAndDeregister();
    }

    @Override
    public void notifyExecuting ( Executum that ){
        this.mFinishingPhaser.register();
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/InstantKillError.java
================================================
package com.pinecone.framework.system;

import com.pinecone.framework.system.prototype.Pinenut;

public class InstantKillError extends Error implements Pinenut {
    public InstantKillError() { super(); }
    public InstantKillError( String message ) { super(message); }
    public InstantKillError( String message, Throwable cause ) { super(message, cause); }
    public InstantKillError( Throwable cause ) { super(cause); }
    protected InstantKillError( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {
        super( message, cause, enableSuppression, writableStackTrace );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/IntegratedSubsystem.java
================================================
package
com.pinecone.framework.system; public interface IntegratedSubsystem extends Subsystem { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/IrrationalProvokedException.java ================================================ package com.pinecone.framework.system; public class IrrationalProvokedException extends PineRuntimeException { protected IrrationalProvokedType irrationalProvokedType; public IrrationalProvokedException() { this( IrrationalProvokedType.Aberration ); } public IrrationalProvokedException( IrrationalProvokedType type ) { super(); this.irrationalProvokedType = type; } public IrrationalProvokedException( String message, IrrationalProvokedType type ) { super( message ); this.irrationalProvokedType = type; } public IrrationalProvokedException( String message ) { this( message, IrrationalProvokedType.Aberration ); } public IrrationalProvokedException( String message, Throwable cause, IrrationalProvokedType type ) { super( message, cause ); this.irrationalProvokedType = type; } public IrrationalProvokedException( String message, Throwable cause ) { this( message, cause, IrrationalProvokedType.Aberration ); } public IrrationalProvokedException( Throwable cause, IrrationalProvokedType type ) { super(cause); this.irrationalProvokedType = type; } public IrrationalProvokedException( Throwable cause ) { this( cause, IrrationalProvokedType.Aberration ); } public IrrationalProvokedType getIrrationalProvokedType() { return this.irrationalProvokedType; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/IrrationalProvokedType.java ================================================ package com.pinecone.framework.system; public enum IrrationalProvokedType { Aberration ( "Aberration" ), // Should never happen, but happened. Expected ( "Expected" ), // Programmatic designed exception. Architectural ( "Architectural" ), // Architecturally critical errors. Destructive ( "Destructive" ); // Structural breach trigger. 
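    // Classification sketch (messages hypothetical): pick the type that matches how the anomaly
    // was provoked, e.g.
    //   throw new IrrationalProvokedException( "GUID collision in KOM tree", IrrationalProvokedType.Aberration );
    //   throw new IrrationalProvokedException( "codec not supported here", IrrationalProvokedType.Expected );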
private final String name; IrrationalProvokedType( String name ){ this.name = name; } public String getName(){ return this.name; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ModularizedSubsystem.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.util.config.PatriarchalConfig; public interface ModularizedSubsystem extends Subsystem { RuntimeSystem parentSystem(); void vitalize(); void terminate(); void release(); PatriarchalConfig getSubsystemConfig(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NestedCheckedException.java ================================================ package com.pinecone.framework.system; public abstract class NestedCheckedException extends Exception { private static final long serialVersionUID = 7100714597678207546L; public NestedCheckedException( String msg ) { super(msg); } public NestedCheckedException( @Nullable String msg, @Nullable Throwable cause ) { super(msg, cause); } @Nullable @Override public String getMessage() { return NestedExceptionUtils.buildMessage(super.getMessage(), this.getCause()); } @Nullable public Throwable getRootCause() { return NestedExceptionUtils.getRootCause(this); } public Throwable getMostSpecificCause() { Throwable rootCause = this.getRootCause(); return (Throwable)(rootCause != null ? rootCause : this); } public boolean contains(@Nullable Class exType) { if ( exType == null ) { return false; } else if ( exType.isInstance(this) ) { return true; } else { Throwable cause = this.getCause(); if ( cause == this ) { return false; } else if ( cause instanceof NestedCheckedException ) { return ((NestedCheckedException)cause).contains(exType); } else { while(cause != null) { if (exType.isInstance(cause)) { return true; } if (cause.getCause() == cause) { break; } cause = cause.getCause(); } return false; } } } static { NestedExceptionUtils.class.getName(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NestedExceptionUtils.java ================================================ package com.pinecone.framework.system; public final class NestedExceptionUtils { public NestedExceptionUtils() { } @Nullable public static String buildMessage( @Nullable String message, @Nullable Throwable cause ) { if (cause == null) { return message; } else { StringBuilder sb = new StringBuilder(64); if ( message != null ) { sb.append(message).append("; "); } sb.append( "nested exception is " ).append( cause ); return sb.toString(); } } @Nullable public static Throwable getRootCause( @Nullable Throwable original ) { if ( original == null ) { return null; } else { Throwable rootCause = null; for( Throwable cause = original.getCause(); cause != null && cause != rootCause; cause = cause.getCause() ) { rootCause = cause; } return rootCause; } } public static Throwable getMostSpecificCause( Throwable original ) { Throwable rootCause = NestedExceptionUtils.getRootCause(original); return rootCause != null ? 
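// Behavior sketch (values illustrative): getRootCause(...) returns the deepest cause, or null when
// the throwable has no cause at all; buildMessage(...) chains the texts.
//   Exception leaf = new IllegalStateException( "disk gone" );
//   NestedExceptionUtils.getRootCause( new Exception( new RuntimeException( leaf ) ) ); // -> leaf
//   NestedExceptionUtils.getRootCause( leaf );                                          // -> null
//   NestedExceptionUtils.buildMessage( "boot failed", leaf );
//       // -> "boot failed; nested exception is java.lang.IllegalStateException: disk gone"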
rootCause : original; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NestedRuntimeException.java ================================================ package com.pinecone.framework.system; public class NestedRuntimeException extends PineRuntimeException { private static final long serialVersionUID = 1312001337874041913L; public NestedRuntimeException( String msg ) { super( msg ); } public NestedRuntimeException( @Nullable String msg, @Nullable Throwable cause ) { super(msg, cause); } @Nullable @Override public String getMessage() { return NestedExceptionUtils.buildMessage(super.getMessage(), this.getCause()); } @Nullable public Throwable getRootCause() { return NestedExceptionUtils.getRootCause(this); } public Throwable getMostSpecificCause() { Throwable rootCause = this.getRootCause(); return (Throwable)(rootCause != null ? rootCause : this); } public boolean contains( @Nullable Class exType ) { if ( exType == null ) { return false; } else if ( exType.isInstance(this) ) { return true; } else { Throwable cause = this.getCause(); if ( cause == this ) { return false; } else if ( cause instanceof NestedRuntimeException ) { return ((NestedRuntimeException)cause).contains(exType); } else { while( cause != null ) { if ( exType.isInstance(cause) ) { return true; } if ( cause.getCause() == cause ) { break; } cause = cause.getCause(); } return false; } } } static { NestedExceptionUtils.class.getName(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NoSuchProviderException.java ================================================ package com.pinecone.framework.system; public class NoSuchProviderException extends Exception { public NoSuchProviderException() { super(); } public NoSuchProviderException( String message ) { super( message ); } public NoSuchProviderException( String message, Throwable cause ) { super( message, cause ); } public NoSuchProviderException( Throwable cause ) { super(cause); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Noexcept.java ================================================ package com.pinecone.framework.system; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Noexcept { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NonNull.java ================================================ package com.pinecone.framework.system; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.METHOD, ElementType.PARAMETER, ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface NonNull { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NotImplementedException.java ================================================ package com.pinecone.framework.system; public class NotImplementedException extends PineRuntimeException { public NotImplementedException() { super(); } public NotImplementedException( String 
message ) { super( message ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Nullable.java ================================================ package com.pinecone.framework.system; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.METHOD, ElementType.PARAMETER, ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Nullable { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ParseException.java ================================================ package com.pinecone.framework.system; public class ParseException extends PineRuntimeException { protected int errorOffset; public int getErrorOffset () { return errorOffset; } public ParseException ( String what ) { this( what, -1 ); } public ParseException ( String what, int errorOffset ) { super( what ); this.errorOffset = errorOffset; } public ParseException ( String message, int errorOffset, Throwable cause ) { super( message, cause ); this.errorOffset = errorOffset; } public ParseException ( Throwable cause, int errorOffset ) { super(cause.getMessage(), cause); this.errorOffset = errorOffset; } public ParseException ( Throwable cause ) { this( cause, -1 ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/PieceworkManager.java ================================================ package com.pinecone.framework.system; public class PieceworkManager { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/PineRuntimeException.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.system.prototype.Pinenut; public class PineRuntimeException extends RuntimeException implements Pinenut { public PineRuntimeException () { super(); } public PineRuntimeException ( String message ) { super(message); } public PineRuntimeException ( String message, Throwable cause ) { super(message, cause); } public PineRuntimeException ( Throwable cause ) { super(cause); } protected PineRuntimeException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Pinecore.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.util.io.Tracer; public interface Pinecore extends AsynSystem, CascadeSystem, ConformitySystem, RuntimeSystem { Tracer console(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/PrimarySystem.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.system.executum.ExclusiveProcessum; public interface PrimarySystem extends RuntimeSystem, ExclusiveProcessum { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ProvokeHandleException.java ================================================ package com.pinecone.framework.system; public class 
ProvokeHandleException extends PineRuntimeException { public ProvokeHandleException() { super(); } public ProvokeHandleException( String message ) { super( message ); } public ProvokeHandleException( String message, Throwable cause ) { super( message, cause ); } public ProvokeHandleException( Throwable cause ) { super(cause); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ProxyProvokeHandleException.java ================================================ package com.pinecone.framework.system; public class ProxyProvokeHandleException extends PineRuntimeException { public ProxyProvokeHandleException() { super(); } public ProxyProvokeHandleException( String message ) { super( message ); } public ProxyProvokeHandleException( String message, Throwable cause ) { super( message, cause ); } public ProxyProvokeHandleException( Throwable cause ) { super(cause); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RedirectRuntimeException.java ================================================ package com.pinecone.framework.system; public class RedirectRuntimeException extends PineRuntimeException { public RedirectRuntimeException () { super(); } public RedirectRuntimeException ( String message ) { super(message); } public RedirectRuntimeException ( String message, Throwable cause ) { super(message, cause); } public RedirectRuntimeException ( Throwable cause ) { super(cause); } protected RedirectRuntimeException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RestartSignalException.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.system.prototype.Pinenut; public class RestartSignalException extends Exception implements Pinenut { public RestartSignalException () { super(); } public RestartSignalException ( String message ) { super(message); } public RestartSignalException ( String message, Throwable cause ) { super(message, cause); } public RestartSignalException ( Throwable cause ) { super(cause); } protected RestartSignalException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RuntimeConstructionException.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.system.prototype.Pinenut; public class RuntimeConstructionException extends PineRuntimeException implements Pinenut { public RuntimeConstructionException () { super(); } public RuntimeConstructionException ( String message ) { super(message); } public RuntimeConstructionException ( String message, Throwable cause ) { super(message, cause); } public RuntimeConstructionException ( Throwable cause ) { super(cause); } protected RuntimeConstructionException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: 
Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RuntimeInstantiationException.java
================================================
package com.pinecone.framework.system;

public class RuntimeInstantiationException extends RuntimeConstructionException {
    public RuntimeInstantiationException() { super(); }
    public RuntimeInstantiationException( String message ) { super( message ); }
    public RuntimeInstantiationException( String message, Throwable cause ) { super( message, cause ); }
    public RuntimeInstantiationException( Throwable cause ) { super(cause); }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RuntimeSystem.java
================================================
package com.pinecone.framework.system;

import com.pinecone.framework.system.executum.Lifecycle;
import com.pinecone.framework.system.executum.Systemum;
import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.config.SystemConfig;

import java.util.Map;

public interface RuntimeSystem extends Pinenut, Systemum, Lifecycle {
    String[] getStartupCommand();
    Map getStartupCommandMap();
    Map getEnvironmentVars();
    SystemConfig getGlobalConfig();

    String getMajorPackagePath();
    String getRuntimeContextPath();
    String getRuntimePath();
    void setRuntimePath( String szRealRuntimePath );

    ClassLoader getGlobalClassLoader();
    void setGlobalClassLoader( ClassLoader classLoader );

    void handleLiveException( Exception e ) throws ProvokeHandleException;

    default void handleKillException( Exception e ) throws ProvokeHandleException {
        throw new ProvokeHandleException( e );
    }

    default void handleIgnoreException( Exception e ) throws ProvokeHandleException {
        // Just ignore them.
    }

    /**
     * These exceptions should never happen.
*/ default void handleDummyException( Exception e ) throws ProvokeHandleException { throw new ProvokeHandleException( e ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Subsystem.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.system.executum.Systema; public interface Subsystem extends Systema { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/SynergicSystem.java ================================================ package com.pinecone.framework.system; import com.pinecone.framework.system.prototype.Pinenut; public interface SynergicSystem extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Unsafe.java ================================================ package com.pinecone.framework.system; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Unsafe { String value() default ""; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/aop/InfrastructureProxy.java ================================================ package com.pinecone.framework.system.aop; import com.pinecone.framework.system.prototype.Pinenut; public interface InfrastructureProxy extends Pinenut { Object getWrappedObject(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/aop/RawTargetAccess.java ================================================ package com.pinecone.framework.system.aop; import com.pinecone.framework.system.prototype.Pinenut; public interface RawTargetAccess extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/aop/ScopedObject.java ================================================ package com.pinecone.framework.system.aop; public interface ScopedObject extends RawTargetAccess { Object getTargetObject(); void removeFromScope(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchCascadeComponent.java ================================================ package com.pinecone.framework.system.architecture; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.name.Namespace; import com.pinecone.framework.util.name.UniNamespace; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; public abstract class ArchCascadeComponent extends ArchComponent implements CascadeComponent { private CascadeComponent mParent; private Namespace mName; private Map mChildren; protected ArchCascadeComponent( Namespace name, CascadeComponentManager manager, CascadeComponent parent ) { super( manager ); this.mName = name; this.mChildren = new LinkedTreeMap<>(); if( name == null ) { this.setTargetingName( this.className() ); } this.setParent( parent ); } @Override public CascadeComponent parent() { return this.mParent; } @Override public void setParent( CascadeComponent parent ) { 
        this.mParent = parent;
        if( parent != null ) {
            this.mName.setParent( parent.getTargetingName() );
        }
    }

    @Override
    public Namespace getTargetingName() { return this.mName; }

    @Override
    public void setTargetingName( Namespace name ) { this.mName = name; }

    @Override
    public void setTargetingName( String name ) { CascadeComponent.super.setTargetingName( name ); }

    @Override
    public Collection children() { return this.mChildren.values(); }

    protected Map getChildren() { return this.mChildren; }

    @Override
    public CascadeComponentManager getComponentManager() { return (CascadeComponentManager) super.getComponentManager(); }

    @Override
    public void addChildComponent( CascadeComponent child ) {
        child.setParent( this );
        this.referChildComponent( child );
        this.getComponentManager().addComponent( child );
    }

    @Override
    public void referChildComponent ( Component child ) {
        this.mChildren.put( child.getFullName(), child );
    }

    @Override
    public void detachChildComponent( String fullName ) {
        this.mChildren.remove( fullName );
    }

    public void removeChildComponent ( @Nullable Component child, String fullName ) {
        if( child == null ) {
            child = this.getChildComponentByFullName( fullName );
        }
        if( child != null ) {
            this.detachChildComponent( fullName );
            if( child instanceof CascadeComponent && this.ownedChild( (CascadeComponent)child ) ) {
                this.getComponentManager().removeComponent( child );
            }
        }
    }

    @Override
    public void removeChildComponent ( Component child ) {
        this.removeChildComponent( child, child.getFullName() );
    }

    @Override
    public void removeChildComponent ( String fullName ) {
        this.removeChildComponent( null, fullName );
    }

    @Override
    public void clear() { this.mChildren.clear(); }

    @Override
    public void independent( String newName ) {
        if( this.mParent != null ) {
            this.mParent = null;
            this.getComponentManager().detachComponent( this );
            this.mName.setName( newName );
            this.mName.setParent( null );
            this.getComponentManager().addComponent( this );
        }
    }

    @Override
    public void purge() {
        this.purgeChildren();
        String szFN = this.getTargetingName().getFullName();
        if ( this.mParent != null ) {
            this.mParent.removeChildComponent( szFN );
        }
        this.getComponentManager().removeComponent( szFN );
    }

    @Override
    public void purgeChildren() {
        List<Component> purgeList = new ArrayList<>( this.mChildren.values() );
        for ( Component child : purgeList ) {
            this.mChildren.remove( child.getFullName() );
            if( child instanceof CascadeComponent ) {
                if( this.ownedChild( (CascadeComponent)child ) ) {
                    // Purge the owned child.
                    this.getComponentManager().removeComponent( child.getFullName() );
                }
            }
        }
    }

    @Override
    public boolean hasOwnChild( CascadeComponent child ) {
        Component component = this.getChildComponentByFullName( child.getFullName() );
        if( component instanceof CascadeComponent && component == child ) {
            return this.ownedChild( child );
        }
        return false;
    }

    @Override
    public boolean hasReferredChild( Component child ) {
        return this.mChildren.containsKey( child.getFullName() );
    }

    @Override
    public Component getChildComponentByFullName( String fullName ) {
        return (Component) this.mChildren.get( fullName );
    }

    @Override
    public int childSize() { return this.mChildren.size(); }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchCascadeComponentManager.java
================================================
package com.pinecone.framework.system.architecture;

import com.pinecone.framework.unit.LinkedTreeMap;

import java.util.Collection;
import java.util.Map;

/**
 * The Omega Device.
 * CascadeComponentManager can cascade-control every node and its references.
 * If one node owns a child while others refer to it:
 * 1. Mark-Sweep cascading effacement.
 *    1.1. Removing a node effaces it, and its owned children are erased from the whole scope.
 * 2. Cascading add.
 *    2.1. Adding a new node automatically marks and registers it in its parent manager.
 * 3. Reference add.
 *    3.1. Referring to a node only references its instance, without taking ownership.
 */
public abstract class ArchCascadeComponentManager extends ArchComponentManager implements CascadeComponentManager {
    private Map<String, Component> mComponentListMap;

    protected ArchCascadeComponentManager( Map rootComponents, Map componentsList ) {
        super( rootComponents );
        this.mComponentListMap = componentsList;
    }

    protected ArchCascadeComponentManager() {
        super();
        this.mComponentListMap = new LinkedTreeMap<>();
    }

    protected Component onlyAdd( Component component ) {
        Component v = this.mComponentListMap.put( component.getFullName(), component );
        if( component instanceof CascadeComponent ) {
            if( ((CascadeComponent) component).parent() != null ) {
                return v;
            }
        }
        this.rootComponents().put( component.getFullName(), component );
        return v;
    }

    protected Component onlyRemove( String fullName ) {
        Component v = this.mComponentListMap.remove( fullName );
        if( v instanceof CascadeComponent ) {
            if( ((CascadeComponent) v).parent() != null ) {
                return v;
            }
        }
        this.rootComponents().remove( fullName );
        return v;
    }

    protected Map<String, Component> getComponentListMap() { return this.mComponentListMap; }

    @Override
    public void addComponent ( Component component ) {
        this.onlyAdd( component );
    }

    @Override
    public void detachComponent( Component component ) {
        this.onlyRemove( component.getFullName() );
    }

    @Override
    public void removeComponent( String fullName ) {
        Component v = this.mComponentListMap.get( fullName );
        if( v != null ) {
            if( v instanceof CascadeComponent ){
                for( Component c : this.mComponentListMap.values() ) {
                    if( c instanceof CascadeComponent ) {
                        ((CascadeComponent) c).detachChildComponent( fullName );
                    }
                }
            }
            this.onlyRemove( fullName );
            if( v instanceof CascadeComponent ) {
                CascadeComponent component = (CascadeComponent) v;
                component.purge();
            }
        }
    }

    @Override
    public Component getComponentByFullName( String fullName ) {
        return this.mComponentListMap.get( fullName );
    }

    @Override
    public Component getRootComponentByFullName( String fullName ) {
        return super.getComponentByFullName( fullName );
    }

    @Override
    public Collection getComponentsRegisterList() {
        return this.mComponentListMap.values();
    }

    @Override
    public int componentScopeSize() {
        return this.getComponentListMap().size();
    }
}
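// Semantics sketch for the Omega contract above (root, child, other and manager are hypothetical
// instances of the types defined here):
//   root.addChildComponent( child );            // Cascading add: child.parent() == root, auto-registered.
//   other.referChildComponent( child );         // Reference add: an instance link, no ownership taken.
//   manager.removeComponent( child.getFullName() );
//       // Mark-sweep: every referrer detaches "child", then child and its owned subtree are purged.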
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchComponent.java
================================================
package com.pinecone.framework.system.architecture;

import com.pinecone.framework.unit.KeyValue;
import com.pinecone.framework.util.json.JSONEncoder;

public abstract class ArchComponent implements Component {
    private ComponentManager mComponentManager;

    protected ArchComponent( ComponentManager manager ) {
        this.mComponentManager = manager;
    }

    @Override
    public ComponentManager getComponentManager() { return this.mComponentManager; }

    @Override
    public void setComponentManager( ComponentManager componentManager ) { this.mComponentManager = componentManager; }

    @Override
    public String toString() { return this.toJSONString(); }

    @Override
    public String toJSONString() {
        return JSONEncoder.stringifyMapFormat( new KeyValue[]{
            new KeyValue<>( "class", this.className() ),
            new KeyValue<>( "name", this.getSimpleName() ),
            new KeyValue<>( "fullName", this.getFullName() )
        } );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchComponentManager.java
================================================
package com.pinecone.framework.system.architecture;

import com.pinecone.framework.unit.LinkedTreeMap;

import java.util.Collection;
import java.util.Map;

public abstract class ArchComponentManager implements ComponentManager {
    private Map<String, Component> mRootComponents;

    protected ArchComponentManager( Map components ) {
        this.mRootComponents = components;
    }

    protected ArchComponentManager() {
        this( new LinkedTreeMap<>() );
    }

    protected Map rootComponents() { return this.mRootComponents; }

    @Override
    public void addComponent ( Component component ) {
        this.mRootComponents.put( component.getFullName(), component );
    }

    @Override
    public void removeComponent ( String fullName ){
        this.mRootComponents.remove( fullName );
    }

    @Override
    public Component getComponentByFullName ( String fullName ) {
        return this.mRootComponents.get( fullName );
    }

    @Override
    public int componentSize() { return this.mRootComponents.size(); }

    @Override
    public Collection getComponents() { return this.rootComponents().values(); }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/CascadeComponent.java
================================================
package com.pinecone.framework.system.architecture;

import com.pinecone.framework.system.regimentation.CascadeNodus;
import com.pinecone.framework.util.name.Namespace;

import java.util.Collection;

public interface CascadeComponent extends Component, CascadeNodus {
    @Override
    CascadeComponent parent();

    void setParent( CascadeComponent parent );

    @Override
    default boolean isRoot() { return this.parent() == null; }

    @Override
    default CascadeComponent root() { return (CascadeComponent) CascadeNodus.super.root(); }

    Collection children();

    @Override
    Namespace getTargetingName();

    @Override
    void setTargetingName( Namespace name );

    @Override
    default void setTargetingName( String name ) { CascadeNodus.super.setTargetingName( name ); }

    @Override
    default String getSimpleName() { return this.getTargetingName().getSimpleName(); }

    @Override
    default String getFullName() { return this.getTargetingName().getFullName(); }

    CascadeComponentManager getComponentManager();

    void addChildComponent ( CascadeComponent child ) ;
    void detachChildComponent ( String fullName );
    void referChildComponent ( Component child ) ;
    void removeChildComponent ( Component child );
    void removeChildComponent ( String fullName ) ;

    default boolean ownedChild ( CascadeComponent child ) { return child.parent() == this; }

    boolean hasOwnChild ( CascadeComponent child ) ;
    boolean hasReferredChild ( Component child ) ;
    Component getChildComponentByFullName( String fullName ) ;

    // Only clears the references to all children.
    void clear() ;
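    // Ownership sketch: hasReferredChild(...) is true for any registered child reference, while
    // hasOwnChild(...) additionally requires child.parent() == this. E.g. (hypothetical nodes):
    //   owner.addChildComponent( node );     // owner.hasOwnChild( node ) && owner.hasReferredChild( node )
    //   viewer.referChildComponent( node );  // viewer.hasReferredChild( node ) only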
    // If this node has a parent, detach it (parent becomes null) and elevate it to a root node.
    void independent( String newName );

    // Purge this node and its owned children.
    void purge();

    // Purge this node's owned children.
    void purgeChildren();

    int childSize() ;
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/CascadeComponentManager.java
================================================
package com.pinecone.framework.system.architecture;

import java.util.Collection;

public interface CascadeComponentManager extends ComponentManager {
    int componentScopeSize() ;
    Component getRootComponentByFullName ( String fullName );
    Collection getComponentsRegisterList();
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/Component.java
================================================
package com.pinecone.framework.system.architecture;

import com.pinecone.framework.system.prototype.Pinenut;

public interface Component extends Pinenut {
    String getSimpleName();
    String getFullName();
    ComponentManager getComponentManager();
    void setComponentManager( ComponentManager componentManager );
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ComponentManager.java
================================================
package com.pinecone.framework.system.architecture;

import com.pinecone.framework.system.regime.arch.Manager;

import java.util.Collection;

public interface ComponentManager extends Manager {
    void addComponent ( Component component ) ;
    void detachComponent ( Component component ) ;

    default void removeComponent ( Component component ){
        this.removeComponent( component.getFullName() );
    }

    void removeComponent ( String fullName );
    Component getComponentByFullName ( String fullName );
    int componentSize() ;
    Collection getComponents();
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/SystemComponent.java
================================================
package com.pinecone.framework.system.architecture;

import com.pinecone.framework.system.RuntimeSystem;

public interface SystemComponent extends Component {
    RuntimeSystem getSystem();
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/SystemComponentManager.java
================================================
package com.pinecone.framework.system.architecture;

public interface SystemComponentManager extends ComponentManager {
    @Override
    SystemComponent getComponentByFullName( String fullName );
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/DynamicInstancePool.java
================================================
package com.pinecone.framework.system.construction;

public interface DynamicInstancePool<T> extends InstancePool<T> {
}
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/DynamicStructure.java
================================================
package com.pinecone.framework.system.construction;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.annotation.Repeatable;

@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface DynamicStructure {
    String name() default "";
    String lookup() default "";

    // If true, allows direct assignment of this object's member fields, like a C/C++/Go struct.
    boolean directlyStruct() default false;

    Class type() default Object.class;
    ReuseCycle cycle() default ReuseCycle.Singleton;
    DynamicStructure.AuthenticationType authenticationType() default DynamicStructure.AuthenticationType.CONTAINER;
    boolean shareable() default true;
    String mappedName() default "";
    String description() default "";

    // Instancing handle. TODO
    Class provider() default void.class;
    String providerMethod() default "";

    enum AuthenticationType {
        CONTAINER,
        APPLICATION;

        AuthenticationType() {
        }
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/GenericDynamicInstancePool.java
================================================
package com.pinecone.framework.system.construction;

import com.pinecone.framework.system.ProxyProvokeHandleException;
import com.pinecone.framework.util.lang.DynamicFactory;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class GenericDynamicInstancePool<T> implements DynamicInstancePool<T> {
    private BlockingQueue<T> mPool;
    private DynamicFactory mFactory;
    private Class<T> mClassType;
    private int mCapacity;
    private int mFreeSize;
    private int mPreAllocate;

    public GenericDynamicInstancePool( DynamicFactory factory, Class<T> classType ) {
        this( factory, 0, 0, classType );
    }

    public GenericDynamicInstancePool( DynamicFactory factory, int preAllocate, Class<T> classType ) {
        this( factory, 0, preAllocate, classType );
    }

    public GenericDynamicInstancePool( DynamicFactory factory, int capacity, int preAllocate, Class<T> classType ) {
        this.mPool = new LinkedBlockingQueue<>();
        this.mFactory = factory;
        this.mCapacity = capacity > 0 ? capacity : Integer.MAX_VALUE;
        this.mClassType = classType;
        this.mPreAllocate = preAllocate;
        this.mFreeSize = this.mCapacity;
        this.preAllocate( preAllocate );
    }

    protected T newInstance() {
        try{
            return this.mClassType.cast( this.mFactory.newInstance( this.mClassType, null, null ) );
        }
        catch ( Exception e ) {
            throw new ProxyProvokeHandleException( e );
        }
    }

    @Override
    public T allocate() {
        T obj = this.mPool.poll();
        if ( obj == null ) {
            int availableCapacity = this.freeSize();
            if ( availableCapacity > 0 ) {
                int allocateCount = 1;
                if( this.mPreAllocate > 0 ) {
                    allocateCount = Math.min( availableCapacity, this.mPreAllocate );
                }
                this.preAllocate( allocateCount );
                obj = this.mPool.poll();
                if ( obj == null ) {
                    throw new InternalError( "Unable to allocate instance." );
                }
            }
            else {
                throw new IllegalStateException( "Out of capacity, too many instances[" + this.mCapacity + "]." );
            }
        }
        --this.mFreeSize;
        return obj;
    }

    @Override
    public void free( T obj ) {
        if ( obj != null ) {
            this.mPool.offer( obj );
            ++this.mFreeSize;
        }
    }
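    // Usage sketch (the DynamicFactory wiring and the Widget type are hypothetical):
    //   GenericDynamicInstancePool<Widget> pool = new GenericDynamicInstancePool<>( factory, 8, 2, Widget.class );
    //   Widget w = pool.allocate();   // Serves a pooled instance, pre-allocating up to 2 on a miss.
    //   pool.free( w );               // Returns the instance for reuse; total live instances stay capped at 8.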
    @Override
    public int freeSize() { return this.mFreeSize; }

    @Override
    public int pooledSize() { return this.mPool.size(); }

    @Override
    public boolean isEmpty() { return this.freeSize() == 0; }

    @Override
    public void preAllocate( int count ) {
        for ( int i = 0; i < count; ++i ) {
            this.mPool.offer( this.newInstance() );
        }
    }

    @Override
    public void setCapacity( int capacity ) {
        if ( capacity < this.mCapacity - this.mFreeSize ) {
            throw new IllegalArgumentException( "New capacity cannot be less than current capacity minus free size." );
        }
        if ( capacity > this.mCapacity ) {
            int availableCapacity = this.freeSize();
            if ( availableCapacity > 0 ) {
                if( this.mPreAllocate > 0 ) {
                    this.preAllocate( Math.min( availableCapacity, this.mPreAllocate ) );
                }
            }
        }
        this.mCapacity = capacity;
    }

    @Override
    public int getCapacity() { return this.mCapacity; }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/GenericStructureDefinition.java
================================================
package com.pinecone.framework.system.construction;

public class GenericStructureDefinition implements StructureDefinition {
    private String mLookup = "";
    private Class mType = Object.class;
    private ReuseCycle mCycle = ReuseCycle.Singleton;
    private boolean mShareable = true;
    private String mDescription = "";
    private Class mProvider = void.class;
    private String mProviderMethod = "";
    private Structure.AuthenticationType mAuthenticationType = Structure.AuthenticationType.CONTAINER;

    public GenericStructureDefinition( Class type ) {
        this.mType = type;
    }

    public GenericStructureDefinition( Structure structure ) {
        this( structure.type() );
        this.mCycle = structure.cycle();
        this.mLookup = structure.lookup();
        this.mProvider = structure.provider();
        this.mShareable = structure.shareable();
        this.mDescription = structure.description();
        this.mProviderMethod = structure.providerMethod();
        this.mAuthenticationType = structure.authenticationType();
    }

    @Override
    public String getLookup() { return this.mLookup; }

    @Override
    public void setLookup( String lookup ) {
        this.mLookup = lookup; // TODO
    }

    @Override
    public Class getType() { return this.mType; }

    @Override
    public void setType( Class type ) { this.mType = type; }

    @Override
    public ReuseCycle getCycle() { return this.mCycle; }

    @Override
    public void setCycle( ReuseCycle cycle ) { this.mCycle = cycle; }

    @Override
    public Structure.AuthenticationType getAuthenticationType() { return this.mAuthenticationType; }

    @Override
    public void setAuthenticationType( Structure.AuthenticationType authenticationType ) { this.mAuthenticationType = authenticationType; }

    @Override
    public boolean isShareable() { return this.mShareable; }

    @Override
    public void setShareable( boolean shareable ) { this.mShareable = shareable; }

    @Override
    public String getDescription() { return this.mDescription; }

    @Override
    public void setDescription( String description ) { this.mDescription = description; }

    @Override
    public Class getProvide() { return this.mProvider; }

    @Override
    public String getProvideMethod() { return this.mProviderMethod; }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/InstanceDispenser.java
================================================
package com.pinecone.framework.system.construction;

import com.pinecone.framework.system.prototype.Pinenut;

public interface InstanceDispenser extends Pinenut {
    InstanceDispenser register( Class type ) ;
    boolean hasRegistered( Class type );
    <T> T allotInstance( Class<T> type ) ;
    void free( Class type, Object instance ) ;
    void free( Object instance );
}
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/InstanceManufacturer.java
================================================
package com.pinecone.framework.system.construction;

import java.util.List;

public interface InstanceManufacturer extends InstanceDispenser {
    @Override
    InstanceManufacturer register( Class type ) ;

    void onlyRegister( Class type ) ;

    InstanceManufacturer registers( List<Class<?>> types );

    List<Class<?>> fetchRegistered();

    String[] fetchRegisteredNames();

    default InstanceManufacturer registerInstancing( Class type ) {
        return this.registerInstancing( type, null );
    }

    InstanceManufacturer registerInstancing( Class type, Object instance ) ;

    Object allotInstance( String type );

    Object autowire( Object that );

    void close();

    void refresh();
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/InstancePool.java
================================================
package com.pinecone.framework.system.construction;

import com.pinecone.framework.system.prototype.Pinenut;

public interface InstancePool<T> extends Pinenut {
    T allocate() ;
    void free( T obj ) ;
    int freeSize() ;
    int pooledSize();
    boolean isEmpty() ;
    void preAllocate( int count ) ;
    void setCapacity( int capacity ) ;
    int getCapacity() ;
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/ObjectBasicTraits.java
================================================
package com.pinecone.framework.system.construction;

public class ObjectBasicTraits implements ObjectTraits {
    private boolean mIsBean = false;
    private boolean mIsDirectlyStruct = false;
    private String mName = "";
    private String mMappedKey = "";
    private Object mTargetAnnotation = null;
    private Class mDeclaredType = null;
    private Class mAffiliatedType = null;

    public ObjectBasicTraits() {
    }

    public ObjectBasicTraits( Structure structure ) {
        this.fromStructure( structure );
    }

    public ObjectTraits fromStructure( Structure structure ) {
        this.setBean( true );
        this.setDirectlyStruct( false );
        this.setName( structure.name() );
        this.setTargetAnnotation( structure );
        this.setDeclaredType( structure.type() );
        this.setMappedKey( structure.mappedName() );
        return this;
    }

    @Override
    public boolean isDirectlyStruct() { return this.mIsDirectlyStruct; }

    @Override
    public void setDirectlyStruct( boolean isDirectlyStruct ) { this.mIsDirectlyStruct = isDirectlyStruct; }

    @Override
    public boolean isBean() { return this.mIsBean; }

    @Override
    public void setBean( boolean isBean ) { this.mIsBean = isBean; }

    @Override
    public String getName() { return this.mName; }

    @Override
    public void setName( String name ) { this.mName = name; }

    @Override
    public String getMappedKey() { return this.mMappedKey; }

    @Override
    public void setMappedKey( String mappedKey ) { this.mMappedKey = mappedKey; }

    @Override
    public Object getTargetAnnotation() { return this.mTargetAnnotation; }

    @Override
    public void setTargetAnnotation( Object targetAnnotation ) { this.mTargetAnnotation = targetAnnotation; }

    @Override
    public Class getDeclaredType() { return this.mDeclaredType; }

    @Override
    public void setDeclaredType( Class declaredType ) { this.mDeclaredType = declaredType; }

    @Override
    public Class getAffiliatedType() { return this.mAffiliatedType; }

    @Override
    public void setAffiliatedType( Class affiliatedType ) { this.mAffiliatedType = affiliatedType; }

    @Override
    public boolean isStructure() { return this.getTargetAnnotation() instanceof Structure; }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/ObjectTraits.java
================================================
package com.pinecone.framework.system.construction;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.StringUtils;
ObjectTraits extends Pinenut { boolean isBean(); void setBean( boolean isBean ); boolean isDirectlyStruct(); void setDirectlyStruct( boolean isDirectlyStruct ); String getName(); void setName( String name ); String getMappedKey(); void setMappedKey( String mappedKey ); Object getTargetAnnotation(); void setTargetAnnotation( Object targetAnnotation ); Class getDeclaredType(); void setDeclaredType( Class declaredType ); Class getAffiliatedType(); void setAffiliatedType( Class affiliatedType ); default boolean isStructure() { return this.getTargetAnnotation() instanceof Structure; } default boolean isAnonymous() { return StringUtils.isEmpty( this.getMappedKey() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/Postpone.java ================================================ package com.pinecone.framework.system.construction; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ElementType.METHOD, ElementType.PARAMETER, ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Postpone { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/ReuseCycle.java ================================================ package com.pinecone.framework.system.construction; public enum ReuseCycle { // Passively (lazily) loaded and instanced; only one static singleton exists in the whole program scope. Singleton ( "Singleton" ), // Passively (lazily) loaded and instanced; instances are not pooled and have a free lifecycle. Disposable ( "Disposable" ), // Passively (lazily) loaded and instanced; instanced objects are pooled. Operations: allocate / free. Recyclable ( "Recyclable" ), // Eagerly loaded and instanced when found; only one static singleton exists in the whole program scope. PreSingleton ( "PreSingleton" ), // Eagerly loaded and instanced when found; instanced objects are pooled. Operations: allocate / free. PreRecyclable ( "PreRecyclable" ); private final String value; ReuseCycle( String value ){ this.value = value; } public String getName(){ return this.value; } public boolean isSingleton() { return this == ReuseCycle.Singleton || this == ReuseCycle.PreSingleton; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/Structure.java ================================================ package com.pinecone.framework.system.construction; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.annotation.Repeatable; @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @Repeatable( Structures.class ) public @interface Structure { String name() default ""; String lookup() default ""; // If true, allows direct assignment of this object's member fields, like a C/C++/Go struct.
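// (Illustrative note, not from the source: with directlyStruct = true, a map entry such as "timeout" would be written straight into a public field named timeout, without going through a setter.)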
boolean directlyStruct() default false; Class type() default Object.class; ReuseCycle cycle() default ReuseCycle.Singleton; Structure.AuthenticationType authenticationType() default Structure.AuthenticationType.CONTAINER; boolean shareable() default true; String mappedName() default ""; String description() default ""; // Instancing handle // TODO Class provider() default void.class; String providerMethod() default ""; enum AuthenticationType { CONTAINER, APPLICATION; AuthenticationType() { } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/StructureDefinition.java ================================================ package com.pinecone.framework.system.construction; import com.pinecone.framework.system.prototype.Pinenut; public interface StructureDefinition extends Pinenut { String getLookup(); void setLookup( String lookup ); Class getType(); void setType( Class type ); ReuseCycle getCycle(); void setCycle( ReuseCycle cycle ); Structure.AuthenticationType getAuthenticationType(); void setAuthenticationType( Structure.AuthenticationType authenticationType ); boolean isShareable(); void setShareable( boolean shareable ); String getDescription(); void setDescription( String description ); Class getProvide(); String getProvideMethod(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/StructureInstanceDispenser.java ================================================ package com.pinecone.framework.system.construction; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.lang.DynamicFactory; public interface StructureInstanceDispenser extends InstanceDispenser { StructureDefinition update ( Class type, StructureDefinition definition ) ; InstanceDispenser register( Class type, StructureDefinition definition, @Nullable InstancePool pool ) ; InstanceDispenser register( Class type, StructureDefinition definition ) ; InstanceDispenser register( Class type, Structure structure, @Nullable InstancePool pool ) ; InstanceDispenser register( Class type, Structure structure ) ; InstanceDispenser register( Class type, @Nullable InstancePool pool ) ; InstanceDispenser register( StructureDefinition definition ) ; InstanceDispenser registerByImplicitFirstFound( Class type, @Nullable Structure structure, @Nullable InstancePool pool ) ; InstanceDispenser registerByImplicitFirstFound( Class type, @Nullable Structure structure ) ; InstanceDispenser registerByImplicitFirstFound( Class type ) ; <T> T allotInstance( Class<T> type, @Nullable Structure instanceStructure ) ; StructureDefinition getStructureDefinition( Class type ); InstancePool getInstancePool( Class type ); DynamicFactory getCentralFactory(); Object registerInstance( String name, Object instance ); Object getRegisteredInstance( String name ); Object removeRegisteredInstance( String name ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/Structures.java ================================================ package com.pinecone.framework.system.construction; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Documented @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.TYPE}) public @interface Structures { Structure[] value(); }
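Usage sketch (illustrative only; DemoService and DemoUsage are hypothetical names, not repository files): a class declares its reuse contract with @Structure, and a StructureInstanceDispenser such as the UnifyCentralInstanceDispenser defined below resolves and pools it.

@Structure( name = "demoService", cycle = ReuseCycle.Recyclable, description = "A pooled demo bean." )
class DemoService { }

class DemoUsage {
    public static void main( String[] args ) {
        StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser();
        // Picks up the @Structure annotation declared on DemoService itself.
        dispenser.registerByImplicitFirstFound( DemoService.class );
        // Recyclable cycle: the instance is drawn from the default instance pool.
        DemoService s = dispenser.allotInstance( DemoService.class );
        // Hand the instance back to the pool for reuse.
        dispenser.free( DemoService.class, s );
    }
}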
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/UnifyCentralInstanceDispenser.java ================================================ package com.pinecone.framework.system.construction; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.lang.GenericDynamicFactory; import java.lang.annotation.Annotation; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; public class UnifyCentralInstanceDispenser implements StructureInstanceDispenser { protected final Map<Class<?>, Object> mSingletonObjects = new ConcurrentHashMap<>(); protected final Map<Class<?>, StructureDefinition> mObjectDefinitions = new ConcurrentHashMap<>(); protected final Map<Class<?>, InstancePool<?>> mObjectInstancer = new ConcurrentHashMap<>(); // Pool is immutable. protected final Map<String, Object> mObjectRegister = new ConcurrentHashMap<>(); protected final DynamicFactory mCentralFactory ; public UnifyCentralInstanceDispenser( DynamicFactory factory ) { this.mCentralFactory = factory; } public UnifyCentralInstanceDispenser() { this( new GenericDynamicFactory() ); } /** * update * The pool is immutable. * @param type the object's type * @param definition the object's definition * @return null if the type was never registered, otherwise the previous definition that was replaced. */ @Override public StructureDefinition update( Class type, StructureDefinition definition ) { if( this.mObjectDefinitions.containsKey( type ) ) { return this.mObjectDefinitions.put( type, definition ); } return null; } @Override public InstanceDispenser register( Class type, StructureDefinition definition, InstancePool pool ) { if( pool == null ) { pool = this.defaultInstancePool( type, definition ); } this.mObjectDefinitions.putIfAbsent( type, definition ); this.mObjectInstancer.putIfAbsent( type, pool ); return this; } @Override public InstanceDispenser register( Class type, StructureDefinition definition ) { return this.register( type, definition, this.defaultInstancePool( type, definition ) ); } protected StructureDefinition defaultDefinition( Class type, Structure structure ) { StructureDefinition definition = new GenericStructureDefinition( structure ); if( definition.getType() == Object.class && type != Object.class ) { definition.setType( type ); } return definition; } @Override public InstanceDispenser register( Class type, Structure structure ) { return this.register( type, structure, null ); } @Override public InstanceDispenser register( Class type, Structure structure, @Nullable InstancePool pool ) { StructureDefinition definition = this.defaultDefinition( type, structure ); if( pool == null ) { pool = this.defaultInstancePool( type, definition ); } return this.register( type, definition, pool ); } @Override public InstanceDispenser register( Class type ) { return this.register( type, (InstancePool) null ); } protected Structure foundClassDeclaredStructure( Class type ) { Annotation[] annotations = type.getAnnotations(); for( Annotation annotation : annotations ) { if( annotation instanceof Structure ) { return (Structure)annotation; } } return null; } @Override public InstanceDispenser register( Class type, @Nullable InstancePool pool ) { Structure target = this.foundClassDeclaredStructure( type ); if( target != null ) { return this.register( type,
target, pool ); } StructureDefinition definition = new GenericStructureDefinition( type ); return this.register( type, definition, pool ); } protected InstancePool defaultInstancePool( Class type, StructureDefinition definition ) { if( definition.getProvide() != void.class && definition.getProvide() != Object.class ) { Object o = this.tryInstancingFromProvider( type, definition, null ); if( o instanceof InstancePool ) { return (InstancePool)o; } } if( definition.getCycle() == ReuseCycle.Disposable || definition.getCycle().isSingleton() ) { return new GenericDynamicInstancePool<>( this.mCentralFactory, 0, type ); } return new GenericDynamicInstancePool<>( this.mCentralFactory, 4, type ); } @Override public InstanceDispenser register( StructureDefinition definition ) { return this.register( definition.getType(), definition ); } @Override public InstanceDispenser registerByImplicitFirstFound( Class type, @Nullable Structure structure ) { return this.registerByImplicitFirstFound( type, structure, null ); } @Override public InstanceDispenser registerByImplicitFirstFound( Class type, @Nullable Structure structure, @Nullable InstancePool pool ) { Structure target = this.foundClassDeclaredStructure( type ); if( target == null ) { target = structure; } if( target == null ) { StructureDefinition definition = new GenericStructureDefinition( type ); return this.register( type, definition, pool ); } return this.register( type, target, pool ); } @Override public InstanceDispenser registerByImplicitFirstFound( Class type ) { return this.registerByImplicitFirstFound( type, null, null ); } @Override public boolean hasRegistered( Class type ) { return this.mObjectDefinitions.containsKey( type ); } protected Object invokeInstancingProvider( Class provider, String szMethodName ) { Object provide = this.mCentralFactory.optNewInstance( provider, null ); Method pm; try{ pm = provide.getClass().getMethod( szMethodName ); } catch ( NoSuchMethodException nme ) { return null; } try { return ReflectionUtils.tryAccessibleInvoke( pm, provide ); } catch ( InvocationTargetException | IllegalArgumentException e ) { return null; } } protected Object tryInstancingFromProvider( Class type, StructureDefinition definition, @Nullable Structure instanceStructure ) { if( instanceStructure != null ) { Class provider = instanceStructure.provider(); if( DynamicInstancePool.class.isAssignableFrom( provider ) ) { if( instanceStructure.cycle() == ReuseCycle.Disposable || instanceStructure.cycle().isSingleton() ) { return (InstancePool) this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 0, type } ); } return (InstancePool) this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 4, type } ); } else if( InstancePool.class.isAssignableFrom( provider ) ) { return (InstancePool) this.mCentralFactory.optNewInstance( provider, null ); } else if( instanceStructure.type() != void.class && instanceStructure.type() != Object.class && !instanceStructure.providerMethod().isEmpty() ) { Object ret = this.invokeInstancingProvider( provider, instanceStructure.providerMethod() ); if( ret != null ) { return ret; } } } Class provider = definition.getProvide(); if( DynamicInstancePool.class.isAssignableFrom( definition.getProvide() ) ) { if( definition.getCycle() == ReuseCycle.Disposable || definition.getCycle().isSingleton() ) { return (InstancePool) this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 0, type } ); } return (InstancePool) 
this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 4, type } ); } else if( InstancePool.class.isAssignableFrom( definition.getProvide() ) ) { return (InstancePool) this.mCentralFactory.optNewInstance( provider, null ); } else if( definition.getType() != void.class && definition.getType() != Object.class && !definition.getProvideMethod().isEmpty() ) { Object ret = this.invokeInstancingProvider( provider, definition.getProvideMethod() ); if( ret != null ) { return ret; } } return null; } @Override public <T> T allotInstance( Class<T> type, @Nullable Structure instanceStructure ) { StructureDefinition definition = this.mObjectDefinitions.get( type ); if( definition == null ) { return null; // Unregistered. } Class innerType = definition.getType(); if( innerType == Object.class ) { innerType = type; } Object t = this.tryInstancingFromProvider( type, definition, instanceStructure ); if( t != null ) { return type.cast( t ); } Object b = this.mSingletonObjects.get( type ); if ( b != null ) { if( instanceStructure != null && !instanceStructure.cycle().isSingleton() ) { return type.cast( this.mObjectInstancer.get( innerType ).allocate() ); } return type.cast( b ); } InstancePool pool = this.mObjectInstancer.get( innerType ); if ( pool != null ) { if( definition.getCycle() == ReuseCycle.Disposable || ( instanceStructure != null && instanceStructure.cycle() == ReuseCycle.Disposable ) ) { return type.cast( pool.allocate() ); } T obj = type.cast( pool.allocate() ); if ( definition.getCycle().isSingleton() ) { this.mSingletonObjects.put( innerType, obj ); } return obj; } // Guard: instanceStructure may be null when invoked via allotInstance(Class). String name = instanceStructure != null ? instanceStructure.name() : null; if ( StringUtils.isEmpty(name) ) { name = type.getSimpleName(); name = Character.toLowerCase( name.charAt(0) ) + name.substring(1); } if ( StringUtils.isNoneEmpty(name) ) { Object o = this.getRegisteredInstance( name ); if( o != null && type.isAssignableFrom( o.getClass() ) ) { return type.cast( o ); } } return null; } @Override public <T> T allotInstance( Class<T> type ) { return this.allotInstance( type, null ); } @Override @SuppressWarnings("unchecked") public void free( Class type, Object instance ) { InstancePool pool = this.mObjectInstancer.get( type ); if( pool != null ) { pool.free( instance ); } else { throw new IllegalArgumentException( type.getName() + " is not an instance owned by this dispenser."
); } } @Override public void free( Object instance ) { this.free( instance.getClass(), instance ); } @Override public StructureDefinition getStructureDefinition( Class type ) { return this.mObjectDefinitions.get( type ); } @Override public InstancePool getInstancePool( Class type ) { return this.mObjectInstancer.get( type ); } @Override public DynamicFactory getCentralFactory() { return this.mCentralFactory; } @Override public Object registerInstance( String name, Object instance ) { return this.mObjectRegister.put( name, instance ); } @Override public Object getRegisteredInstance( String name ) { return this.mObjectRegister.get( name ); } @Override public Object removeRegisteredInstance( String name ) { return this.mObjectRegister.remove( name ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/UnifyStructureInjector.java ================================================ package com.pinecone.framework.system.construction; import com.pinecone.framework.system.functions.Executable; import com.pinecone.framework.unit.Units; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.json.homotype.JSONGet; import com.pinecone.framework.util.json.homotype.MapStructure; import com.pinecone.framework.util.json.homotype.ObjectInjector; import java.beans.JavaBean; import java.lang.annotation.Annotation; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.util.Collection; import java.util.Map; public class UnifyStructureInjector extends ObjectInjector { protected StructureInstanceDispenser mInstanceDispenser; public UnifyStructureInjector( Class type, StructureInstanceDispenser instanceDispenser ) { super( type ); this.mInstanceDispenser = instanceDispenser; } public UnifyStructureInjector( Class type ) { this( type, null ); } public ObjectTraits getObjectTraits( Field field ) { String szKey = null; ObjectBasicTraits traits = new ObjectBasicTraits(); Annotation[] annotations = field.getAnnotations(); for ( Annotation a : annotations ) { if( a instanceof JSONGet ) { szKey = ( (JSONGet) a ).value(); traits.setMappedKey( szKey ); traits.setTargetAnnotation( a ); break; } else if( a instanceof MapStructure ) { szKey = ( (MapStructure) a ).value(); traits.setMappedKey( szKey ); traits.setTargetAnnotation( a ); break; } else if( a instanceof Structure ) { Structure structure = (Structure) a; traits.fromStructure( structure ); szKey = structure.mappedName(); if( szKey.isEmpty() ) { szKey = structure.name(); } traits.setMappedKey( szKey ); traits.setTargetAnnotation( a ); break; } else if( a instanceof JavaBean ) { JavaBean javaBean = (JavaBean) a; szKey = javaBean.defaultProperty(); traits.setMappedKey( szKey ); traits.setBean( true ); traits.setTargetAnnotation( a ); break; } } if( szKey == null ) { return null; } traits.setAffiliatedType( field.getType() ); return traits; } protected Object getFromMapStructure ( Object mapLiked, String key ) { return Units.getFromMapStructure( mapLiked, key, true, true ); } protected Object injectMapLinked ( Object mapLiked, Class type, Object instance ) { Field[] fields = type.getDeclaredFields(); for ( Field field : fields ) { ReflectionUtils.makeAccessible( field ); try { ObjectTraits traits = this.getObjectTraits( field ); String szMappedKey; if( traits == null ) { continue; } else if( traits.getMappedKey().isEmpty() ) { szMappedKey = field.getName(); traits.setMappedKey( szMappedKey ); } else { szMappedKey = traits.getMappedKey(); 
} if( traits.getName().isEmpty() ) { traits.setName( field.getName() ); } Object val = this.getFromMapStructure( mapLiked, this.getFieldName( szMappedKey ) ); if( val == null ){ val = this.getFromMapStructure( mapLiked, szMappedKey ); } if( val == null && szMappedKey.contains( "." ) ){ val = this.getValueFromMapRecursively( mapLiked, szMappedKey ); } try { Object j; Class insType = traits.getDeclaredType(); Class fieldType = field.getType(); Object ann = traits.getTargetAnnotation(); if( ann instanceof Structure ) { if( insType == Object.class || insType == null ) { j = this.inject( val, fieldType ); } else { j = this.instancingUnitWithSpecificType( traits, val, field ); } } else { j = this.inject( val, fieldType ); } if( j == null ) { j = this.instancingAndInject( traits, val, field ); } field.set( instance, j ); } catch ( IllegalArgumentException e ){ //e.printStackTrace(); field = null; } } catch ( IllegalAccessException e ){ throw new IllegalStateException(e); // This should never happen. } } return instance; } protected void ensureRegistered( ObjectTraits traits, Class insType ) { if( !this.mInstanceDispenser.hasRegistered( insType ) ) { Object ann = traits.getTargetAnnotation(); if( ann instanceof Structure ) { this.mInstanceDispenser.registerByImplicitFirstFound( insType, (Structure)ann ); } else { this.mInstanceDispenser.registerByImplicitFirstFound( insType ); } } } protected Object instancingAndInject( ObjectTraits traits, Object val, Field field ) { Class insType = traits.getDeclaredType(); if( this.mInstanceDispenser != null ) { if( insType == null || (insType == Object.class && field.getType() != Object.class) ) { insType = field.getType(); } this.ensureRegistered( traits, insType ); Object neoMember = this.mInstanceDispenser.allotInstance( insType ); try{ this.inject( val, insType, neoMember ); } catch ( Exception e ) { throw new IllegalArgumentException( e ); } return neoMember; } return null; } protected Object instancingUnitWithSpecificType( ObjectTraits traits, Object val, Field field ) { if( this.mInstanceDispenser == null || val == null ) { return null; } Class fieldType = field.getType(); Class insType = traits.getDeclaredType(); this.ensureRegistered( traits, insType ); if( fieldType.isAssignableFrom( val.getClass() ) ) { if( val instanceof Map ) { Map cm = Units.newInstance( val.getClass() ); for( Object v : ((Map) val).entrySet() ) { Map.Entry kv = (Map.Entry) v; Object neo = this.mInstanceDispenser.allotInstance( insType ); neo = this.injectMapLinked( kv.getValue(), insType, neo ); cm.put( kv.getKey(), neo ); } return cm; } else if( val.getClass().isArray() ) { Object[] vals = new Object[ Array.getLength( val ) ]; for ( int i = 0; i < vals.length; ++i ) { Object neo = this.mInstanceDispenser.allotInstance( insType ); neo = this.injectMapLinked( Array.get( val, i ), insType, neo ); vals[i] = neo; } return vals; } else if( val instanceof Collection ) { Collection ib = Units.newInstance( val.getClass() ); for( Object o : (Collection) val ) { Object neo = this.mInstanceDispenser.allotInstance( insType ); neo = this.injectMapLinked( o, insType, neo ); ib.add( neo ); } return ib; } } return val; } @Override public Object inject ( Map that, Class type, Object instance ) { return this.injectMapLinked( that, type, instance ); } protected Object getValueFromMapRecursively( Object mapLiked, String key ) { String[] keys = key.split("\\.|\\/"); Object value = mapLiked; for ( String k : keys ) { value = this.getFromMapStructure( value, k ); } return value; } @Override
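// Dispatch note: homogeneous values and Object-typed targets pass through unchanged; Executable, Collection and Map each take a dedicated path; anything else is treated as a map-like structure.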
public Object inject ( Object that, Class type, Object instance ) throws Exception { if ( ObjectInjector.trialHomogeneity( that ) ){ return that; } else if( type == Object.class ){ return that; } else if( that instanceof Executable){ return this.inject( (Executable) that ); } else if ( that instanceof Collection){ return this.inject( (Collection) that, type, instance ); } else if ( that instanceof Map ){ return this.inject( (Map) that, type, instance ); } else { return this.injectMapLinked( that, type, instance ); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ArchExecutum.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.GenericMasterTaskManager; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantReadWriteLock; public abstract class ArchExecutum implements Executum { private long mnId ; protected String mszName ; protected RuntimeSystem mParentSystem ; protected Processum mParentProcessum ; protected Thread mAffiliateThread ; protected int mExceptionRestartCount = 0; protected int mExceptionRestartTime ; // < 0 forces it to stay alive forever, like the cancer. // Mutex & Lock protected ReentrantReadWriteLock mResourceLock = new ReentrantReadWriteLock(); protected ArchExecutum ( String szName, Processum parent, Thread affiliateThread ) { this.mAffiliateThread = affiliateThread; this.mszName = szName; this.mParentProcessum = parent; if( this.mParentProcessum == null ) { this.mParentSystem = null; } else if( this.mParentProcessum instanceof RuntimeSystem ) { this.mParentSystem = (RuntimeSystem) this.mParentProcessum; } else { this.mParentSystem = this.mParentProcessum.parentSystem(); } this.makeNameAndId(); } protected ArchExecutum ( String szName, Processum parent ) { this( szName, parent, null ); } protected ArchExecutum ( Processum parent, Thread affiliateThread ) { this( affiliateThread.getName(), parent, affiliateThread ); } protected void makeNameAndId() { this.mnId = Executum.nextAutoIncrementId(); if( this.mszName == null ) { this.mszName = this.className(); long id = this.getExecutumId(); if( this.mParentProcessum != null ) { this.mszName = this.mszName + "-Executum-" + id; } } } @Override public int getExceptionRestartTime() { return this.mExceptionRestartTime; } @Override public ArchExecutum applyExceptionRestartTime( int time ){ this.mResourceLock.writeLock().lock(); this.mExceptionRestartTime = time; this.mResourceLock.writeLock().unlock(); return this; } @Override public String getName(){ return this.mszName; } @Override public void setName( String szName ) { this.mszName = szName; } @Override public long getExecutumId() { return this.mnId; } @Override public ArchExecutum setThreadAffinity( Thread affinity ) { this.mAffiliateThread = affinity; return this; } @Override public Thread getAffiliateThread() { return this.mAffiliateThread; } @Override public Thread.State getState() { return this.getAffiliateThread().getState(); } @Override public RuntimeSystem parentSystem() { return this.mParentSystem; } @Override public RuntimeSystem revealNearestSystem() { if ( this instanceof RuntimeSystem ) { return (RuntimeSystem) this; } return parentSystem(); } @Override public Processum parentExecutum() { return this.mParentProcessum; } @Override public boolean isTerminated(){ return this.getState() == Thread.State.TERMINATED; }
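// Starts the bound affiliate thread; when no thread is bound this is a no-op, presumably left to subclasses to drive.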
@Override public void start() { if ( this.mAffiliateThread != null ) { this.mAffiliateThread.start(); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ArchProcessum.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.ApoptosisRejectSignalException; import com.pinecone.framework.system.GenericMasterTaskManager; import java.time.LocalDateTime; import java.util.Map; public abstract class ArchProcessum extends ArchExecutum implements Processum { protected GenericMasterTaskManager mTaskManager ; protected LocalDateTime mCreateTime; protected LocalDateTime mStartTime; public ArchProcessum ( String szName, Processum parent ) { super( szName, parent ); this.mCreateTime = LocalDateTime.now(); } @Override public ArchProcessum applyExceptionRestartTime( int time ){ return (ArchProcessum) super.applyExceptionRestartTime( time ); } @Override public ArchProcessum setThreadAffinity( Thread affinity ) { return (ArchProcessum) super.setThreadAffinity( affinity ); } @Override public void apoptosis() { this.interrupt(); } @Override public void interrupt() { if( this.getAffiliateThread() != null ) { this.getAffiliateThread().interrupt(); } } @Override public void kill() { this.getTaskManager().terminate(); if( this.getAffiliateThread() != null ) { this.getAffiliateThread().stop(); } } @Override public void suspend() { this.getAffiliateThread().suspend(); } @Override public void resume() { this.getAffiliateThread().resume(); } @Override public void entreatLive() { throw new ApoptosisRejectSignalException(); } @Override public GenericMasterTaskManager getTaskManager() { return this.mTaskManager; } @Override public Map getOwnThreadGroup() { return this.getTaskManager().getExecutumPool(); } @Override public void start() { super.start(); this.mStartTime = LocalDateTime.now(); } @Override public LocalDateTime getCreateTime() { return this.mCreateTime; } @Override public LocalDateTime getStartTime() { return this.mStartTime; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ArchThreadum.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.ApoptosisRejectSignalException; public abstract class ArchThreadum extends ArchExecutum implements Executum { protected ArchThreadum ( String szName, Processum parent, Thread affiliateThread ) { super( szName, parent, affiliateThread ); } protected ArchThreadum ( String szName, Processum parent ) { this( szName, parent, null ); } protected ArchThreadum ( Processum parent, Thread affiliateThread ) { this( affiliateThread.getName(), parent, affiliateThread ); } @Override public void setName( String szName ) { super.setName( szName ); if( this.getAffiliateThread() != null ) { this.getAffiliateThread().setName( szName ); } } @Override public void apoptosis() { this.interrupt(); } @Override public void kill() { this.getAffiliateThread().stop(); } @Override public void interrupt() { if( this.getAffiliateThread() != null ) { this.getAffiliateThread().interrupt(); } } @Override @SuppressWarnings( "deprecation" ) public void suspend() { this.getAffiliateThread().suspend(); } @Override @SuppressWarnings( "deprecation" ) public void resume() { this.getAffiliateThread().resume(); } @Override public void entreatLive() { throw new ApoptosisRejectSignalException(); }
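// Note: suspend()/resume() delegate to the deprecated Thread primitives, with their attendant deadlock risk.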
} ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Chronum.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.prototype.Pinenut; import java.util.concurrent.TimeUnit; public interface Chronum extends Pinenut { long getStartNano(); default long getStartTime( TimeUnit unit ) { return unit.convert( this.getStartNano(), TimeUnit.NANOSECONDS ); } default long getStartMillis() { return this.getStartTime( TimeUnit.MILLISECONDS ); } default long getExecutedNano() { return System.nanoTime() - this.getStartNano(); } default long getExecutedTime( TimeUnit unit) { long executedNano = this.getExecutedNano(); return unit.convert(executedNano, TimeUnit.NANOSECONDS); } default long getExecutedMillis() { return this.getExecutedTime(TimeUnit.MILLISECONDS); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/EventedTaskManager.java ================================================ package com.pinecone.framework.system.executum; public interface EventedTaskManager extends TaskManager { void notifyFinished ( Executum that ); void notifyExecuting ( Executum that ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ExclusiveProcessum.java ================================================ package com.pinecone.framework.system.executum; public interface ExclusiveProcessum extends Processum { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ExecutableSummoner.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.prototype.Summoner; public interface ExecutableSummoner extends Summoner { void executeAfterSummonSequence() throws Exception ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Executum.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.functions.Executable; import java.util.concurrent.atomic.AtomicLong; /** * Pinecone Ursus For Java Executum * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Executum vs Executor * 1. An Executor is just a function; it is agnostic to the specific thread it executes on. * 2. An Executum is a thread-based executable object that owns its own specific execution threads. * 2.1 An Executum can own its thread group, much like a process [Processum]. * 2.2 An Executum is a sophisticated task, focused on a specific task group or scheme.
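* e.g. (illustrative) a Runnable submitted to a pool behaves as an Executor-style function, whereas a Processum that owns its worker threads is an Executum.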
* ***************************************************************************************** */ public interface Executum extends Executable, Lifecycle { String getName(); void setName( String szName ); long getExecutumId(); RuntimeSystem parentSystem(); RuntimeSystem revealNearestSystem(); Executum parentExecutum(); Executum setThreadAffinity( Thread affinity ); Thread getAffiliateThread(); default boolean isSystemExecutum() { return this instanceof Systemum; } default boolean isMainThreadExecutum() { return this.getAffiliateThread() == this.parentSystem().getProcessMainThread(); } boolean isTerminated(); void start(); AtomicLong AutoIncrementId = new AtomicLong( 0 ); static long nextAutoIncrementId() { return Executum.AutoIncrementId.getAndIncrement(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/JobCompromisedException.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.prototype.Pinenut; public class JobCompromisedException extends Exception implements Pinenut { public JobCompromisedException () { super(); } public JobCompromisedException ( String message ) { super(message); } public JobCompromisedException ( String message, Throwable cause ) { super(message, cause); } public JobCompromisedException ( Throwable cause ) { super(cause); } protected JobCompromisedException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/LifeDaemon.java ================================================ package com.pinecone.framework.system.executum; public interface LifeDaemon { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Lifecycle.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.ApoptosisRejectSignalException; import com.pinecone.framework.system.prototype.Pinenut; public interface Lifecycle extends Pinenut { void apoptosis() throws ApoptosisRejectSignalException; // Notifies that you should die, but you can choose to be the cancer that refuses to die. void kill(); // Just kill you, the darkness comes... void interrupt(); void suspend(); void resume(); void entreatLive(); // Before you die.
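// In short: apoptosis() is the negotiable death signal, kill() is unconditional, and entreatLive() is the veto a dying executum may raise.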
Thread.State getState(); int getExceptionRestartTime(); Lifecycle applyExceptionRestartTime( int time ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Processum.java ================================================ package com.pinecone.framework.system.executum; import java.time.LocalDateTime; import java.util.Map; public interface Processum extends Executum { Map getOwnThreadGroup(); default Thread rootThread() { return this.getAffiliateThread(); } default boolean isOnMainThread() { return this.rootThread() == null || this.rootThread() == this.parentSystem().getProcessMainThread(); } TaskManager getTaskManager(); LocalDateTime getCreateTime() ; LocalDateTime getStartTime() ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/StageCompromisedException.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.prototype.Pinenut; public class StageCompromisedException extends Exception implements Pinenut { public StageCompromisedException () { super(); } public StageCompromisedException ( String message ) { super(message); } public StageCompromisedException ( String message, Throwable cause ) { super(message, cause); } public StageCompromisedException ( Throwable cause ) { super(cause); } protected StageCompromisedException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Systema.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.prototype.Pinenut; // Systema [Latin, System] public interface Systema extends Pinenut { String getName(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Systemum.java ================================================ package com.pinecone.framework.system.executum; import java.util.Map; import java.util.Set; public interface Systemum extends Processum, Systema { default long getSystemId() { return this.getExecutumId(); } Thread getAffiliateThread(); Thread getProcessMainThread() ; default boolean isMainThreadSystem() { return this.getAffiliateThread() == this.getProcessMainThread(); } default Set fetchAllProcessThreads() { Map allThreads = Thread.getAllStackTraces(); return allThreads.keySet(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/TaskCompromisedException.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.prototype.Pinenut; public class TaskCompromisedException extends Exception implements Pinenut { public TaskCompromisedException () { super(); } public TaskCompromisedException ( String message ) { super(message); } public TaskCompromisedException ( String message, Throwable cause ) { super(message, cause); } public TaskCompromisedException ( Throwable cause ) { super(cause); } protected TaskCompromisedException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, 
writableStackTrace ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/TaskManager.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.system.prototype.Summoner; import java.util.Map; public interface TaskManager extends Pinenut, Summoner, Manager { Processum getParentProcessum(); RuntimeSystem getSystem(); ClassLoader getClassLoader(); Map getVitalResources(); void executeZionSequence(); // No exception void sendApoptosisSignal(); void terminate(); // Instantly kills all subordinate executums, with no negotiation. void suspendAll(); void resumeAll(); int size(); boolean isPooled(); long getVitalizeCount(); long getFatalityCount(); Executum add ( Executum that ); void erase( Executum that ); void purge(); boolean isTerminated(); // Synchronizes the current thread, waiting for all tasks to be terminated. void syncWaitingTerminated() throws Exception; Executum summon ( String szClassPath, Object... args ) throws Exception ; void kill ( Executum that ); void apoptosis ( Executum that ); void commitSuicide ( Executum that ); boolean autopsy ( Executum that ); // Checks whether it is dead. String nomenclature ( Thread that ); // Object clearance rate; helps load balancing and dispatch. [e.g. using a priority queue.] default double getClearanceRate() { double nFatality = this.getFatalityCount(); double nVitalize = this.getVitalizeCount(); return nFatality / ( nVitalize + nFatality ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/VitalResource.java ================================================ package com.pinecone.framework.system.executum; import com.pinecone.framework.system.prototype.Pinenut; public interface VitalResource extends Pinenut { String getName(); long getId(); Object nativeResource(); void store(); // No exception TaskManager parentFates(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/ChosenDispatcher.java ================================================ package com.pinecone.framework.system.functions; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.TreeSet; public class ChosenDispatcher implements SteerableSegment { public static Set<String> S_RESERVED = new TreeSet<String>() { { add( "default" ); } }; private Map<String, Executable> mInnerFunctions; private Map<String, Object> mDynamicData; private String mszCurrentChosen ; private Invoker mInvoker; public ChosenDispatcher( Map innerFns ){ this( null, innerFns, null ); } public ChosenDispatcher( Map dynamicData, Map innerFns ){ this( dynamicData, innerFns, null ); } public ChosenDispatcher( Map dynamicData, Map innerFns, Invoker invoker ){ this.mInnerFunctions = innerFns; this.mDynamicData = dynamicData != null ? dynamicData : new LinkedHashMap<>(); this.mInvoker = invoker != null ?
invoker : new SystemInvoker(); } @Override public Map data(){ return this.mDynamicData; } @Override public String name() { return this.mszCurrentChosen; } @Override public Object invoke( String fnName, Object...args ) throws Exception { String szLastName = this.mszCurrentChosen; this.mszCurrentChosen = fnName; Object ret = this.mInvoker.invoke( this.mInnerFunctions.get( this.mszCurrentChosen ), args ) ; this.mszCurrentChosen = szLastName; return ret; } @Override public void dispatch( Object... args ) throws Exception { if( args.length <= 0 ){ throw new IllegalArgumentException( "No chosen function was committed." ); } this.mszCurrentChosen = (String) args[0]; Executable fn = this.mInnerFunctions.get( this.mszCurrentChosen ); if( fn == null ){ fn = this.mInnerFunctions.get( "default" ); } this.mInvoker.invoke( fn, this ) ; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Executable.java ================================================ package com.pinecone.framework.system.functions; import com.pinecone.framework.system.prototype.Pinenut; public interface Executable extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Executor.java ================================================ package com.pinecone.framework.system.functions; public interface Executor extends Executable { void execute() throws Exception; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Function.java ================================================ package com.pinecone.framework.system.functions; public interface Function extends Executable, Invokable { @Override Object invoke( Object...obj ) throws Exception; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/FunctionTraits.java ================================================ package com.pinecone.framework.system.functions; import com.pinecone.framework.util.ReflectionUtils; import java.lang.reflect.Method; public abstract class FunctionTraits { public static String thisName(){ return FunctionTraits.thatName( 3 ); } public static String thatName( int level ){ return Thread.currentThread().getStackTrace()[ level ].getMethodName(); } public static Object invoke ( Invokable fn, Object... obj ) throws Exception { return fn.invoke( obj ); } public static Object invoke ( Executable fn, Object... obj ) throws Exception { if( fn instanceof Function ){ return FunctionTraits.invoke( (Invokable) fn , obj ); } else if ( fn instanceof Executor ){ ( (Executor) fn ).execute(); return null; } throw new IllegalArgumentException( "Not executable." ); } public static Object invoke ( Object that, Method fn, Object...obj ) throws Exception { ReflectionUtils.makeAccessible( fn ); try { return fn.invoke( that, obj ); } catch ( IllegalArgumentException e ){ return fn.invoke( that, new Object[]{ obj } ); } } public static Object invoke ( Object that, String szFnName, Object... obj ) throws Exception { try { // Most likely... Method fn = that.getClass().getDeclaredMethod( szFnName, Object[].class ); return FunctionTraits.invoke( that, fn, obj ); } catch ( NoSuchMethodException nsm ){ // Try this...
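// Build the exact parameter signature from the runtime argument classes and retry the lookup.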
Class[] protoArgs = new Class[ obj.length ]; int i = 0; for ( Object arg : obj ) { protoArgs [ i++ ] = arg.getClass(); } try { Method fn = that.getClass().getDeclaredMethod( szFnName, protoArgs ); return FunctionTraits.invoke( that, fn, obj ); } catch ( NoSuchMethodException e ){ // Let's do it the savage way... Method[] fns = that.getClass().getDeclaredMethods(); for( Method fn : fns ){ if( fn.getName().equals(szFnName) ){ try{ return FunctionTraits.invoke( that, fn, obj ); } catch ( NoSuchMethodException | IllegalArgumentException againAndAgain ){ } } } throw new NoSuchMethodException( "Exhaustively trialed, but no matching method was found." ); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Invokable.java ================================================ package com.pinecone.framework.system.functions; public interface Invokable { Object invoke( Object...obj ) throws Exception; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Invoker.java ================================================ package com.pinecone.framework.system.functions; import java.lang.reflect.Method; public interface Invoker { Object invoke ( Invokable fn, Object...obj ) throws Exception ; Object invoke ( Executable fn, Object...obj ) throws Exception ; Object invoke ( Object that, Method fn, Object...obj ) throws Exception ; Object invoke ( Object that, String szFnName, Object...obj ) throws Exception ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/LinearDispatcher.java ================================================ package com.pinecone.framework.system.functions; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.TreeSet; public class LinearDispatcher implements SteerableSegment { public static Set<String> S_RESERVED = new TreeSet<String>() { { add( "init" ); add( "final" ); } }; private Map<String, Executable> mInnerFunctions; private Map<String, Object> mDynamicData; private String mszCurrentChosen ; private Invoker mInvoker; public LinearDispatcher( Map innerFns ){ this( null, innerFns, null ); } public LinearDispatcher( Map dynamicData, Map innerFns ){ this( dynamicData, innerFns, null ); } public LinearDispatcher( Map dynamicData, Map innerFns, Invoker invoker ){ this.mInnerFunctions = innerFns; this.mDynamicData = dynamicData != null ? dynamicData : new LinkedHashMap<>(); this.mInvoker = invoker != null ? invoker : new SystemInvoker(); } @Override public Map data(){ return this.mDynamicData; } @Override public String name() { return this.mszCurrentChosen; } @Override public Object invoke( String fnName, Object...args ) throws Exception { String szLastName = this.mszCurrentChosen; this.mszCurrentChosen = fnName; Object ret = this.mInvoker.invoke( this.mInnerFunctions.get( fnName ), args ) ; this.mszCurrentChosen = szLastName; return ret; } @Override public void dispatch( Object...
args ) throws Exception { boolean bNotIgnoreExp = args.length > 0 && (boolean) args[0]; try{ this.mszCurrentChosen = "init"; Executable fnInit = this.mInnerFunctions.get( this.mszCurrentChosen ); if( fnInit instanceof Function ){ Object ret = ( ( Function ) fnInit ).invoke( this ); if( ret instanceof Boolean && !(boolean) ret ){ return; } } else { this.mInvoker.invoke( this.mInnerFunctions.get( this.mszCurrentChosen ), this ) ; } } catch ( Exception e ){ if( bNotIgnoreExp ){ throw e; } } for( Object each : this.mInnerFunctions.entrySet() ){ Map.Entry kv = ( Map.Entry ) each; this.mszCurrentChosen = (String) kv.getKey(); if( LinearDispatcher.S_RESERVED.contains( this.mszCurrentChosen ) ){ continue; } try { this.mInvoker.invoke( (Executable) kv.getValue(), this ); } catch ( Exception e ){ if( bNotIgnoreExp ){ throw e; } } } try { this.mszCurrentChosen = "final"; this.mInvoker.invoke( this.mInnerFunctions.get( this.mszCurrentChosen ), this ) ; } catch ( Exception e ){ if( bNotIgnoreExp ){ throw e; } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/SteerableSegment.java ================================================ package com.pinecone.framework.system.functions; import java.util.Map; public interface SteerableSegment { Map data(); String name(); Object invoke( String fnName, Object...args ) throws Exception; void dispatch( Object...args ) throws Exception; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/SystemInvoker.java ================================================ package com.pinecone.framework.system.functions; import java.lang.reflect.Method; public class SystemInvoker implements Invoker { @Override public Object invoke( Invokable fn, Object... obj ) throws Exception { return FunctionTraits.invoke( fn, obj ); } @Override public Object invoke( Executable fn, Object... obj ) throws Exception { return FunctionTraits.invoke( fn, obj ); } @Override public Object invoke( Object that, Method fn, Object...obj ) throws Exception { return FunctionTraits.invoke( that, fn, obj ); } @Override public Object invoke( Object that, String szFnName, Object... 
obj ) throws Exception { return FunctionTraits.invoke( that, szFnName, obj ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/Assimilable.java ================================================ package com.pinecone.framework.system.homotype; public interface Assimilable { Object assimilate( Object that ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/HomoInjector.java ================================================ package com.pinecone.framework.system.homotype; public interface HomoInjector extends Injector { boolean isHomogeneity( Object that ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/Homotypic.java ================================================ package com.pinecone.framework.system.homotype; import com.pinecone.framework.system.prototype.Pinenut; public interface Homotypic extends Pinenut { boolean isHomogeneity( Object that ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/Injector.java ================================================ package com.pinecone.framework.system.homotype; import com.pinecone.framework.system.prototype.Pinenut; public interface Injector extends Pinenut { Object inject ( Object that ) throws Exception ; default Object inject ( Object that, Object instance ) throws Exception { return this.inject( that, that.getClass(), instance ); } Object inject ( Object that, Class stereotype, Object instance ) throws Exception ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/StereotypicInjector.java ================================================ package com.pinecone.framework.system.homotype; public interface StereotypicInjector extends Injector { Class getStereotype(); void setStereotype( Class stereotype ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Ally.java ================================================ package com.pinecone.framework.system.prototype; public interface Ally { void beforeSummon() throws Exception ; void summoning() throws Exception ; void afterSummon() throws Exception ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Factory.java ================================================ package com.pinecone.framework.system.prototype; public interface Factory extends Pinenut { ClassLoader getClassLoader(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/FamilyContext.java ================================================ package com.pinecone.framework.system.prototype; import java.nio.file.Path; import java.util.List; public interface FamilyContext extends Pinenut { List getGlobalScopes(); FamilyContext setGlobalScopes( List globalScopes ); Object parent(); Object thisScope(); Object root(); Path[] getParentPaths(); FamilyContext setParent( Object parent ); FamilyContext setThisScope( Object thisScope ); FamilyContext setRoot( Object root ); FamilyContext setParentPaths( Path[] parentPaths ); FamilyContext addParentPath( Path newPath ); FamilyContext addGlobalScope( Object scope ); default FamilyContext 
asProgenitor ( Object root ) { this.setThisScope( root ); this.setParent ( root ); this.setRoot ( root ); return this; } default boolean isFamilyAffinity ( FamilyContext otherContext ) { return this.root().equals( otherContext.root() ); } default boolean isParentAffinity ( FamilyContext otherContext ) { return this.parent().equals( otherContext.parent() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/MapStructuresEvaluator.java ================================================ package com.pinecone.framework.system.prototype; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.List; import java.util.Map; import com.pinecone.framework.system.stereotype.JavaBeans; import com.pinecone.framework.util.ClassUtils; import com.pinecone.framework.util.json.JSONArray; public class MapStructuresEvaluator implements ObjectiveEvaluator { @Override public Object beanGet( Object that, String key ) { try { return this.beanGetExp( that, key ); } catch ( NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { return null; } } @Override public Object beanGetExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { if ( that == null || key == null ) { return null; } String getterName = JavaBeans.MethodMajorKeyGet + Character.toUpperCase( key.charAt( 0 ) ) + key.substring( 1 ); Method getter = that.getClass().getMethod( getterName ); getter.setAccessible( true ); return getter.invoke( that ); } @Override public void beanSet( Object that, String key, Object val ) { try { this.beanSetExp( that, key, val ); } catch ( NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ignored ) { // Do nothing. 
} } @Override public void beanSetExp( Object that, String key, Object val ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { if ( that == null || key == null ) { return; } String setterName = JavaBeans.MethodMajorKeySet + Character.toUpperCase( key.charAt( 0 ) ) + key.substring( 1 ); Method setter; if( val == null ) { /* A zero-argument getMethod lookup can never accept a null value: search for a single-argument, non-primitive setter instead. */ setter = null; Method[] candidates = that.getClass().getMethods(); for( Method candidate : candidates ) { Class[] pars = candidate.getParameterTypes(); if( candidate.getName().equals( setterName ) && pars.length == 1 && !pars[0].isPrimitive() ) { setter = candidate; break; } } if( setter == null ) { throw new NoSuchMethodException( setterName ); } } else { try{ setter = that.getClass().getMethod( setterName, val.getClass() ); } catch ( NoSuchMethodException e ) { setter = null; Method[] candidates = that.getClass().getMethods(); for( Method candidate : candidates ) { Class[] pars = candidate.getParameterTypes(); if( candidate.getName().equals( setterName ) && pars.length == 1 ) { if( ClassUtils.isAssignable( pars[0], val.getClass() ) ){ setter = candidate; break; } } } if( setter == null ) { throw e; } } } setter.setAccessible( true ); setter.invoke( that, val ); } @Override public Object structGet( Object that, String key ) { try { return this.structGetExp( that, key ); } catch ( NoSuchFieldException | SecurityException | IllegalAccessException | IllegalArgumentException e ) { return null; } } public Object structGetExp( Object that, String key ) throws NoSuchFieldException, SecurityException, IllegalAccessException, IllegalArgumentException { if ( that == null || key == null ) { return null; } Field field = that.getClass().getField( key ); field.setAccessible( true ); return field.get( that ); } @Override public void structSet( Object that, String key, Object val ) { try { this.structSetExp( that, key, val ); } catch ( NoSuchFieldException | SecurityException | IllegalAccessException | IllegalArgumentException ignored ) { // Do nothing. } } public void structSetExp( Object that, String key, Object val ) throws NoSuchFieldException, SecurityException, IllegalAccessException, IllegalArgumentException { if ( that == null || key == null ) { return; } Field field = that.getClass().getField( key ); field.setAccessible( true ); field.set( that, val ); } @Override public void classSet( Object that, String key, Object val ) { try { this.beanSetExp( that, key, val ); } catch ( NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { this.structSet( that, key, val ); } } @Override public Object classGet( Object that, String key ) { Object value = this.beanGet( that, key ); if( value == null ) { return this.structGet( that, key ); } return value; } @Override public Object get( Object that, String key ) { if ( that == null ) { return null; } if ( that instanceof Map ) { return ((Map) that).get(key); } else if ( that instanceof List ) { try { int index = Integer.parseInt( key ); return ((List) that).get(index); } catch ( NumberFormatException | IndexOutOfBoundsException e ) { return null; } } else if ( that.getClass().isArray() ) { try { int index = Integer.parseInt( key ); return ((Object[]) that)[ index ]; } catch ( NumberFormatException | ArrayIndexOutOfBoundsException e ) { return null; } } else if( that.getClass().isPrimitive() ) { return null; } else if( that.getClass().isEnum() ) { return null; } else if( that instanceof Number ) { return null; } else if( that instanceof String ) { return null; } else { return this.classGet( that, key ); } } @Override @SuppressWarnings( "unchecked" ) public void set( Object that, String key, Object val ) {
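/* Dispatch mirrors get(): container shapes (Map, List, array) are written in place, terminal value shapes (primitive, enum, Number, String) are ignored, and any remaining object is delegated to classSet. */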
if ( that == null ) { return; } if ( that instanceof Map ) { ((Map) that).put( key, val ); } else if ( that instanceof List ) { try { int index = Integer.parseInt( key ); ((List) that).set( index, val ); } catch ( NumberFormatException | IndexOutOfBoundsException e ) { // Do nothing. } } else if ( that.getClass().isArray() ) { try { int index = Integer.parseInt( key ); ((Object[]) that)[ index ] = val; } catch ( NumberFormatException | ArrayIndexOutOfBoundsException e ) { // Do nothing. } } else if( that.getClass().isPrimitive() || that.getClass().isEnum() || that instanceof Number || that instanceof String ) { return; } else { this.classSet( that, key, val ); } } @Override public Class beanGetType( Object that, String key ) { try { return this.beanGetTypeExp( that, key ); } catch ( NoSuchMethodException | SecurityException | IllegalArgumentException e ) { return null; } } public Class beanGetTypeExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalArgumentException { if ( that == null ) { return null; } if( key == null ) { return null; } String getterName = JavaBeans.MethodMajorKeyGet + Character.toUpperCase( key.charAt(0) ) + key.substring( 1 ); return that.getClass().getMethod( getterName ).getReturnType(); } @Override public Class structGetType( Object that, String key ) { try { return this.structGetTypeWithException( that, key ); } catch ( NoSuchFieldException | SecurityException e ) { return null; } } public Class structGetTypeWithException( Object that, String key ) throws NoSuchFieldException, SecurityException { if ( that == null || key == null ) { return null; } Field field = that.getClass().getField( key ); return field.getType(); } @Override public Class classGetType( Object that, String key ) { Class type = this.beanGetType( that, key ); if ( type == null ) { type = this.structGetType( that, key ); } return type; } @Override public Class getType( Object that, String key ) { if ( that == null ) { return null; } if ( that instanceof Map ) { Object value = ((Map) that).get( key ); return value != null ? value.getClass() : Object.class; } else if ( that instanceof List ) { try { int index = Integer.parseInt( key ); Object value ; if( that instanceof JSONArray ) { value = ((JSONArray) that).opt( index ); } else { value = ((List) that).get( index ); } return value != null ?
value.getClass() : Object.class; } catch ( NumberFormatException | IndexOutOfBoundsException e ) { return null; } } else if ( that.getClass().isArray() ) { return that.getClass().getComponentType(); } else { return this.classGetType( that, key ); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveArray.java ================================================ package com.pinecone.framework.system.prototype; import java.lang.reflect.InvocationTargetException; import java.util.LinkedHashMap; import java.util.Map; import com.pinecone.framework.unit.Units; import com.pinecone.framework.util.json.JSON; public class ObjectiveArray implements Objectom { protected Object[] mArray; public ObjectiveArray( Object[] arr ) { this.mArray = arr; } @Override public int size() { return this.mArray.length; } @Override public boolean isEmpty() { return this.size() == 0; } @Override public Object get( Object key ){ Integer i = ObjectiveList.affirmIntegerKey(key); if( i == null ) { return null; } return this.mArray[i]; } @Override public void set( Object key, Object val ){ Integer i = ObjectiveList.affirmIntegerKey(key); if( i == null ) { return ; } this.mArray[i] = val; } @Override public boolean hasOwnProperty( Object k ) { return this.containsKey( k ); } @Override public boolean containsKey( Object k ) { Integer i = ObjectiveList.affirmIntegerKey(k); if( i == null ) { return false; } int nLength = this.mArray.length; if( i < 0 || nLength == 0 ){ return false; } return nLength > i; } @Override public String toJSONString() { return JSON.stringify( this.mArray ); } @Override public Map toMap(Class mapType ) { Map map = Units.newInstance( mapType ); int i = 0; for( Object e : this.mArray ) { map.put( Integer.toString( i ), e ); ++i; } return map; } @Override public Map toMap() { return this.toMap( LinkedHashMap.class ); } @Override public TypeIndex prototype() { return Prototype.typeid( this.mArray ); } @Override public String prototypeName() { return Prototype.prototypeName(this.mArray); } @Override public Integer[] keys() { Integer[] list = new Integer[ this.mArray.length ]; for ( int i = 0; i < this.mArray.length; ++i ) { list[ i ] = i; } return list; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveBean.java ================================================ package com.pinecone.framework.system.prototype; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; import com.pinecone.framework.system.stereotype.JavaBeans; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.unit.Units; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSONEncoder; public class ObjectiveBean implements Objectom { protected Object mObj; protected Entry[] mGetMethods; protected Entry[] mSetMethods; public ObjectiveBean( Object bean ) { this.mObj = bean; this.cacheMethods(); } protected void cacheMethods() { Class klass = this.mObj.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Method[] methods = includeSuperClass ? 
klass.getMethods() : klass.getDeclaredMethods(); ArrayList<Entry> getDummy = new ArrayList<>(); ArrayList<Entry> setDummy = new ArrayList<>(); for( int i = 0; i < methods.length; ++i ) { try { Method method = methods[i]; if ( Modifier.isPublic( method.getModifiers() ) ) { String key = JavaBeans.getGetterMethodKeyName( method ); if( StringUtils.isEmpty( key ) ) { key = JavaBeans.getSetterMethodKeyName( method ); if( !StringUtils.isEmpty( key ) ) { // Found setter if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 1 ) { key = JavaBeans.methodKeyNameLowerCaseNormalize( key ); setDummy.add( new Entry( key, method ) ); } } } else { // Found getter if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) { key = JavaBeans.methodKeyNameLowerCaseNormalize( key ); getDummy.add( new Entry( key, method ) ); } } } } catch ( Exception e ) { // Skip candidates that cannot be introspected. } } this.mGetMethods = getDummy.toArray( new Entry[]{} ); this.mSetMethods = setDummy.toArray( new Entry[]{} ); Arrays.sort( this.mGetMethods ); Arrays.sort( this.mSetMethods ); } @Override public int size() { return this.mGetMethods.length; } @Override public boolean isEmpty() { return this.size() == 0; } @Override public Object get( Object key ) { String szKey = key.toString(); try { int index = ObjectiveBean.binarySearch( this.mGetMethods, szKey ); if( index < 0 ) { return null; } Method method = this.mGetMethods[ index ].method; method.setAccessible( true ); return method.invoke( this.mObj ); } catch ( IllegalAccessException | InvocationTargetException e ) { return null; } } protected static int binarySearch( Entry[] those, String key ) { int low = 0; int high = those.length - 1; while ( low <= high ) { int mid = (low + high) >>> 1; int cmp = those[ mid ].name.compareTo( key ); if ( cmp < 0 ) { low = mid + 1; } else if ( cmp > 0 ) { high = mid - 1; } else { return mid; } } return -(low + 1); } @Override public void set( Object key, Object val ) { String szKey = key.toString(); try { int index = ObjectiveBean.binarySearch( this.mSetMethods, szKey ); if( index < 0 ) { throw new IllegalArgumentException( "Specific setter-method not found: set" + JavaBeans.methodKeyNameUpperCaseNormalize( szKey ) ); } Method method = this.mSetMethods[ index ].method; method.setAccessible( true ); method.invoke( this.mObj, val ); } catch ( IllegalAccessException | InvocationTargetException e ) { throw new RuntimeException( e ); } } @Override public boolean hasOwnProperty( Object k ) { if ( this.mObj instanceof PineUnit ) { return ( (PineUnit) this.mObj ).hasOwnProperty(k); } return this.containsKey(k); } @Override public boolean containsKey( Object k ) { return ObjectiveBean.binarySearch( this.mGetMethods, k.toString() ) >= 0; } @Override public String toJSONString() { ArrayList<KeyValue<String, Object>> dummy = new ArrayList<>(); for( Entry kv : this.mGetMethods ) { Object val; try { kv.method.setAccessible( true ); val = kv.method.invoke( this.mObj ); } catch ( IllegalAccessException | InvocationTargetException e ) { break; } dummy.add( new KeyValue<>( kv.name, val ) ); } return JSONEncoder.stringifyMapFormat( dummy ); } @Override public Map toMap( Class mapType ) { Map map = Units.newInstance( mapType ); for( Entry kv : this.mGetMethods ) { Object val; try { kv.method.setAccessible( true ); val = kv.method.invoke( this.mObj ); map.put( kv.name, val ); } catch ( IllegalAccessException | InvocationTargetException e ) { break; } } return map; } @Override public Map toMap() { return this.toMap( LinkedHashMap.class ); }
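/* A minimal usage sketch; the Point bean is an assumption for illustration, not part of this repository: class Point { private int x = 1; public int getX() { return x; } public void setX( int x ) { this.x = x; } } Objectom om = Objectom.wrap( new Point() ); // neither Map, List nor array, so it is wrapped as an ObjectiveBean; om.get( "x" ) binary-searches the sorted getter table and invokes getX(); om.set( "x", 2 ) binary-searches the setter table and invokes setX(int). */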
@Override public TypeIndex prototype() { return Prototype.typeid( this.mObj ); } @Override public String prototypeName() { return Prototype.prototypeName( this.mObj ); } static class Entry implements Comparable<Entry> { String name; Method method; Entry( String name, Method method ) { this.name = name; this.method = method; } @Override public int compareTo( Entry o ) { return this.name.compareTo( o.name ); } } @Override public String[] keys() { String[] list = new String[ this.mGetMethods.length ]; for ( int i = 0; i < this.mGetMethods.length; ++i ) { list[ i ] = this.mGetMethods[ i ].name; } return list; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveClass.java ================================================ package com.pinecone.framework.system.prototype; import java.lang.reflect.Field; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; import com.pinecone.framework.unit.Units; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.json.homotype.StructJSONEncoder; public class ObjectiveClass implements Objectom { protected Object mObj; protected Entry[] mFields; protected boolean mbUsingOrderCache; public ObjectiveClass( Object that, boolean bUsingOrderCache ) { this.mObj = that; this.mbUsingOrderCache = bUsingOrderCache; if ( bUsingOrderCache ) { this.cacheFields(); } } public ObjectiveClass( Object that ) { this( that, true ); } @Override public int size() { if( this.mFields != null ) { return this.mFields.length; } return this.mObj.getClass().getFields().length; } @Override public boolean isEmpty() { return this.size() == 0; } private void cacheFields() { Field[] classFields = this.mObj.getClass().getFields(); this.mFields = new Entry[ classFields.length ]; for ( int i = 0; i < classFields.length; ++i ) { Field field = classFields[ i ]; String fieldName = field.getName(); this.mFields[ i ] = new Entry( fieldName, field ); } Arrays.sort( this.mFields ); } public Object get( Object key ) { String szKey = key.toString(); try { if ( this.mbUsingOrderCache ) { int index = this.binarySearch( szKey ); if ( index >= 0 ) { ReflectionUtils.makeAccessible( this.mFields[index].field ); return this.mFields[ index ].field.get( this.mObj ); } } else { Field field = this.mObj.getClass().getField( szKey ); ReflectionUtils.makeAccessible( field ); return field.get( this.mObj ); } } catch ( NoSuchFieldException | IllegalAccessException e ) { return null; } return null; } protected int binarySearch( String key ) { int low = 0; int high = this.mFields.length - 1; while ( low <= high ) { int mid = (low + high) >>> 1; int cmp = this.mFields[ mid ].name.compareTo( key ); if ( cmp < 0 ) { low = mid + 1; } else if ( cmp > 0 ) { high = mid - 1; } else { return mid; } } return -(low + 1); } public void set( Object key, Object val ) { String szKey = key.toString(); try { if ( this.mbUsingOrderCache ) { int index = this.binarySearch( szKey ); if ( index >= 0 ) { ReflectionUtils.makeAccessible( this.mFields[ index ].field ); this.mFields[index].field.set( this.mObj, val ); return; } } else { Field field = this.mObj.getClass().getField( szKey ); ReflectionUtils.makeAccessible( field ); field.set( this.mObj, val ); return; } } catch ( NoSuchFieldException | IllegalAccessException e ) { throw new RuntimeException( e ); } throw new IllegalArgumentException( "Field not found: " + key ); } @Override public boolean hasOwnProperty( Object k ) { if ( this.mObj instanceof
PineUnit ) { return ( (PineUnit) this.mObj ).hasOwnProperty(k); } return this.containsKey(k); } @Override public boolean containsKey( Object k ) { String szKey = k.toString(); try { if ( this.mbUsingOrderCache ) { return this.binarySearch( szKey ) >= 0; } else { Field field = this.mObj.getClass().getField( szKey ); return field != null; } } catch (NoSuchFieldException e) { return false; } } @Override public String toJSONString() { return StructJSONEncoder.BasicEncoder.encode( this.mObj ); } @Override public Map toMap( Class mapType ) { Map map = Units.newInstance( mapType ); Field[] classFields = this.mObj.getClass().getFields(); for ( int i = 0; i < classFields.length; ++i ) { Field field = classFields[ i ]; String fieldName = field.getName(); try { map.put( fieldName, field.get( this.mObj ) ); } catch ( IllegalAccessException e ) { try { field.setAccessible( true ); map.put( fieldName, field.get( this.mObj ) ); field.setAccessible( false ); } catch ( IllegalAccessException ignore ) { // Do nothing. } } } return map; } @Override public Map toMap() { return this.toMap( LinkedHashMap.class ); } @Override public TypeIndex prototype() { return Prototype.typeid( this.mObj ); } @Override public String prototypeName() { return Prototype.prototypeName( this.mObj ); } private static class Entry implements Comparable<Entry> { String name; Field field; Entry( String name, Field field ) { this.name = name; this.field = field; } @Override public int compareTo( Entry o ) { return this.name.compareTo( o.name ); } } @Override public String[] keys() { int size = this.size(); // Saving some logic operations. String[] list = new String[ size ]; if( this.mFields != null && this.mFields.length > 0 ) { for ( int i = 0; i < size; ++i ) { list[ i ] = this.mFields[i].name; } } else { Field[] classFields = this.mObj.getClass().getFields(); for ( int i = 0; i < size; ++i ) { list[ i ] = classFields[i].getName(); } } return list; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveEvaluator.java ================================================ package com.pinecone.framework.system.prototype; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.GenericArrayType; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.WildcardType; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.stereotype.JavaBeans; public interface ObjectiveEvaluator extends Pinenut { ObjectiveEvaluator MapStructures = new MapStructuresEvaluator(); Object beanGet( Object that, String key ); Object beanGetExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException; default Object beanGet( Object that, Object key ) { return this.beanGet( that, key.toString() ); } Object structGet( Object that, String key ); default Object structGet( Object that, Object key ) { return this.structGet( that, key.toString() ); } Object get( Object that, String key ); default Object get( Object that, Object key ) { return this.get( that, key.toString() ); } Object classGet( Object that, String key ); default Object classGet( Object that, Object key ) { return this.classGet( that, key.toString() ); }
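/* Resolution tiers, illustrated with an assumed class (not in this repository): given class P { public int x = 7; }, MapStructures.get( new P(), "x" ) sees no container shape, so classGet first tries the bean getter getX(), finds none, and then falls back to the public field x, yielding 7. */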
void beanSet( Object that, String key, Object val ); void beanSetExp( Object that, String key, Object val ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException; default void beanSet( Object that, Object key, Object val ) { this.beanSet( that, key.toString(), val ); } void structSet( Object that, String key, Object val ); default void structSet( Object that, Object key, Object val ) { this.structSet( that, key.toString(), val ); } void set( Object that, String key, Object val ); void classSet( Object that, String key, Object val ); default void classSet( Object that, Object key, Object val ) { this.classSet( that, key.toString(), val ); } default void set( Object that, Object key, Object val ) { this.set( that, key.toString(), val ); } Class beanGetType( Object that, String key ); Class beanGetTypeExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalArgumentException; default Class beanGetType( Object that, Object key ) { return this.beanGetType( that, key.toString() ); } Class structGetType( Object that, String key ); default Class structGetType( Object that, Object key ) { return this.structGetType( that, key.toString() ); } Class getType( Object that, String key ); default Class getType( Object that, Object key ) { return this.getType( that, key.toString() ); } Class classGetType( Object that, String key ); default Class classGetType( Object that, Object key ) { return this.classGetType( that, key.toString() ); } default Type getFieldGenericType( Object obj, String fieldName ) { Type fieldGenericType = null; try{ if( obj != null ) { Field field = obj.getClass().getDeclaredField( fieldName ); fieldGenericType = field.getGenericType(); } } catch ( NoSuchFieldException | SecurityException e ) { fieldGenericType = null; } return fieldGenericType; } default Type getGetterGenericType( Object that, String key ) { Type genericType = null; try{ if( that != null ) { String getterName = JavaBeans.MethodMajorKeyGet + Character.toUpperCase( key.charAt(0) ) + key.substring( 1 ); Method getter = that.getClass().getMethod( getterName ); genericType = getter.getGenericReturnType(); } } catch ( NoSuchMethodException | SecurityException e ) { genericType = null; } return genericType; } default Type getSetterGenericType( Object that, String key ) { Type genericType = null; if( that != null ) { String setterName = JavaBeans.MethodMajorKeySet + Character.toUpperCase( key.charAt(0) ) + key.substring( 1 ); Method[] methods = that.getClass().getMethods(); for( Method method : methods ) { if( method.getName().equals( setterName ) && method.getParameterCount() == 1 ) { Type[] pars = method.getGenericParameterTypes(); genericType = pars[ 0 ]; break; } } } return genericType; } default Type getElementGenericType( Object that, String key ) { Type t = this.getSetterGenericType( that, key ); if( t == null ) { t = this.getGetterGenericType( that, key ); } if( t == null ) { t = this.getFieldGenericType( that, key ); } return t; } static Class resolveRawClass( Type type ) { if ( type instanceof Class ) { return (Class) type; } if ( type instanceof ParameterizedType ) { return (Class) ((ParameterizedType) type).getRawType(); } if ( type instanceof GenericArrayType ) { Type c = ((GenericArrayType) type).getGenericComponentType(); return Array.newInstance(resolveRawClass(c), 0).getClass(); } if ( type instanceof WildcardType ) { Type[] upper = ((WildcardType) type).getUpperBounds(); return resolveRawClass(upper[0]); } return Object.class; }
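/* A sketch of the generic-type helpers on an assumed field (not in this repository): for class Box { public java.util.List<String> tags; }, Type t = MapStructures.getFieldGenericType( box, "tags" ) yields the ParameterizedType java.util.List<java.lang.String>; ObjectiveEvaluator.resolveRawClass( t ) yields java.util.List; ObjectiveEvaluator.extractGenericElementType( t ) yields java.lang.String. */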
static Type extractGenericElementType( Type type ) { if ( type instanceof ParameterizedType ) { ParameterizedType pt = (ParameterizedType) type; Type raw = pt.getRawType(); if (raw == List.class || raw == Set.class || raw == Collection.class) { return pt.getActualTypeArguments()[0]; } if (raw == Map.class) { return pt.getActualTypeArguments()[1]; // value type } } if ( type instanceof GenericArrayType ) { return ((GenericArrayType) type).getGenericComponentType(); } if ( type instanceof Class && ((Class) type).isArray() ) { return ((Class) type).getComponentType(); } return null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveList.java ================================================ package com.pinecone.framework.system.prototype; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import com.pinecone.framework.unit.Units; import com.pinecone.framework.util.json.JSON; public class ObjectiveList<T> implements Objectom { protected List<T> mList; public ObjectiveList( List<T> list ) { this.mList = list; } public static Integer affirmIntegerKey( Object key ) { if ( key instanceof Integer ) { return (Integer) key; } else if ( key instanceof Long ) { return (int)(long) key; } else if ( key instanceof Short ) { return (int)(short) key; } else if ( key instanceof Byte ) { return (int) (byte) key; } else if ( key instanceof String ) { String szKey = (String) key; try { return Integer.parseInt( szKey ); } catch ( NumberFormatException e ) { return null; } } return null; } @Override public int size() { return this.mList.size(); } @Override public boolean isEmpty() { return this.mList.isEmpty(); } @Override public Object get( Object key ){ Integer i = ObjectiveList.affirmIntegerKey(key); if( i == null ) { return null; } return this.mList.get(i); } @Override @SuppressWarnings("unchecked") public void set( Object key, Object val ){ Integer i = ObjectiveList.affirmIntegerKey(key); if( i == null ) { return ; } this.mList.set(i, (T)val); } @Override public boolean hasOwnProperty( Object k ) { if( this.mList instanceof PineUnit ) { return ( (PineUnit)this.mList ).hasOwnProperty( k ); } return this.containsKey( k ); } @Override public boolean containsKey( Object k ) { Integer i = ObjectiveList.affirmIntegerKey(k); if( i == null ) { return false; } int nLength = this.mList.size(); if( i < 0 || nLength == 0 ){ return false; } return nLength > i; } @Override public String toJSONString() { return JSON.stringify(this.mList); } @Override public Map toMap(Class mapType ) { Map map = Units.newInstance( mapType ); int i = 0; for( Object e : this.mList ) { map.put( Integer.toString( i ), e ); ++i; } return map; } @Override public Map toMap() { return this.toMap( LinkedHashMap.class ); } @Override public TypeIndex prototype() { return Prototype.typeid( this.mList ); } @Override public String prototypeName() { return Prototype.prototypeName(this.mList); } @Override public Integer[] keys() { Integer[] list = new Integer[ this.mList.size() ]; for ( int i = 0; i < this.mList.size(); ++i ) { list[ i ] = i; } return list; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveMap.java ================================================ package com.pinecone.framework.system.prototype; import com.pinecone.framework.unit.Units; import com.pinecone.framework.util.json.JSON; import java.util.LinkedHashMap; import java.util.Map; public class ObjectiveMap<K, V> implements Objectom {
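/* Key semantics sketch: ObjectiveList accepts Integer, Long, Short, Byte or a numeric String as the index and quietly rejects anything else, while ObjectiveMap below passes keys through to the underlying Map unchanged. */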
protected Map<K, V> mMap; public ObjectiveMap( Map<K, V> map ) { this.mMap = map; } @Override public int size() { return this.mMap.size(); } @Override public boolean isEmpty() { return this.mMap.isEmpty(); } public Object get( Object key ){ return this.mMap.get(key); } @SuppressWarnings("unchecked") public void set( Object key, Object val ){ this.mMap.put((K)key, (V)val); } @Override public boolean hasOwnProperty( Object k ) { if( this.mMap instanceof PineUnit ) { return ( (PineUnit)this.mMap ).hasOwnProperty( k ); } return this.containsKey(k); } @Override public boolean containsKey( Object k ) { return this.mMap.containsKey(k); } @Override public String toJSONString() { return JSON.stringify(this.mMap); } @Override public Map toMap( Class mapType ) { Map map = Units.newInstance( mapType ); for( Map.Entry<K, V> kv : this.mMap.entrySet() ) { map.put( kv.getKey().toString(), kv.getValue() ); } return map; } @Override public Map toMap() { return this.toMap( LinkedHashMap.class ); } @Override public TypeIndex prototype() { return Prototype.typeid( this.mMap ); } @Override public String prototypeName() { return Prototype.prototypeName(this.mMap); } @Override @SuppressWarnings( "unchecked" ) public K[] keys() { return (K[])this.mMap.keySet().toArray(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Objectom.java ================================================ package com.pinecone.framework.system.prototype; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Pinecone Ursus For Java Objectom * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Objectom is a uniform map-operator: a wrapper class that exposes heterogeneous objects through one unified get/set interface. * The following types are implemented and conformed to the unified interface: Array, List, Map, Fielded-Class, Bean-Class * ***************************************************************************************** * Notice: * 1. All wrapped objects are non-appendable and should be considered as a `class`: only get/set are supported. * 2. Except for `set`, the other methods should be considered const, in the C++ sense of `const Type* method() const;` * 3. In some scenarios, such as the `bean`, the `gets` may not be paired with matching `sets`. * 4. In some scenarios, e.g. the `class`, the value needs to be retrieved from inner fields or methods. * 4.1 In these conditions exceptions may be provoked, so no explicit `values()` method is given. * 4.2 All implicit keys of the `class` are retrieved, which may be more than expected.
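* Illustrative dispatch (assumed objects, not a contract): Objectom.wrap( aBean ).get( "name" ) resolves to aBean.getName(), while Objectom.wrap( aMap ).get( "name" ) resolves to aMap.get( "name" ).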
* ***************************************************************************************** * Dragon King, the undefined */ public interface Objectom extends PineUnit { int size(); boolean isEmpty(); Object get( Object key ); void set( Object key, Object val ); boolean containsKey( Object k ) ; // Readonly // const Object* keys() const; Object[] keys(); Map toMap( Class mapType ); default Map toMap() { return this.toMap( LinkedHashMap.class ); } @SuppressWarnings("unchecked") static Objectom wrap( Object that ) { if( that instanceof Objectom ) { return (Objectom) that; } else if( that instanceof Map ) { return new ObjectiveMap<>( (Map) that ); } else if( that instanceof List) { return new ObjectiveList<>( (List) that ); } else if( that.getClass().isArray() ){ return new ObjectiveArray( (Object[]) that ); } return new ObjectiveBean(that); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/OverridableFamily.java ================================================ package com.pinecone.framework.system.prototype; public interface OverridableFamily extends FamilyContext { boolean isOverriddenAffinity(); void setOverriddenAffinity( boolean overrideAffinity ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/PineUnit.java ================================================ package com.pinecone.framework.system.prototype; public interface PineUnit extends Pinenut { boolean hasOwnProperty( Object elm ); boolean containsKey( Object key ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Pinenut.java ================================================ package com.pinecone.framework.system.prototype; public interface Pinenut { default TypeIndex prototype() { return Prototype.typeid( this ); } default String prototypeName() { return this.className(); } default boolean isPrototypeOf( TypeIndex that ){ return this.prototype().equals( that ); } default String className() { return this.getClass().getSimpleName(); } default String toJSONString() { return String.format( "\"[object %s(0x%s)]\"", this.className() , Integer.toHexString( this.hashCode() ) ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/PinenutTraits.java ================================================ package com.pinecone.framework.system.prototype; import com.pinecone.framework.system.functions.Executor; import com.pinecone.framework.system.functions.Function; import com.pinecone.framework.util.ReflectionUtils; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.concurrent.Callable; public final class PinenutTraits { public static final String OBJ_STRINGIFY_DEFAULT = "[object %s]"; //I think javascript's format is marvelous. 
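/* Expected shapes on assumed inputs (illustrative only, not asserted by tests in this repository): PinenutTraits.invokeToString( (Runnable) () -> {}, null ) yields "[object Runnable]"; PinenutTraits.invokeToString( new Object(), null ) yields "[object Object]". */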
public static final String FUN_TO_JSON_STRING_NAME = "toJSONString"; public static String invokeToJSONString ( Object that ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { Method fnToJSONString = that.getClass().getMethod( PinenutTraits.FUN_TO_JSON_STRING_NAME ); ReflectionUtils.makeAccessible( fnToJSONString ); return (String) fnToJSONString.invoke( that ); } public static String invokeToJSONString ( Object that, int nIndentFactor, int nIndentBlankNum ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { Method fnToJSONString = that.getClass().getMethod( PinenutTraits.FUN_TO_JSON_STRING_NAME, int.class, int.class ); ReflectionUtils.makeAccessible( fnToJSONString ); return (String) fnToJSONString.invoke( that, nIndentFactor, nIndentBlankNum ); } public static String invokeToJSONString ( Object that, String szDefaultResult ) { try{ return PinenutTraits.invokeToJSONString( that ); } catch ( NoSuchMethodException | IllegalAccessException | InvocationTargetException e ){ if( szDefaultResult == null ){ return that.toString(); } return szDefaultResult; } } public static String invokeToString ( Object that, Object dyDefaultResult ) { try{ return PinenutTraits.invokeCaseToString( that, dyDefaultResult ); } catch ( IllegalArgumentException e ) { return String.format( PinenutTraits.OBJ_STRINGIFY_DEFAULT, that.getClass().getName() + "(0x" + Integer.toHexString( that.hashCode() ) + ")" ); } } public static String invokeCaseToString ( Object that, Object dyDefaultResult ) throws IllegalArgumentException { if( that == null ){ return "null"; } else if( that instanceof Function ){ return "[object Function]"; } else if( that instanceof Executor ){ return "[object Executor]"; } else if( that instanceof Runnable ){ return "[object Runnable]"; } else if( that instanceof Callable ){ return "[object Callable]"; } else if( that.getClass().isEnum() ){ return that.toString(); } else if( that.getClass() == Object.class ){ /* Checked before isMethodDeclared, which is always true for Object itself. Hei hei hei~ :) */ return "[object Object]"; } else if( Prototype.isMethodDeclared( that, "toString" ) ){ return that.toString(); } else if( dyDefaultResult instanceof Boolean && (boolean)dyDefaultResult ) { return that.toString(); } else if( dyDefaultResult instanceof String ){ return (String) dyDefaultResult; } throw new IllegalArgumentException(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Prototype.java ================================================ package com.pinecone.framework.system.prototype; import java.lang.reflect.*; import java.util.Arrays; import java.util.HashSet; public abstract class Prototype { public static String prototypeName( Object that ){ try { return that.getClass().getSimpleName(); } catch ( Exception E ){ return "[object Object]"; } } public static TypeIndex typeid( Object that ) { return new TypeIndex( that ); } public static String namespace( Class that ){ //return that.getName().split( "." + that.getSimpleName() )[0]; return that.getPackage().getName(); } public static String namespace( Object that ){ return Prototype.namespace( that.getClass() ); } public static String namespaceNode ( Class that ) { String szNamespace = Prototype.namespace( that ); String[] debris = szNamespace.split("\\."); return debris.length <= 1 ?
szNamespace : debris [ debris.length - 1 ]; } public static String namespaceNode ( Object that ) { return Prototype.namespaceNode( that.getClass() ); } public static boolean isAbstract ( Class that ) { return Modifier.isAbstract( that.getModifiers() ); } private static String[] getPropertyNames ( Object that, boolean bAllOwned ) { if ( that == null ) { return null; } else { Class klass = that.getClass(); Field[] fields = klass.getDeclaredFields(); int length = fields.length; if ( length == 0 ) { return null; } else { String[] names = new String[length]; int j = 0; for( int i = 0; i < length; ++i ) { Field field = fields[i]; if ( (!Modifier.isPublic(field.getModifiers()) || Modifier.isFinal(field.getModifiers())) && !field.isAccessible() ) { if( !bAllOwned ){ continue; } } names[j++] = fields[i].getName(); } if( !bAllOwned ){ return Arrays.copyOf( names, j ); } return names; } } } public static String[] getOwnPropertyNames ( Object that ){ return Prototype.getPropertyNames( that, true ); } public static String[] keys ( Object that ){ return Prototype.getPropertyNames( that, false ); } public static HashSet getDeclaredMethodsNameSet( Object that ){ HashSet hashSet = new HashSet<>(); Prototype.getDeclaredMethodsNameSet( hashSet, that ); return hashSet; } public static void getDeclaredMethodsNameSet( HashSet hSet, Object that ){ Prototype.getDeclaredMethodsNameSet( hSet, that.getClass() ); } public static HashSet getDeclaredMethodsNameSet( Class that ){ HashSet hashSet = new HashSet<>(); Prototype.getDeclaredMethodsNameSet( hashSet, that ); return hashSet; } public static void getDeclaredMethodsNameSet( HashSet set, Class hThatClass ){ Method[] methods = hThatClass.getDeclaredMethods(); for ( Method row : methods ) { set.add( row.getName() ); } } public static Object invokeNoParameterMethod ( Object that , String szFunctionName ) throws NoSuchMethodException, InvocationTargetException ,IllegalAccessException { Method method = that.getClass().getMethod( szFunctionName ); return method.invoke( that ); } public static boolean isMethodDeclared ( Object that, String szFnName, Class... 
parameterTypes ) { try{ return that.getClass().getDeclaredMethod( szFnName, parameterTypes ) != null; } catch ( NoSuchMethodException e ){ return false; } } public static Class primitivify( Class c ){ if( c == Byte.class ){ return byte.class; } else if( c == Short.class ){ return short.class; } else if( c == Integer.class ){ return int.class; } else if( c == Long.class ){ return long.class; } else if( c == Float.class ){ return float.class; } else if( c == Double.class ){ return double.class; } else if( c == Character.class ){ return char.class; } else if( c == Void.class ){ return void.class; } return c; } /** Element **/ public static boolean isNumber( Class stereotype ) { if( Number.class.isAssignableFrom( stereotype ) ){ return true; } else if( stereotype.isPrimitive() ){ return stereotype == byte.class || stereotype == short.class || stereotype == int.class || stereotype == long.class || stereotype == float.class || stereotype == double.class; } return false; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Strategy.java ================================================ package com.pinecone.framework.system.prototype; public interface Strategy extends Pinenut, Cloneable { boolean matched( Object condition ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Summoner.java ================================================ package com.pinecone.framework.system.prototype; public interface Summoner extends Pinenut { Object summon( String szClassPath, Object... args ) throws Exception ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/TypeIndex.java ================================================ package com.pinecone.framework.system.prototype; /** * Pinecone For Java TypeIndex [ Runtime Smart prototype Identity ] * Copyright © 2008 - 2024 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Mr.A.R.B / WJH] * Tip: * ***************************************************************************************** * Author: undefined * Last Modified Date: 2021-03-13 * ***************************************************************************************** * For name: It's simple name of `class`. 
* Full name should be considered as Namespace(PackageName) + SimpleName * ***************************************************************************************** */ public class TypeIndex { private Class mClass = null; private Class mParent = null; private Object mThis = null; public TypeIndex( Object that ) { this.mParent = that.getClass().getSuperclass(); this.mThis = that; this.mClass = that.getClass(); } public TypeIndex prototype(){ return this; } public Object proto(){ return this.mThis; } public Class parent(){ return this.mParent; } public Class classType() { return this.mClass; } public String namespace() { return Prototype.namespace( this.mClass ); } public String name() { return this.mClass.getSimpleName(); } public String typeName(){ return this.mClass.getName(); } @Override public String toString(){ return this.typeName(); } @Override public boolean equals( Object that ) { if( that instanceof TypeIndex ){ TypeIndex realThat = (TypeIndex)that; return this.mClass.equals( realThat.mClass ) ; } return false; } @Override public int hashCode() { return this.mClass.hashCode(); } // Kept consistent with the mClass-based equals. } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Automatus.java ================================================ package com.pinecone.framework.system.regime; /** * Automatus (Automaton) */ public interface Automatus extends Executioner { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Censorate.java ================================================ package com.pinecone.framework.system.regime; /** * Censorate * Reviewer. */ public interface Censorate extends Volition, Supervisor { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Examiner.java ================================================ package com.pinecone.framework.system.regime; /** * Examiner * Inspector. */ public interface Examiner extends Executioner, Supervisor { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Executioner.java ================================================ package com.pinecone.framework.system.regime; import com.pinecone.framework.system.prototype.Pinenut; /** * Executioner * Executor. */ public interface Executioner extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Instrument.java ================================================ package com.pinecone.framework.system.regime; /** * Instrument * Organizer. */ public interface Instrument extends Volition { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Orchestrator.java ================================================ package com.pinecone.framework.system.regime; /** * Orchestrator * Arranger. */ public interface Orchestrator extends Executioner, Volition { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Regiment.java ================================================ package com.pinecone.framework.system.regime; public interface Regiment extends Volition, Executioner, Supervisor { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Supervisor.java ================================================ package com.pinecone.framework.system.regime;
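/* The regime package is a lattice of role-marker interfaces; a concrete component opts into roles by implementing one of the composites. An illustrative class (not in this repository): class AuditProbe implements Examiner { } combines Executioner and Supervisor, i.e. it both executes checks and inspects state. */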
import com.pinecone.framework.system.prototype.Pinenut; /** * Supervisor * Inspector-general. */ public interface Supervisor extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Tracker.java ================================================ package com.pinecone.framework.system.regime; /** * Tracker * Observer. */ public interface Tracker extends Supervisor { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Volition.java ================================================ package com.pinecone.framework.system.regime; import com.pinecone.framework.system.prototype.Pinenut; /** * Volition * Will element. */ public interface Volition extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Controllor.java ================================================ package com.pinecone.framework.system.regime.arch; /** * Controllor, Function Controller * Minimal critical controller. */ public interface Controllor extends Dominator { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Director.java ================================================ package com.pinecone.framework.system.regime.arch; /** * Director, Module Manager Controller * Group controller. */ public interface Director extends Manager { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Dominator.java ================================================ package com.pinecone.framework.system.regime.arch; import com.pinecone.framework.system.prototype.Pinenut; /** * Dominator, Control-Element * Domination / management unit * Or, a.k.a., a Driver.
*/ public interface Dominator extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Dominus.java ================================================ package com.pinecone.framework.system.regime.arch; /** * Dominus, Central Controller */ public interface Dominus extends Dominator { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Lord.java ================================================ package com.pinecone.framework.system.regime.arch; /** * Lord, Domain Controller */ public interface Lord extends Dominator { void release(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Manager.java ================================================ package com.pinecone.framework.system.regime.arch; /** * Manager, Module Controller * Component controller. */ public interface Manager extends Dominator { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/CascadeNodus.java ================================================ package com.pinecone.framework.system.regimentation; import com.pinecone.framework.util.name.Namespace; import com.pinecone.framework.util.name.UniNamespace; public interface CascadeNodus extends Nodus { CascadeNodus parent(); default boolean isRoot() { return this.parent() == null; } default CascadeNodus root() { CascadeNodus p = this; CascadeNodus c = p; while ( p != null ) { c = p; p = p.parent(); } return c; } Namespace getTargetingName(); void setTargetingName( Namespace name ); default void setTargetingName( String name ) { Namespace p = null; if( this.parent() != null ) { p = this.parent().getTargetingName(); } this.setTargetingName( new UniNamespace( name, p ) ); } default String getSimpleName() { return this.getTargetingName().getSimpleName(); } default String getFullName() { return this.getTargetingName().getFullName(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/Nodus.java ================================================ package com.pinecone.framework.system.regimentation; import com.pinecone.framework.system.prototype.Pinenut; public interface Nodus extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/UniformCascadeNodus.java ================================================ package com.pinecone.framework.system.regimentation; import com.pinecone.framework.util.name.Namespace; public interface UniformCascadeNodus extends CascadeNodus, UniformNodus { @Override default Namespace getUniformName() { return this.getTargetingName(); } @Override default void setUniformName( Namespace name ) { this.setTargetingName( name ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/UniformNodus.java ================================================ package com.pinecone.framework.system.regimentation; import com.pinecone.framework.util.name.Namespace; /** * Pinecone Framework For Java (Bean Nuts Pinecone Ursus for Java) * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.
* ***************************************************************************************** * UniformNodus * Regimentation Uniform Node * ***************************************************************************************** * Dragon King, the undefined */ public interface UniformNodus extends Nodus { /** * Nomenclature of the node's name, usually the path of a cascade centralized tree. */ Namespace getUniformName(); void setUniformName( Namespace name ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/stereotype/HungarianNotation.java ================================================ package com.pinecone.framework.system.stereotype; import com.pinecone.framework.system.functions.Executable; import com.pinecone.framework.system.prototype.Prototype; import java.lang.reflect.Method; public class HungarianNotation { public static final String S_PRE_STRING_ZERO = "sz"; public static final String S_PRE_STRING = "s"; public static final String S_PRE_NUMBER = "n"; public static final String S_PRE_BOOLEAN = "b"; public static final String S_PRE_CHAR = "c"; public static final String S_PRE_FUNCTION = "fn"; public static final String S_PRE_MEMBER = "m"; public static final String S_PRE_HANDLE = "h"; public static String toUpperCaseFirst( String szProto ){ StringBuilder sb = new StringBuilder(); sb.append( szProto ); sb.setCharAt( 0, Character.toUpperCase( sb.charAt(0) ) ); return sb.toString(); } public static String addPrefix( String szProto, Class stereotype ) { String szRealName = HungarianNotation.toUpperCaseFirst( szProto ); if( Prototype.isNumber( stereotype ) ){ return HungarianNotation.S_PRE_NUMBER + szRealName; } else if ( stereotype == String.class ){ return HungarianNotation.S_PRE_STRING_ZERO + szRealName; } else if ( stereotype == Boolean.class || stereotype == boolean.class ){ return HungarianNotation.S_PRE_BOOLEAN + szRealName; } else if ( stereotype == Character.class || stereotype == char.class ){ return HungarianNotation.S_PRE_CHAR + szRealName; } else if ( Executable.class.isAssignableFrom( stereotype ) || stereotype == Method.class ){ return HungarianNotation.S_PRE_FUNCTION + szRealName; } return szProto; } public static String unPrefix( String szProto, Class stereotype ) { StringBuilder sb = new StringBuilder(); sb.append( szProto ); if( Prototype.isNumber( stereotype ) ){ sb.deleteCharAt( 0 ); } else if ( stereotype == String.class ){ sb.delete( 0, 2 ); // Strip the two-character prefix "sz" added by addPrefix. } else if ( stereotype == Boolean.class || stereotype == boolean.class ){ sb.deleteCharAt( 0 ); } else if ( stereotype == Character.class || stereotype == char.class ){ sb.deleteCharAt( 0 ); } else if ( Executable.class.isAssignableFrom( stereotype ) || stereotype == Method.class ){ sb.delete( 0, 2 ); // Strip the two-character prefix "fn" added by addPrefix. } if( sb.length() != szProto.length() ){ sb.setCharAt( 0, Character.toLowerCase( sb.charAt(0) ) ); return sb.toString() ; } return szProto; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/stereotype/JavaBeans.java ================================================ package com.pinecone.framework.system.stereotype; import java.lang.reflect.Method; public final class JavaBeans { public static final String MethodKeyGetClass = "getClass"; public static final String MethodKeyGetDeclaringClass = "getDeclaringClass"; public static final String MethodMajorKeyGet = "get"; public static final int MethodMajorKeyGetLength = JavaBeans.MethodMajorKeyGet.length(); public
static final String MethodMajorKeyIs = "is"; public static final int MethodMajorKeyIsLength = JavaBeans.MethodMajorKeyIs.length(); public static final String MethodMajorKeySet = "set"; public static final int MethodMajorKeySetLength = JavaBeans.MethodMajorKeySet.length(); public static String getGetterMethodKeyName( String szMethodName ) { String key = null; if ( szMethodName.startsWith( JavaBeans.MethodMajorKeyGet ) ) { if ( !JavaBeans.MethodKeyGetClass.equals(szMethodName) && !JavaBeans.MethodKeyGetDeclaringClass.equals(szMethodName) ) { key = szMethodName.substring( JavaBeans.MethodMajorKeyGetLength ); // "get" } } else if ( szMethodName.startsWith( JavaBeans.MethodMajorKeyIs ) ) { key = szMethodName.substring( JavaBeans.MethodMajorKeyIsLength ); // "is" } return key; } public static String getGetterMethodKeyName( Method method ) { return JavaBeans.getGetterMethodKeyName( method.getName() ); } // First character lower case. public static String methodKeyNameLowerCaseNormalize( String key ) { if ( key.length() == 1 ) { key = key.toLowerCase(); } else if ( !Character.isUpperCase( key.charAt( 1 ) ) ) { key = key.substring(0, 1).toLowerCase() + key.substring(1); } return key; } // First character lower case. public static String getKeyGetterMethodNameLowerCaseNormalized( String szMethodName ) { return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getGetterMethodKeyName( szMethodName ) ); } // First character lower case. public static String getKeyGetterMethodNameLowerCaseNormalized( Method method ) { return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getGetterMethodKeyName( method ) ); } public static String getSetterMethodKeyName( String szMethodName ) { String key = null; if ( szMethodName.startsWith( JavaBeans.MethodMajorKeySet ) ) { key = szMethodName.substring( JavaBeans.MethodMajorKeySetLength ); // "set" } return key; } public static String getSetterMethodKeyName( Method method ) { return JavaBeans.getSetterMethodKeyName( method.getName() ); } // First character upper case. public static String methodKeyNameUpperCaseNormalize( String key ) { if ( key.length() == 1 ) { key = key.toUpperCase(); } else if ( Character.isLowerCase( key.charAt( 0 ) ) ) { key = key.substring(0, 1).toUpperCase() + key.substring(1); } return key; } // First character lower case. public static String getKeySetterMethodNameLowerCaseNormalized( String szMethodName ) { return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getSetterMethodKeyName( szMethodName ) ); } // First character lower case. public static String getKeySetterMethodNameLowerCaseNormalized( Method method ) { return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getSetterMethodKeyName( method ) ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/AbstractMap.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.util.json.JSON; import java.io.Serializable; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.AbstractSet; import java.util.AbstractCollection; public abstract class AbstractMap<K, V> implements Map<K, V>, PineUnit { protected AbstractMap() { } // Query Operations /** * {@inheritDoc} * * @implSpec * This implementation returns {@code entrySet().size()}.
*/ public int size() { return entrySet().size(); } /** * {@inheritDoc} * * @implSpec * This implementation returns {@code size() == 0}. */ public boolean isEmpty() { return size() == 0; } /** * {@inheritDoc} * * @implSpec * This implementation iterates over {@code entrySet()} searching * for an entry with the specified value. If such an entry is found, * {@code true} is returned. If the iteration terminates without * finding such an entry, {@code false} is returned. Note that this * implementation requires linear time in the size of the map. * * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} */ public boolean containsValue(Object value) { Iterator> i = entrySet().iterator(); if (value==null) { while (i.hasNext()) { Entry e = i.next(); if (e.getValue()==null) return true; } } else { while (i.hasNext()) { Entry e = i.next(); if (value.equals(e.getValue())) return true; } } return false; } /** * {@inheritDoc} * * @implSpec * This implementation iterates over {@code entrySet()} searching * for an entry with the specified key. If such an entry is found, * {@code true} is returned. If the iteration terminates without * finding such an entry, {@code false} is returned. Note that this * implementation requires linear time in the size of the map; many * implementations will override this method. * * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} */ public boolean containsKey(Object key) { Iterator> i = entrySet().iterator(); if (key==null) { while (i.hasNext()) { Entry e = i.next(); if (e.getKey()==null) return true; } } else { while (i.hasNext()) { Entry e = i.next(); if (key.equals(e.getKey())) return true; } } return false; } /** * {@inheritDoc} * * @implSpec * This implementation iterates over {@code entrySet()} searching * for an entry with the specified key. If such an entry is found, * the entry's value is returned. If the iteration terminates without * finding such an entry, {@code null} is returned. Note that this * implementation requires linear time in the size of the map; many * implementations will override this method. * * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} */ public V get(Object key) { Iterator> i = entrySet().iterator(); if ( key == null ) { while ( i.hasNext() ) { Entry e = i.next(); if ( e.getKey() == null ) { return e.getValue(); } } } else { while ( i.hasNext() ) { Entry e = i.next(); if ( key.equals(e.getKey()) ) { return e.getValue(); } } } return null; } // Modification Operations /** * {@inheritDoc} * * @implSpec * This implementation always throws an * {@code UnsupportedOperationException}. * * @throws UnsupportedOperationException {@inheritDoc} * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} * @throws IllegalArgumentException {@inheritDoc} */ public V put(K key, V value) { throw new UnsupportedOperationException(); } /** * {@inheritDoc} * * @implSpec * This implementation iterates over {@code entrySet()} searching for an * entry with the specified key. If such an entry is found, its value is * obtained with its {@code getValue} operation, the entry is removed * from the collection (and the backing map) with the iterator's * {@code remove} operation, and the saved value is returned. If the * iteration terminates without finding such an entry, {@code null} is * returned. Note that this implementation requires linear time in the * size of the map; many implementations will override this method. * *
<p>
Note that this implementation throws an * {@code UnsupportedOperationException} if the {@code entrySet} * iterator does not support the {@code remove} method and this map * contains a mapping for the specified key. * * @throws UnsupportedOperationException {@inheritDoc} * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} */ public V remove(Object key) { Iterator> i = entrySet().iterator(); Entry correctEntry = null; if (key==null) { while (correctEntry==null && i.hasNext()) { Entry e = i.next(); if (e.getKey()==null) correctEntry = e; } } else { while (correctEntry==null && i.hasNext()) { Entry e = i.next(); if (key.equals(e.getKey())) correctEntry = e; } } V oldValue = null; if (correctEntry !=null) { oldValue = correctEntry.getValue(); i.remove(); } return oldValue; } // Bulk Operations /** * {@inheritDoc} * * @implSpec * This implementation iterates over the specified map's * {@code entrySet()} collection, and calls this map's {@code put} * operation once for each entry returned by the iteration. * *
<p>
Note that this implementation throws an * {@code UnsupportedOperationException} if this map does not support * the {@code put} operation and the specified map is nonempty. * * @throws UnsupportedOperationException {@inheritDoc} * @throws ClassCastException {@inheritDoc} * @throws NullPointerException {@inheritDoc} * @throws IllegalArgumentException {@inheritDoc} */ public void putAll(Map m) { for ( Map.Entry e : m.entrySet() ) { put( e.getKey(), e.getValue() ); } } /** * {@inheritDoc} * * @implSpec * This implementation calls {@code entrySet().clear()}. * *
<p>
Note that this implementation throws an * {@code UnsupportedOperationException} if the {@code entrySet} * does not support the {@code clear} operation. * * @throws UnsupportedOperationException {@inheritDoc} */ public void clear() { entrySet().clear(); } // Views protected transient Set keySet; protected transient Collection values; /** * {@inheritDoc} * * @implSpec * This implementation returns a set that subclasses {@link AbstractSet}. * The subclass's iterator method returns a "wrapper object" over this * map's {@code entrySet()} iterator. The {@code size} method * delegates to this map's {@code size} method and the * {@code contains} method delegates to this map's * {@code containsKey} method. * *
<p>
The set is created the first time this method is called, * and returned in response to all subsequent calls. No synchronization * is performed, so there is a slight chance that multiple calls to this * method will not all return the same set. */ public Set keySet() { Set ks = keySet; if (ks == null) { ks = new AbstractSet() { public Iterator iterator() { return new Iterator() { private Iterator> i = entrySet().iterator(); public boolean hasNext() { return i.hasNext(); } public K next() { return i.next().getKey(); } public void remove() { i.remove(); } }; } public int size() { return AbstractMap.this.size(); } public boolean isEmpty() { return AbstractMap.this.isEmpty(); } public void clear() { AbstractMap.this.clear(); } public boolean contains(Object k) { return AbstractMap.this.containsKey(k); } }; keySet = ks; } return ks; } /** * {@inheritDoc} * * @implSpec * This implementation returns a collection that subclasses {@link * AbstractCollection}. The subclass's iterator method returns a * "wrapper object" over this map's {@code entrySet()} iterator. * The {@code size} method delegates to this map's {@code size} * method and the {@code contains} method delegates to this map's * {@code containsValue} method. * *
<p>
The collection is created the first time this method is called, and * returned in response to all subsequent calls. No synchronization is * performed, so there is a slight chance that multiple calls to this * method will not all return the same collection. */ public Collection values() { Collection vals = values; if (vals == null) { vals = new AbstractCollection() { public Iterator iterator() { return new Iterator() { private Iterator> i = entrySet().iterator(); public boolean hasNext() { return i.hasNext(); } public V next() { return i.next().getValue(); } public void remove() { i.remove(); } }; } public int size() { return AbstractMap.this.size(); } public boolean isEmpty() { return AbstractMap.this.isEmpty(); } public void clear() { AbstractMap.this.clear(); } public boolean contains(Object v) { return AbstractMap.this.containsValue(v); } }; values = vals; } return vals; } public abstract Set> entrySet(); // Comparison and hashing /** * Compares the specified object with this map for equality. Returns * {@code true} if the given object is also a map and the two maps * represent the same mappings. More formally, two maps {@code m1} and * {@code m2} represent the same mappings if * {@code m1.entrySet().equals(m2.entrySet())}. This ensures that the * {@code equals} method works properly across different implementations * of the {@code Map} interface. * * @implSpec * This implementation first checks if the specified object is this map; * if so it returns {@code true}. Then, it checks if the specified * object is a map whose size is identical to the size of this map; if * not, it returns {@code false}. If so, it iterates over this map's * {@code entrySet} collection, and checks that the specified map * contains each mapping that this map contains. If the specified map * fails to contain such a mapping, {@code false} is returned. If the * iteration completes, {@code true} is returned. * * @param o object to be compared for equality with this map * @return {@code true} if the specified object is equal to this map */ public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof Map)) return false; Map m = (Map) o; if (m.size() != size()) return false; try { for (Entry e : entrySet()) { K key = e.getKey(); V value = e.getValue(); if (value == null) { if (!(m.get(key) == null && m.containsKey(key))) return false; } else { if (!value.equals(m.get(key))) return false; } } } catch (ClassCastException unused) { return false; } catch (NullPointerException unused) { return false; } return true; } /** * Returns the hash code value for this map. The hash code of a map is * defined to be the sum of the hash codes of each entry in the map's * {@code entrySet()} view. This ensures that {@code m1.equals(m2)} * implies that {@code m1.hashCode()==m2.hashCode()} for any two maps * {@code m1} and {@code m2}, as required by the general contract of * {@link Object#hashCode}. * * @implSpec * This implementation iterates over {@code entrySet()}, calling * {@link Map.Entry#hashCode hashCode()} on each element (entry) in the * set, and adding up the results. * * @return the hash code value for this map * @see Map.Entry#hashCode() * @see Object#equals(Object) * @see Set#equals(Object) */ public int hashCode() { int h = 0; for (Entry entry : entrySet()) h += entry.hashCode(); return h; } /** * Returns a string representation of this map. 
The string representation * consists of a list of key-value mappings in the order returned by the * map's {@code entrySet} view's iterator, enclosed in braces * ({@code "{}"}). Adjacent mappings are separated by the characters * {@code ", "} (comma and space). Each key-value mapping is rendered as * the key followed by an equals sign ({@code "="}) followed by the * associated value. Keys and values are converted to strings as by * {@link String#valueOf(Object)}. * * @return a string representation of this map */ @Override public String toString() { Iterator> i = entrySet().iterator(); if (! i.hasNext()) return "{}"; StringBuilder sb = new StringBuilder(); sb.append('{'); for (;;) { Entry e = i.next(); K key = e.getKey(); V value = e.getValue(); sb.append(key == this ? "(this Map)" : key); sb.append('='); sb.append(value == this ? "(this Map)" : value); if (! i.hasNext()) return sb.append('}').toString(); sb.append(',').append(' '); } } @Override public String toJSONString() { return JSON.stringify( this ); } @Override public boolean hasOwnProperty( Object key ){ return this.containsKey( key ); } /** * Returns a shallow copy of this {@code AbstractMap} instance: the keys * and values themselves are not cloned. * * @return a shallow copy of this map */ protected Object clone() throws CloneNotSupportedException { AbstractMap result = (AbstractMap)super.clone(); result.keySet = null; result.values = null; return result; } /** * Utility method for SimpleEntry and SimpleImmutableEntry. * Test for equality, checking for nulls. * * NB: Do not replace with Object.equals until JDK-8015417 is resolved. */ private static boolean eq(Object o1, Object o2) { return o1 == null ? o2 == null : o1.equals(o2); } // Implementation Note: SimpleEntry and SimpleImmutableEntry // are distinct unrelated classes, even though they share // some code. Since you can't add or subtract final-ness // of a field in a subclass, they can't share representations, // and the amount of duplicated code is too small to warrant // exposing a common abstract class. /** * An Entry maintaining a key and a value. The value may be * changed using the {@code setValue} method. This class * facilitates the process of building custom map * implementations. For example, it may be convenient to return * arrays of {@code SimpleEntry} instances in method * {@code Map.entrySet().toArray}. * * @since 1.6 */ public static class SimpleEntry implements Entry, Serializable { private static final long serialVersionUID = -8499721149061103585L; private final K key; private V value; /** * Creates an entry representing a mapping from the specified * key to the specified value. * * @param key the key represented by this entry * @param value the value represented by this entry */ public SimpleEntry(K key, V value) { this.key = key; this.value = value; } /** * Creates an entry representing the same mapping as the * specified entry. * * @param entry the entry to copy */ public SimpleEntry(Entry entry) { this.key = entry.getKey(); this.value = entry.getValue(); } /** * Returns the key corresponding to this entry. * * @return the key corresponding to this entry */ public K getKey() { return key; } /** * Returns the value corresponding to this entry. * * @return the value corresponding to this entry */ public V getValue() { return value; } /** * Replaces the value corresponding to this entry with the specified * value. 
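* <p>(Editorial example, not in the original javadoc; generic signatures assumed as
* stripped by extraction.) A typical round trip:
* <pre>{@code
* SimpleEntry<String, Integer> e = new SimpleEntry<>( "hits", 1 );
* e.setValue( 2 );  // returns the old value, 1
* e.toString();     // "hits=2"
* }</pre>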
* * @param value new value to be stored in this entry * @return the old value corresponding to the entry */ public V setValue(V value) { V oldValue = this.value; this.value = value; return oldValue; } /** * Compares the specified object with this entry for equality. * Returns {@code true} if the given object is also a map entry and * the two entries represent the same mapping. More formally, two * entries {@code e1} and {@code e2} represent the same mapping * if

         *   (e1.getKey()==null ?
         *    e2.getKey()==null :
         *    e1.getKey().equals(e2.getKey()))
         *   &&
         *   (e1.getValue()==null ?
         *    e2.getValue()==null :
         *    e1.getValue().equals(e2.getValue()))
* This ensures that the {@code equals} method works properly across * different implementations of the {@code Map.Entry} interface. * * @param o object to be compared for equality with this map entry * @return {@code true} if the specified object is equal to this map * entry * @see #hashCode */ public boolean equals(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; return eq(key, e.getKey()) && eq(value, e.getValue()); } /** * Returns the hash code value for this map entry. The hash code * of a map entry {@code e} is defined to be:
         *   (e.getKey()==null   ? 0 : e.getKey().hashCode()) ^
         *   (e.getValue()==null ? 0 : e.getValue().hashCode())
* This ensures that {@code e1.equals(e2)} implies that * {@code e1.hashCode()==e2.hashCode()} for any two Entries * {@code e1} and {@code e2}, as required by the general * contract of {@link Object#hashCode}. * * @return the hash code value for this map entry * @see #equals */ public int hashCode() { return (key == null ? 0 : key.hashCode()) ^ (value == null ? 0 : value.hashCode()); } /** * Returns a String representation of this map entry. This * implementation returns the string representation of this * entry's key followed by the equals character ("{@code =}") * followed by the string representation of this entry's value. * * @return a String representation of this map entry */ public String toString() { return key + "=" + value; } } /** * An Entry maintaining an immutable key and value. This class * does not support method {@code setValue}. This class may be * convenient in methods that return thread-safe snapshots of * key-value mappings. * * @since 1.6 */ public static class SimpleImmutableEntry implements Entry, Serializable { private static final long serialVersionUID = 7138329143949025153L; private final K key; private final V value; /** * Creates an entry representing a mapping from the specified * key to the specified value. * * @param key the key represented by this entry * @param value the value represented by this entry */ public SimpleImmutableEntry(K key, V value) { this.key = key; this.value = value; } /** * Creates an entry representing the same mapping as the * specified entry. * * @param entry the entry to copy */ public SimpleImmutableEntry(Entry entry) { this.key = entry.getKey(); this.value = entry.getValue(); } /** * Returns the key corresponding to this entry. * * @return the key corresponding to this entry */ public K getKey() { return key; } /** * Returns the value corresponding to this entry. * * @return the value corresponding to this entry */ public V getValue() { return value; } /** * Replaces the value corresponding to this entry with the specified * value (optional operation). This implementation simply throws * {@code UnsupportedOperationException}, as this class implements * an immutable map entry. * * @param value new value to be stored in this entry * @return (Does not return) * @throws UnsupportedOperationException always */ public V setValue(V value) { throw new UnsupportedOperationException(); } /** * Compares the specified object with this entry for equality. * Returns {@code true} if the given object is also a map entry and * the two entries represent the same mapping. More formally, two * entries {@code e1} and {@code e2} represent the same mapping * if
         *   (e1.getKey()==null ?
         *    e2.getKey()==null :
         *    e1.getKey().equals(e2.getKey()))
         *   &&
         *   (e1.getValue()==null ?
         *    e2.getValue()==null :
         *    e1.getValue().equals(e2.getValue()))
* This ensures that the {@code equals} method works properly across * different implementations of the {@code Map.Entry} interface. * * @param o object to be compared for equality with this map entry * @return {@code true} if the specified object is equal to this map * entry * @see #hashCode */ public boolean equals(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; return eq(key, e.getKey()) && eq(value, e.getValue()); } /** * Returns the hash code value for this map entry. The hash code * of a map entry {@code e} is defined to be:
         *   (e.getKey()==null   ? 0 : e.getKey().hashCode()) ^
         *   (e.getValue()==null ? 0 : e.getValue().hashCode())
* This ensures that {@code e1.equals(e2)} implies that * {@code e1.hashCode()==e2.hashCode()} for any two Entries * {@code e1} and {@code e2}, as required by the general * contract of {@link Object#hashCode}. * * @return the hash code value for this map entry * @see #equals */ public int hashCode() { return (key == null ? 0 : key.hashCode()) ^ (value == null ? 0 : value.hashCode()); } /** * Returns a String representation of this map entry. This * implementation returns the string representation of this * entry's key followed by the equals character ("{@code =}") * followed by the string representation of this entry's value. * * @return a String representation of this map entry */ public String toString() { return key + "=" + value; } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/AbstractMultiValueMap.java ================================================ package com.pinecone.framework.unit; import java.util.Map; import java.util.Collection; import java.util.Iterator; import java.util.Collections; import java.util.NoSuchElementException; import java.util.AbstractCollection; public abstract class AbstractMultiValueMap implements MultiValueMapper { private transient EntryCollection mEntryCollection; private transient ValueCollection mValueCollection; @Override public Collection > collection() { Collection > es = this.mEntryCollection; return (es != null) ? es : ( this.mEntryCollection = new EntryCollection( this ) ); } @Override public Collection collectionValues(){ Collection vs = this.mValueCollection; return (vs != null) ? vs : ( this.mValueCollection = new ValueCollection( this ) ); } class DummyEntry extends KeyValue { public DummyEntry( K key, V value ) { super( key, value ); } public void setKey( K key ) { this.key = key; } } class EntryIterator implements Iterator > { private final Iterator > > entryIterator; private Iterator currentCollectionIterator; private K currentKey; protected DummyEntry dummyEntry = new DummyEntry( null, null ); EntryIterator( MultiValueMapper that ) { this.entryIterator = that.entrySet().iterator(); this.currentCollectionIterator = Collections.emptyIterator(); } @Override public boolean hasNext() { while ( !this.currentCollectionIterator.hasNext() && this.entryIterator.hasNext() ) { Map.Entry > entry = this.entryIterator.next(); this.currentKey = entry.getKey(); this.currentCollectionIterator = entry.getValue().iterator(); } return this.currentCollectionIterator.hasNext(); } @Override public Map.Entry next() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } this.dummyEntry.setKey( this.currentKey ); this.dummyEntry.setValue( this.currentCollectionIterator.next() ); return this.dummyEntry; } } class EntryCollection extends AbstractCollection > { MultiValueMapper map; EntryCollection( MultiValueMapper that ) { this.map = that; } @Override public Iterator > iterator() { return new EntryIterator( this.map ); } @Override public int size() { int size = 0; for ( Collection values : this.map.values() ) { size += values.size(); } return size; } @Override public boolean isEmpty() { return this.map.isEmpty(); } @Override public void clear() { this.map.clear(); } @Override public boolean contains( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry entry = (Map.Entry) o; Collection values = this.map.get(entry.getKey()); return values != null && values.contains( entry.getValue() ); } @Override public boolean remove(Object o) { if (!(o instanceof Map.Entry)) { 
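// (Editorial note, not part of the original source.) The EntryIterator above flattens a
// Map<K, Collection<V>> into one logical Map.Entry per stored value, reusing a single
// DummyEntry holder; copy an entry (e.g. new KeyValue<>( e )) before retaining it across
// next() calls. Hedged sketch, for a hypothetical MultiValueMapper holding {"a"=[1, 2]}:
//   Iterator<Map.Entry<String, Integer>> it = multiMap.collection().iterator();
//   it.next();  // a=1
//   it.next();  // a=2 (the same DummyEntry instance, re-keyed and re-valued)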
return false; } Map.Entry entry = (Map.Entry) o; Collection values = this.map.get(entry.getKey()); if ( values != null && values.remove( entry.getValue() ) ) { if ( values.isEmpty() ) { this.map.remove( entry.getKey() ); } return true; } return false; } } class ValueIterator implements Iterator { private final Iterator>> entryIterator; private Iterator currentCollectionIterator; ValueIterator(MultiValueMapper that) { this.entryIterator = that.entrySet().iterator(); this.currentCollectionIterator = Collections.emptyIterator(); } @Override public boolean hasNext() { while ( !this.currentCollectionIterator.hasNext() && this.entryIterator.hasNext() ) { Map.Entry > entry = this.entryIterator.next(); this.currentCollectionIterator = entry.getValue().iterator(); } return this.currentCollectionIterator.hasNext(); } @Override public V next() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } return this.currentCollectionIterator.next(); } } class ValueCollection extends AbstractCollection { MultiValueMapper map; ValueCollection( MultiValueMapper that ) { this.map = that; } @Override public Iterator iterator() { return new ValueIterator(this.map); } @Override public int size() { int size = 0; for ( Collection values : this.map.values() ) { size += values.size(); } return size; } @Override public boolean isEmpty() { return this.map.isEmpty(); } @Override public void clear() { this.map.clear(); } @Override public boolean contains( Object o ) { for ( Collection values : this.map.values() ) { if ( values.contains(o) ) { return true; } } return false; } @Override public boolean remove( Object o ) { for ( Map.Entry > entry : this.map.entrySet() ) { Collection values = entry.getValue(); if ( values.remove(o) ) { if ( values.isEmpty() ) { this.map.remove( entry.getKey() ); } return true; } } return false; } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/BidLinkedEntry.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSON; import java.util.Map; public class BidLinkedEntry implements Map.Entry, Pinenut { protected K key; protected V value; protected BidLinkedEntry before; protected BidLinkedEntry after; BidLinkedEntry( K key, V value ) { this.key = key; this.value = value; } public void extend( Map.Entry entry ) { this.key = entry.getKey(); this.value = entry.getValue(); } public K getKey() { return this.key; } public V getValue() { return this.value; } public V setValue(V value) { V oldValue = this.value; this.value = value; return oldValue; } @Override public int hashCode() { int keyHash = (key==null ? 0 : key.hashCode()); int valueHash = (value==null ? 
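// (Editorial note, not in the original source.) The XOR below follows the
// java.util.Map.Entry#hashCode() contract: keyHash ^ valueHash. Worked example for
// key "a" and value 1: "a".hashCode() == 97, Integer.hashCode(1) == 1, so 97 ^ 1 == 96.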
0 : value.hashCode()); return keyHash ^ valueHash; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return "{" + StringUtils.jsonQuote( this.key.toString() ) + ":" + JSON.stringify( this.value ) + "}"; } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/BitSet64.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.util.Bits; //import com.pinecone.framework.util.Debug; public final class BitSet64 { public static final int Int64MaxPos = Long.SIZE - 1; public static long setBit( long that, int position ) { return that | (1L << position); } public static long clearBit( long that, int position ) { return that & ~(1L << position); } public static boolean isBitSet( long that, int position ) { return (that & (1L << position)) != 0; } public static long flipBit( long that, int position ) { return that ^ (1L << position); } public static String toBinaryString( long that ) { return Long.toBinaryString(that); } public static String toBinaryStringMSB( long that ) { String binaryString = String.format( "%64s", Long.toBinaryString( Bits.reverse64Bits(that) ) ).replace( ' ', '0' ); return "0b" + binaryString; } public static String toBinaryStringLSB( long that ) { String binaryString = String.format( "%64s", Long.toBinaryString(that) ).replace( ' ', '0' ); return "0b" + binaryString; } public static String toIndexJSONString( long that ) { StringBuilder sb = new StringBuilder(); sb.append( '[' ); for ( int i = 0; i < Long.SIZE; ++i ) { if ( ( that & (1L << i) ) != 0 ) { sb.append( i ).append( ',' ); } } if( sb.charAt( sb.length() - 1 ) == ',' ) { sb.deleteCharAt( sb.length() - 1 ); } sb.append( ']' ); return sb.toString(); } public static long set( long that, int from, int to, boolean val ) throws IllegalArgumentException { int jt = BitSet64.check( from, to ); long mask = ((1L << (jt - from + 1)) - 1) << from; if ( val ) { that |= mask; if ( to >= BitSet64.Int64MaxPos ) { that = BitSet64.setBit( that, to ); } } else { that &= ~mask; if ( to >= BitSet64.Int64MaxPos ) { that = BitSet64.clearBit( that, to ); } } return that; } public static long set( long that, int from, int to ) throws IllegalArgumentException { return BitSet64.set( that, from, to, true ); } public static long unset( long that, int from, int to ) throws IllegalArgumentException { return BitSet64.set( that, from, to, false ); } private static int check( int from, int to ) throws IllegalArgumentException { if ( from > to || from < 0 || to > BitSet64.Int64MaxPos ) { throw new IllegalArgumentException( "Invalid bit positions" ); } int jt = to; if ( to == BitSet64.Int64MaxPos ) { jt = BitSet64.Int64MaxPos - 1; } return jt; } public static long extract( long that, int from, int to ) throws IllegalArgumentException { int jt = BitSet64.check( from, to ); long mask = ((1L << (jt - from + 1)) - 1) << from; long t = (that & mask) >>> from; if ( to >= BitSet64.Int64MaxPos ) { t = t | (that & 0x8000000000000000L); } return t; } public static long copy( long that, int from, int to, long segment ) { int jt = to; if ( to == BitSet64.Int64MaxPos ) { jt = BitSet64.Int64MaxPos - 1; } long seg = segment; int segmentLength = jt - from + 1; segment &= (1L << segmentLength) - 1; segment <<= from; long mask = ((1L << segmentLength) - 1) << from; that &= ~mask; if ( to >= BitSet64.Int64MaxPos ) { 
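// (Editorial note, not part of the original source.) Bit 63 is special-cased throughout
// this class because Java shift counts are taken mod 64: with from = 0, to = 63 the
// naive mask (1L << 64) - 1 would collapse to (1L << 0) - 1 == 0, so the sign bit is
// patched in separately, as here. Quick usage arithmetic:
//   BitSet64.setBit( 0L, 3 )        == 8L    // 0b1000
//   BitSet64.extract( 0xB0L, 4, 7 ) == 0xBL  // high nibble of 1011_0000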
that &= ~(1L << to); } that |= segment; if ( to >= BitSet64.Int64MaxPos ) { long lastBit = seg & 0x8000000000000000L; that |= lastBit; } return that; } public static long reverse( long that, int from, int to ) { long seg = BitSet64.extract( that, from, to ); long re = Bits.reverse64Bits( seg ); int k = to - from; long sift = (re >>> (BitSet64.Int64MaxPos - k)) | (re << k); if ( to == BitSet64.Int64MaxPos ) { sift = re; } else { sift &= ~(1L << BitSet64.Int64MaxPos); } // Debug.bluef( BitSet64.toBinaryStringLSB( seg ).substring(2 + BitSet64.Int64MaxPos - k, 66) ); // Debug.bluef( BitSet64.toBinaryStringLSB( re ).substring(2, k + 3) ); // Debug.bluef( BitSet64.toBinaryStringLSB( sift ).substring(2 + BitSet64.Int64MaxPos - k, 66) ); return BitSet64.copy( that, from, to, sift ); } public static long flip( long that, int from, int to ) throws IllegalArgumentException { int jt = BitSet64.check( from, to ); long mask = ((1L << (jt - from + 1)) - 1) << from; if ( to >= BitSet64.Int64MaxPos ) { that = BitSet64.flipBit( that, to ); } return that ^ mask; } public static int existence ( long that ) { return Long.bitCount(that); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ConcurrentReferenceHashMap.java ================================================ package com.pinecone.framework.unit; import java.lang.ref.ReferenceQueue; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.lang.reflect.Array; import java.util.AbstractMap; import java.util.AbstractSet; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.ObjectUtils; import com.pinecone.framework.util.json.JSON; public class ConcurrentReferenceHashMap extends AbstractMap implements ConcurrentMap, PineUnit { private static final int DEFAULT_INITIAL_CAPACITY = 16; private static final float DEFAULT_LOAD_FACTOR = 0.75F; private static final int DEFAULT_CONCURRENCY_LEVEL = 16; private static final ConcurrentReferenceHashMap.ReferenceType DEFAULT_REFERENCE_TYPE; private static final int MAXIMUM_CONCURRENCY_LEVEL = 65536; private static final int MAXIMUM_SEGMENT_SIZE = 1073741824; private final ConcurrentReferenceHashMap.Segment[] segments; private final float loadFactor; private final ConcurrentReferenceHashMap.ReferenceType referenceType; private final int shift; @Nullable private volatile Set> entrySet; public ConcurrentReferenceHashMap() { this(16, 0.75F, 16, DEFAULT_REFERENCE_TYPE); } public ConcurrentReferenceHashMap(int initialCapacity) { this(initialCapacity, 0.75F, 16, DEFAULT_REFERENCE_TYPE); } public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor) { this(initialCapacity, loadFactor, 16, DEFAULT_REFERENCE_TYPE); } public ConcurrentReferenceHashMap(int initialCapacity, int concurrencyLevel) { this(initialCapacity, 0.75F, concurrencyLevel, DEFAULT_REFERENCE_TYPE); } public ConcurrentReferenceHashMap(int initialCapacity, ConcurrentReferenceHashMap.ReferenceType referenceType) { this(initialCapacity, 0.75F, 16, referenceType); } public ConcurrentReferenceHashMap(int initialCapacity, float 
loadFactor, int concurrencyLevel) { this(initialCapacity, loadFactor, concurrencyLevel, DEFAULT_REFERENCE_TYPE); } public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor, int concurrencyLevel, ConcurrentReferenceHashMap.ReferenceType referenceType) { Assert.isTrue(initialCapacity >= 0, "Initial capacity must not be negative"); Assert.isTrue(loadFactor > 0.0F, "Load factor must be positive"); Assert.isTrue(concurrencyLevel > 0, "Concurrency level must be positive"); Assert.notNull(referenceType, "Reference type must not be null"); this.loadFactor = loadFactor; this.shift = calculateShift(concurrencyLevel, 65536); int size = 1 << this.shift; this.referenceType = referenceType; int roundedUpSegmentCapacity = (int)(((long)(initialCapacity + size) - 1L) / (long)size); int initialSize = 1 << calculateShift(roundedUpSegmentCapacity, 1073741824); ConcurrentReferenceHashMap.Segment[] segments = (ConcurrentReferenceHashMap.Segment[])((ConcurrentReferenceHashMap.Segment[])Array.newInstance(ConcurrentReferenceHashMap.Segment.class, size)); int resizeThreshold = (int)((float)initialSize * this.getLoadFactor()); for(int i = 0; i < segments.length; ++i) { segments[i] = new ConcurrentReferenceHashMap.Segment(initialSize, resizeThreshold); } this.segments = segments; } protected final float getLoadFactor() { return this.loadFactor; } protected final int getSegmentsSize() { return this.segments.length; } protected final ConcurrentReferenceHashMap.Segment getSegment(int index) { return this.segments[index]; } protected ConcurrentReferenceHashMap.ReferenceManager createReferenceManager() { return new ConcurrentReferenceHashMap.ReferenceManager(); } protected int getHash(@Nullable Object o) { int hash = o != null ? o.hashCode() : 0; hash += hash << 15 ^ -12931; hash ^= hash >>> 10; hash += hash << 3; hash ^= hash >>> 6; hash += (hash << 2) + (hash << 14); hash ^= hash >>> 16; return hash; } @Nullable @Override public V get(@Nullable Object key) { ConcurrentReferenceHashMap.Reference ref = this.getReference(key, ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY); ConcurrentReferenceHashMap.Entry entry = ref != null ? ref.get() : null; return entry != null ? entry.getValue() : null; } @Nullable @Override public V getOrDefault(@Nullable Object key, @Nullable V defaultValue) { ConcurrentReferenceHashMap.Reference ref = this.getReference(key, ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY); ConcurrentReferenceHashMap.Entry entry = ref != null ? ref.get() : null; return entry != null ? entry.getValue() : defaultValue; } public boolean containsKey(@Nullable Object key) { ConcurrentReferenceHashMap.Reference ref = this.getReference(key, ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY); ConcurrentReferenceHashMap.Entry entry = ref != null ? 
ref.get() : null; return entry != null && ObjectUtils.nullSafeEquals(entry.getKey(), key); } @Nullable protected final ConcurrentReferenceHashMap.Reference getReference(@Nullable Object key, ConcurrentReferenceHashMap.Restructure restructure) { int hash = this.getHash(key); return this.getSegmentForHash(hash).getReference(key, hash, restructure); } @Nullable @Override public V put(@Nullable K key, @Nullable V value) { return this.put(key, value, true); } @Nullable @Override public V putIfAbsent(@Nullable K key, @Nullable V value) { return this.put(key, value, false); } @Nullable private V put(@Nullable K key, @Nullable final V value, final boolean overwriteExisting) { return this.doTask(key, new ConcurrentReferenceHashMap.Task(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE, ConcurrentReferenceHashMap.TaskOption.RESIZE}) { @Nullable protected V execute(@Nullable ConcurrentReferenceHashMap.Reference ref, @Nullable ConcurrentReferenceHashMap.Entry entry, @Nullable ConcurrentReferenceHashMap.Entries entries) { if (entry != null) { V oldValue = entry.getValue(); if (overwriteExisting) { entry.setValue(value); } return oldValue; } else { Assert.state(entries != null, "No entries segment"); entries.add(value); return null; } } }); } @Nullable @Override public V remove(Object key) { return this.doTask(key, new ConcurrentReferenceHashMap.Task(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_AFTER, ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) { @Nullable protected V execute(@Nullable ConcurrentReferenceHashMap.Reference ref, @Nullable ConcurrentReferenceHashMap.Entry entry) { if (entry != null) { if (ref != null) { ref.release(); } return entry.value; } else { return null; } } }); } @Override public boolean remove(Object key, final Object value) { Boolean result = (Boolean)this.doTask(key, new ConcurrentReferenceHashMap.Task(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_AFTER, ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) { protected Boolean execute(@Nullable ConcurrentReferenceHashMap.Reference ref, @Nullable ConcurrentReferenceHashMap.Entry entry) { if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), value)) { if (ref != null) { ref.release(); } return true; } else { return false; } } }); return Boolean.TRUE.equals(result); } @Override public boolean replace(K key, final V oldValue, final V newValue) { Boolean result = (Boolean)this.doTask(key, new ConcurrentReferenceHashMap.Task(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE, ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) { protected Boolean execute(@Nullable ConcurrentReferenceHashMap.Reference ref, @Nullable ConcurrentReferenceHashMap.Entry entry) { if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), oldValue)) { entry.setValue(newValue); return true; } else { return false; } } }); return Boolean.TRUE.equals(result); } @Nullable @Override public V replace(K key, final V value) { return this.doTask(key, new ConcurrentReferenceHashMap.Task(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE, ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) { @Nullable protected V execute(@Nullable ConcurrentReferenceHashMap.Reference ref, @Nullable ConcurrentReferenceHashMap.Entry entry) { if (entry != null) { V oldValue = entry.getValue(); 
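// (Editorial sketch, not part of the original source; generic signatures assumed as
// stripped by extraction.) Cache-style use, where entries can be reclaimed by the GC:
//   ConcurrentReferenceHashMap<String, byte[]> cache =
//       new ConcurrentReferenceHashMap<>( 16, ConcurrentReferenceHashMap.ReferenceType.WEAK );
//   cache.put( "k", new byte[1024] );
//   cache.get( "k" );                 // may become null after a GC cycle
//   cache.purgeUnreferencedEntries(); // proactively drops cleared references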
entry.setValue(value); return oldValue; } else { return null; } } }); } @Override public void clear() { ConcurrentReferenceHashMap.Segment[] var1 = this.segments; int var2 = var1.length; for(int var3 = 0; var3 < var2; ++var3) { ConcurrentReferenceHashMap.Segment segment = var1[var3]; segment.clear(); } } public void purgeUnreferencedEntries() { ConcurrentReferenceHashMap.Segment[] var1 = this.segments; int var2 = var1.length; for(int var3 = 0; var3 < var2; ++var3) { ConcurrentReferenceHashMap.Segment segment = var1[var3]; segment.restructureIfNecessary(false); } } @Override public int size() { int size = 0; ConcurrentReferenceHashMap.Segment[] var2 = this.segments; int var3 = var2.length; for(int var4 = 0; var4 < var3; ++var4) { ConcurrentReferenceHashMap.Segment segment = var2[var4]; size += segment.getCount(); } return size; } @Override public boolean isEmpty() { ConcurrentReferenceHashMap.Segment[] var1 = this.segments; int var2 = var1.length; for(int var3 = 0; var3 < var2; ++var3) { ConcurrentReferenceHashMap.Segment segment = var1[var3]; if (segment.getCount() > 0) { return false; } } return true; } @Override public Set> entrySet() { Set> entrySet = this.entrySet; if (entrySet == null) { entrySet = new ConcurrentReferenceHashMap.EntrySet(); this.entrySet = (Set)entrySet; } return (Set)entrySet; } @Nullable private T doTask(@Nullable Object key, ConcurrentReferenceHashMap.Task task) { int hash = this.getHash(key); return this.getSegmentForHash(hash).doTask(hash, key, task); } @Override public boolean hasOwnProperty(Object elm) { return this.containsKey( elm ); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } private ConcurrentReferenceHashMap.Segment getSegmentForHash(int hash) { return this.segments[hash >>> 32 - this.shift & this.segments.length - 1]; } protected static int calculateShift(int minimumValue, int maximumValue) { int shift = 0; for(int value = 1; value < minimumValue && value < maximumValue; ++shift) { value <<= 1; } return shift; } static { DEFAULT_REFERENCE_TYPE = ConcurrentReferenceHashMap.ReferenceType.SOFT; } private static final class WeakEntryReference extends WeakReference> implements ConcurrentReferenceHashMap.Reference { private final int hash; @Nullable private final ConcurrentReferenceHashMap.Reference nextReference; public WeakEntryReference(ConcurrentReferenceHashMap.Entry entry, int hash, @Nullable ConcurrentReferenceHashMap.Reference next, ReferenceQueue> queue) { super(entry, queue); this.hash = hash; this.nextReference = next; } public int getHash() { return this.hash; } @Nullable public ConcurrentReferenceHashMap.Reference getNext() { return this.nextReference; } public void release() { this.enqueue(); this.clear(); } } private static final class SoftEntryReference extends SoftReference> implements ConcurrentReferenceHashMap.Reference { private final int hash; @Nullable private final ConcurrentReferenceHashMap.Reference nextReference; public SoftEntryReference(ConcurrentReferenceHashMap.Entry entry, int hash, @Nullable ConcurrentReferenceHashMap.Reference next, ReferenceQueue> queue) { super(entry, queue); this.hash = hash; this.nextReference = next; } public int getHash() { return this.hash; } @Nullable public ConcurrentReferenceHashMap.Reference getNext() { return this.nextReference; } public void release() { this.enqueue(); this.clear(); } } protected class ReferenceManager { private final ReferenceQueue> queue = new ReferenceQueue(); protected 
ReferenceManager() { } public ConcurrentReferenceHashMap.Reference createReference(ConcurrentReferenceHashMap.Entry entry, int hash, @Nullable ConcurrentReferenceHashMap.Reference next) { return (ConcurrentReferenceHashMap.Reference)(ConcurrentReferenceHashMap.this.referenceType == ConcurrentReferenceHashMap.ReferenceType.WEAK ? new ConcurrentReferenceHashMap.WeakEntryReference(entry, hash, next, this.queue) : new ConcurrentReferenceHashMap.SoftEntryReference(entry, hash, next, this.queue)); } @Nullable public ConcurrentReferenceHashMap.Reference pollForPurge() { return (ConcurrentReferenceHashMap.Reference)this.queue.poll(); } } protected static enum Restructure { WHEN_NECESSARY, NEVER; private Restructure() { } } private class EntryIterator implements Iterator> { private int segmentIndex; private int referenceIndex; @Nullable private ConcurrentReferenceHashMap.Reference[] references; @Nullable private ConcurrentReferenceHashMap.Reference reference; @Nullable private ConcurrentReferenceHashMap.Entry next; @Nullable private ConcurrentReferenceHashMap.Entry last; public EntryIterator() { this.moveToNextSegment(); } public boolean hasNext() { this.getNextIfNecessary(); return this.next != null; } public ConcurrentReferenceHashMap.Entry next() { this.getNextIfNecessary(); if (this.next == null) { throw new NoSuchElementException(); } else { this.last = this.next; this.next = null; return this.last; } } private void getNextIfNecessary() { while(this.next == null) { this.moveToNextReference(); if (this.reference == null) { return; } this.next = this.reference.get(); } } private void moveToNextReference() { if (this.reference != null) { this.reference = this.reference.getNext(); } while(this.reference == null && this.references != null) { if (this.referenceIndex >= this.references.length) { this.moveToNextSegment(); this.referenceIndex = 0; } else { this.reference = this.references[this.referenceIndex]; ++this.referenceIndex; } } } private void moveToNextSegment() { this.reference = null; this.references = null; if (this.segmentIndex < ConcurrentReferenceHashMap.this.segments.length) { this.references = ConcurrentReferenceHashMap.this.segments[this.segmentIndex].references; ++this.segmentIndex; } } public void remove() { Assert.state(this.last != null, "No element to remove"); ConcurrentReferenceHashMap.this.remove(this.last.getKey()); } } private class EntrySet extends AbstractSet> { private EntrySet() { } public Iterator> iterator() { return ConcurrentReferenceHashMap.this.new EntryIterator(); } public boolean contains(@Nullable Object o) { if (o instanceof java.util.Map.Entry) { java.util.Map.Entry entry = (java.util.Map.Entry)o; ConcurrentReferenceHashMap.Reference ref = ConcurrentReferenceHashMap.this.getReference(entry.getKey(), ConcurrentReferenceHashMap.Restructure.NEVER); ConcurrentReferenceHashMap.Entry otherEntry = ref != null ? 
ref.get() : null; if (otherEntry != null) { return ObjectUtils.nullSafeEquals(entry.getValue(), otherEntry.getValue()); } } return false; } public boolean remove(Object o) { if (o instanceof java.util.Map.Entry) { java.util.Map.Entry entry = (java.util.Map.Entry)o; return ConcurrentReferenceHashMap.this.remove(entry.getKey(), entry.getValue()); } else { return false; } } public int size() { return ConcurrentReferenceHashMap.this.size(); } public void clear() { ConcurrentReferenceHashMap.this.clear(); } } private interface Entries { void add(@Nullable V var1); } private static enum TaskOption { RESTRUCTURE_BEFORE, RESTRUCTURE_AFTER, SKIP_IF_EMPTY, RESIZE; private TaskOption() { } } private abstract class Task { private final EnumSet options; public Task(ConcurrentReferenceHashMap.TaskOption... options) { this.options = options.length == 0 ? EnumSet.noneOf(ConcurrentReferenceHashMap.TaskOption.class) : EnumSet.of(options[0], options); } public boolean hasOption(ConcurrentReferenceHashMap.TaskOption option) { return this.options.contains(option); } @Nullable protected T execute(@Nullable ConcurrentReferenceHashMap.Reference ref, @Nullable ConcurrentReferenceHashMap.Entry entry, @Nullable ConcurrentReferenceHashMap.Entries entries) { return this.execute(ref, entry); } @Nullable protected T execute(@Nullable ConcurrentReferenceHashMap.Reference ref, @Nullable ConcurrentReferenceHashMap.Entry entry) { return null; } } protected static final class Entry implements java.util.Map.Entry { @Nullable private final K key; @Nullable private volatile V value; public Entry(@Nullable K key, @Nullable V value) { this.key = key; this.value = value; } @Nullable public K getKey() { return this.key; } @Nullable public V getValue() { return this.value; } @Nullable public V setValue(@Nullable V value) { V previous = this.value; this.value = value; return previous; } public String toString() { return this.key + "=" + this.value; } public final boolean equals(@Nullable Object other) { if (this == other) { return true; } else if (!(other instanceof java.util.Map.Entry)) { return false; } else { java.util.Map.Entry otherEntry = (java.util.Map.Entry)other; return ObjectUtils.nullSafeEquals(this.getKey(), otherEntry.getKey()) && ObjectUtils.nullSafeEquals(this.getValue(), otherEntry.getValue()); } } public final int hashCode() { return ObjectUtils.nullSafeHashCode(this.key) ^ ObjectUtils.nullSafeHashCode(this.value); } } protected interface Reference { @Nullable ConcurrentReferenceHashMap.Entry get(); int getHash(); @Nullable ConcurrentReferenceHashMap.Reference getNext(); void release(); } protected final class Segment extends ReentrantLock { private final ConcurrentReferenceHashMap.ReferenceManager referenceManager = ConcurrentReferenceHashMap.this.createReferenceManager(); private final int initialSize; private volatile ConcurrentReferenceHashMap.Reference[] references; private final AtomicInteger count = new AtomicInteger(); private int resizeThreshold; public Segment(int initialSize, int resizeThreshold) { this.initialSize = initialSize; this.references = this.createReferenceArray(initialSize); this.resizeThreshold = resizeThreshold; } @Nullable public ConcurrentReferenceHashMap.Reference getReference(@Nullable Object key, int hash, ConcurrentReferenceHashMap.Restructure restructure) { if (restructure == ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY) { this.restructureIfNecessary(false); } if (this.count.get() == 0) { return null; } else { ConcurrentReferenceHashMap.Reference[] references = 
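// (Editorial note, not in the original source.) Segment selection and bucket indexing
// deliberately use opposite ends of the spread hash: getSegmentForHash takes the HIGH
// bits, (hash >>> (32 - shift)) & (segments.length - 1), while getIndex below takes the
// LOW bits, hash & (references.length - 1). With the default concurrency level of 16,
// calculateShift(16, 65536) == 4, giving 1 << 4 == 16 segments.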
this.references; int index = this.getIndex(hash, references); ConcurrentReferenceHashMap.Reference head = references[index]; return this.findInChain(head, key, hash); } } @Nullable public T doTask(int hash, @Nullable Object key, ConcurrentReferenceHashMap.Task task) { boolean resize = task.hasOption(ConcurrentReferenceHashMap.TaskOption.RESIZE); if (task.hasOption(ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE)) { this.restructureIfNecessary(resize); } if (task.hasOption(ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY) && this.count.get() == 0) { return (T)task.execute((ConcurrentReferenceHashMap.Reference)null, (ConcurrentReferenceHashMap.Entry)null, (ConcurrentReferenceHashMap.Entries)null); } else { this.lock(); Object var10; try { int index = this.getIndex(hash, this.references); ConcurrentReferenceHashMap.Reference head = this.references[index]; ConcurrentReferenceHashMap.Reference ref = this.findInChain(head, key, hash); ConcurrentReferenceHashMap.Entry entry = ref != null ? ref.get() : null; ConcurrentReferenceHashMap.Entries entries = (value) -> { ConcurrentReferenceHashMap.Entry newEntry = new ConcurrentReferenceHashMap.Entry(key, value); ConcurrentReferenceHashMap.Reference newReference = this.referenceManager.createReference(newEntry, hash, head); this.references[index] = newReference; this.count.incrementAndGet(); }; var10 = task.execute(ref, entry, entries); } finally { this.unlock(); if (task.hasOption(ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_AFTER)) { this.restructureIfNecessary(resize); } } return (T)var10; } } public void clear() { if (this.count.get() != 0) { this.lock(); try { this.references = this.createReferenceArray(this.initialSize); this.resizeThreshold = (int)((float)this.references.length * ConcurrentReferenceHashMap.this.getLoadFactor()); this.count.set(0); } finally { this.unlock(); } } } protected final void restructureIfNecessary(boolean allowResize) { int currCount = this.count.get(); boolean needsResize = allowResize && currCount > 0 && currCount >= this.resizeThreshold; ConcurrentReferenceHashMap.Reference ref = this.referenceManager.pollForPurge(); if (ref != null || needsResize) { this.restructure(allowResize, ref); } } private void restructure(boolean allowResize, @Nullable ConcurrentReferenceHashMap.Reference ref) { this.lock(); try { int countAfterRestructure = this.count.get(); Set> toPurge = Collections.emptySet(); if (ref != null) { for(toPurge = new HashSet(); ref != null; ref = this.referenceManager.pollForPurge()) { ((Set)toPurge).add(ref); } } countAfterRestructure -= ((Set)toPurge).size(); boolean needsResize = countAfterRestructure > 0 && countAfterRestructure >= this.resizeThreshold; boolean resizing = false; int restructureSize = this.references.length; if (allowResize && needsResize && restructureSize < 1073741824) { restructureSize <<= 1; resizing = true; } ConcurrentReferenceHashMap.Reference[] restructured = resizing ? 
this.createReferenceArray(restructureSize) : this.references; for(int i = 0; i < this.references.length; ++i) { ref = this.references[i]; if (!resizing) { restructured[i] = null; } for(; ref != null; ref = ref.getNext()) { if (!((Set)toPurge).contains(ref)) { ConcurrentReferenceHashMap.Entry entry = ref.get(); if (entry != null) { int index = this.getIndex(ref.getHash(), restructured); restructured[index] = this.referenceManager.createReference(entry, ref.getHash(), restructured[index]); } } } } if (resizing) { this.references = restructured; this.resizeThreshold = (int)((float)this.references.length * ConcurrentReferenceHashMap.this.getLoadFactor()); } this.count.set(Math.max(countAfterRestructure, 0)); } finally { this.unlock(); } } @Nullable private ConcurrentReferenceHashMap.Reference findInChain(ConcurrentReferenceHashMap.Reference ref, @Nullable Object key, int hash) { for(ConcurrentReferenceHashMap.Reference currRef = ref; currRef != null; currRef = currRef.getNext()) { if (currRef.getHash() == hash) { ConcurrentReferenceHashMap.Entry entry = currRef.get(); if (entry != null) { K entryKey = entry.getKey(); if (ObjectUtils.nullSafeEquals(entryKey, key)) { return currRef; } } } } return null; } private ConcurrentReferenceHashMap.Reference[] createReferenceArray(int size) { return new ConcurrentReferenceHashMap.Reference[size]; } private int getIndex(int hash, ConcurrentReferenceHashMap.Reference[] references) { return hash & references.length - 1; } public final int getSize() { return this.references.length; } public final int getCount() { return this.count.get(); } } public static enum ReferenceType { SOFT, WEAK; private ReferenceType() { } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Dictionary.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.util.json.JSONUtils; import java.util.List; import java.util.Map; /** * Dictionary * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. 
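* <p>(Editorial sketch, not part of the original header; it assumes some concrete
* {@code Dictionary} implementation, here called {@code dict}, plus the generic
* signatures stripped by extraction.) One handle serves both shapes:
* <pre>{@code
* if ( dict.isList() ) {
*     dict.get( 0 );       // integer-like keys index the backing List
* }
* dict.convertToMap();     // switch the same object to map semantics
* dict.get( "name" );      // now routed to the backing Map
* }</pre>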
* ***************************************************************************************** * PHP Array / Python Dictionary Style * ***************************************************************************************** * Dragon King, the undefined */ public interface Dictionary extends Dictium { default void reset () { this.resetAsList(); } @Override default void clear () { if( this.isMap() ) { this.getMap().clear(); } else { this.getList().clear(); } } default void reduce () { if( this.isMap() ) { this.resetAsList(); } else { this.getList().clear(); } } @Override default V get( Object key ) { if( this.isMap() ) { return this.getMap().get( key ); } int index = JSONUtils.asInt32Key( key ); return this.getList().get( index ); } @Override default V erase( Object key ) { if( this.isMap() ) { return this.getMap().remove( key ); } int index = JSONUtils.asInt32Key( key ); return this.getList().remove( index ); } boolean isMap(); boolean isList(); Map affirmMap() ; List affirmList() ; Map resetAsMap() ; List resetAsList() ; Dictionary convertToMap(); Dictionary convertToList(); Map getMap() throws ClassCastException ; List getList() throws ClassCastException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Dictium.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; public interface Dictium extends PineUnit { int size(); boolean isEmpty(); void clear(); @Override boolean containsKey( Object key ); boolean containsValue( Object value ); V get( Object key ); V insertIfAbsent( Object key, V value ); V insert( Object key, V value ); V erase( Object key ); Set entrySet(); Collection values(); Map toMap(); List toList(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/DummyMap.java ================================================ package com.pinecone.framework.unit; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.prototype.PineUnit; public class DummyMap implements Map , PineUnit { @Override public V put(K key, V value) { return null; } @Override public V get(Object key) { return null; } @Override public int size() { return 0; } @Override public boolean isEmpty() { return true; } @Override public boolean remove(Object key, Object value) { return false; } @Override public V remove(Object key) { return null; } @Override public void putAll(Map m) { } @Override public void clear() { } @Override public boolean containsKey(Object key) { return false; } @Override public boolean containsValue(Object value) { return false; } @Override public boolean hasOwnProperty(Object elm) { return false; } @Override public Set> entrySet() { return Set.of(); } @Override public Set keySet() { return Set.of(); } @Override public Collection values() { return List.of(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/KeyValue.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.StringUtils; import 
com.pinecone.framework.util.json.JSON; import java.util.Map; public class KeyValue implements Map.Entry, Pinenut { protected K key; protected V value; public KeyValue( K key, V value ) { this.key = key; this.value = value; } public KeyValue( Map.Entry other ) { this( other.getKey(), other.getValue() ); } @Override public K getKey() { return this.key; } @Override public V getValue() { return this.value; } @Override public V setValue( V value ) { V oldValue = this.value; this.value = value; return oldValue; } @Override public boolean equals( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry e = (Map.Entry)o; return valEquals( this.key,e.getKey()) && valEquals( this.value,e.getValue() ); } @Override public int hashCode() { int keyHash = (this.key==null ? 0 : this.key.hashCode()); int valueHash = (this.value==null ? 0 : this.value.hashCode()); return keyHash ^ valueHash; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return "{" + StringUtils.jsonQuote( this.key.toString() ) + ":" + JSON.stringify( this.value ) + "}"; } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } static final boolean valEquals( Object o1, Object o2 ) { return (o1==null ? o2==null : o1.equals(o2)); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedCaseInsensitiveMap.java ================================================ package com.pinecone.framework.unit; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; public class LinkedCaseInsensitiveMap extends LinkedHashMap { private final Map caseInsensitiveKeys; private final Locale locale; public LinkedCaseInsensitiveMap() { this((Locale)null); } public LinkedCaseInsensitiveMap(Locale locale) { this.caseInsensitiveKeys = new HashMap(); this.locale = locale != null ? locale : Locale.getDefault(); } public LinkedCaseInsensitiveMap(int initialCapacity) { this(initialCapacity, (Locale)null); } public LinkedCaseInsensitiveMap(int initialCapacity, Locale locale) { super(initialCapacity); this.caseInsensitiveKeys = new HashMap<>(initialCapacity); this.locale = locale != null ? locale : Locale.getDefault(); } @Override public V put(String key, V value) { String oldKey = (String)this.caseInsensitiveKeys.put(this.convertKey(key), key); if (oldKey != null && !oldKey.equals(key)) { super.remove(oldKey); } return super.put(key, value); } @Override public void putAll(Map map) { if (!map.isEmpty()) { Iterator var2 = map.entrySet().iterator(); while(var2.hasNext()) { Entry entry = (Entry)var2.next(); this.put((String)entry.getKey(), entry.getValue()); } } } @Override public boolean containsKey(Object key) { return key instanceof String && this.caseInsensitiveKeys.containsKey(this.convertKey((String)key)); } @Override public V get(Object key) { return key instanceof String ? super.get(this.caseInsensitiveKeys.get(this.convertKey((String)key))) : null; } @Override public V remove(Object key) { return key instanceof String ? 
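// (Editorial sketch, not part of the original source; the <V> signature is assumed, as
// extraction stripped the generics.) Lookups ignore case while iteration preserves the
// original key spelling:
//   LinkedCaseInsensitiveMap<Integer> m = new LinkedCaseInsensitiveMap<>();
//   m.put( "Content-Length", 42 );
//   m.get( "content-length" );         // 42
//   m.containsKey( "CONTENT-LENGTH" ); // true
//   m.keySet();                        // ["Content-Length"], original casing kept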
super.remove(this.caseInsensitiveKeys.remove(this.convertKey((String)key))) : null; } @Override public void clear() { this.caseInsensitiveKeys.clear(); super.clear(); } protected String convertKey(String key) { return key.toLowerCase(this.locale); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedMultiValueMap.java ================================================ package com.pinecone.framework.unit; import java.util.LinkedList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; public class LinkedMultiValueMap extends MultiValueMaptron > implements MultiValueMap { public LinkedMultiValueMap() { this( new LinkedHashMap<>() ); } public LinkedMultiValueMap( int initialCapacity ) { this( new LinkedHashMap<>( initialCapacity ) ); } public LinkedMultiValueMap( Map > otherMap ) { super( otherMap, false ); } @Override protected List newCollection() { return new LinkedList<>(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedTreeMap.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.json.JSON; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.*; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; public class LinkedTreeMap extends TreeMap implements PineUnit, ListedSortedMap, Iterable > { protected static class LinkedEntry extends TreeMap.Entry { LinkedEntry before, after; LinkedEntry( K key, V value, TreeMap.Entry parent ) { super( key, value, parent ); } public void extend( Map.Entry entry ) { this.key = entry.getKey(); this.value = entry.getValue(); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return super.toJSONString(); } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } } protected transient LinkedTreeMap.LinkedEntry head; protected transient LinkedTreeMap.LinkedEntry tail; protected final boolean accessOrder; // internal utilities private void linkNodeFirst( LinkedTreeMap.LinkedEntry p ) { LinkedTreeMap.LinkedEntry front = this.head; this.head = p; if ( front == null ) { this.tail = p; } else { p.after = front; front.before = p; } } private void linkNodeLast( LinkedTreeMap.LinkedEntry p ) { LinkedTreeMap.LinkedEntry last = this.tail; this.tail = p; if ( last == null ) { this.head = p; } else { p.before = last; last.after = p; } } private void linkBefore( LinkedTreeMap.LinkedEntry newNode, LinkedTreeMap.LinkedEntry succ ) { // assert succ != null; final LinkedEntry pred = succ.before; //final LinkedEntry newNode = new Node<>(pred, e, succ); newNode.before = pred; newNode.after = succ; succ.before = newNode; if ( pred == null ) { this.head = newNode; } else { pred.after = newNode; } } private LinkedEntry detachLastTailInsert() { LinkedEntry lastInserted = LinkedTreeMap.this.tail; if( LinkedTreeMap.this.tail.before != null ){ LinkedTreeMap.this.tail.before.after = null; } LinkedTreeMap.this.tail = LinkedTreeMap.this.tail.before; lastInserted.before = null; lastInserted.after = null; return lastInserted; } protected V putValFront( K key, V value, boolean 
onlyIfAbsent, boolean evict ) { TreeMap.Entry t = this.root; if ( t == null ) { this.compare( key, key ); // type (and possibly null) check this.root = this.spawnNodeFront( key, value, null ); this.size = 1; ++this.modCount; return null; } int cmp; TreeMap.Entry parent; // split comparator and comparable paths Comparator cpr = this.comparator; TreeMap.Entry legacy = null; if ( cpr != null ) { do { parent = t; cmp = cpr.compare(key, t.key); if (cmp < 0) { t = t.left; } else if ( cmp > 0 ) { t = t.right; } else { legacy = t; break; } } while ( t != null ); } else { if ( key == null ) { throw new NullPointerException(); } @SuppressWarnings("unchecked") Comparable k = (Comparable) key; do { parent = t; cmp = k.compareTo(t.key); if ( cmp < 0 ) { t = t.left; } else if ( cmp > 0 ) { t = t.right; } else { legacy = t; break; } } while ( t != null ); } if ( legacy != null ) { // existing mapping for key V oldValue = legacy.value; if ( !onlyIfAbsent || oldValue == null ) { legacy.setValue( value ); } this.afterNodeAccess( legacy ); return oldValue; } TreeMap.Entry e = this.spawnNodeFront( key, value, parent ); if ( cmp < 0 ) { parent.left = e; } else { parent.right = e; } this.fixAfterInsertion(e); ++this.size; ++this.modCount; this.afterNodeInsertion( evict ); return null; } @Override protected TreeMap.Entry spawnNode( K key, V value, TreeMap.Entry parent ) { LinkedTreeMap.LinkedEntry p = new LinkedTreeMap.LinkedEntry<>( key, value, parent ); this.linkNodeLast(p); return p; } protected TreeMap.Entry spawnNodeFront( K key, V value, TreeMap.Entry parent ) { LinkedTreeMap.LinkedEntry p = new LinkedTreeMap.LinkedEntry<>( key, value, parent ); this.linkNodeFirst(p); return p; } protected void unlinkFirst( LinkedEntry f ) { f = (LinkedEntry)this.onlyDeleteEntry( f ); this.unlink( f ); // // assert f == first && f != null; // final LinkedEntry next = f.after; // f.after = null; // help GC // this.head = next; // if ( next == null ) { // this.tail = null; // } // else { // next.before = null; // } } private void unlinkLast( LinkedEntry l ) { l = (LinkedEntry)this.onlyDeleteEntry( l ); this.unlink( l ); // assert l == last && l != null; // final LinkedEntry prev = l.before; // l.before = null; // help GC // this.tail = prev; // if ( prev == null ) { // this.head = null; // } // else { // prev.after = null; // } } protected void unlink( TreeMap.Entry e ) { LinkedTreeMap.LinkedEntry p = ( LinkedTreeMap.LinkedEntry )e, b = p.before, a = p.after; p.before = p.after = null; if ( b == null ) { this.head = a; } else { b.after = a; } if ( a == null ) { this.tail = b; } else { a.before = b; } } @Override protected void afterNodeRemoval( TreeMap.Entry e ) { // unlink this.unlink( e ); } protected void afterNodeInsertion( boolean evict ) { // possibly remove eldest LinkedEntry first; if ( evict && (first = this.head) != null && this.removeEldestEntry(first) ) { K key = first.key; TreeMap.Entry candidate = getEntry( key ); this.deleteEntry( candidate ); } } protected void afterNodeAccess( TreeMap.Entry e ) { // move node to last LinkedTreeMap.LinkedEntry last; if ( this.accessOrder && (last = this.tail) != e ) { LinkedTreeMap.LinkedEntry p = (LinkedTreeMap.LinkedEntry)e, b = p.before, a = p.after; p.after = null; if ( b == null ) { this.head = a; } else { b.after = a; } if ( a != null ) { a.before = b; } else { last = b; } if ( last == null ) { this.head = p; } else { p.before = last; last.after = p; } this.tail = p; ++this.modCount; } } @Override protected void internalWriteEntries( ObjectOutputStream s ) throws 
IOException { for ( LinkedTreeMap.LinkedEntry e = head; e != null; e = e.after ) { s.writeObject(e.key); s.writeObject(e.value); } } @Override protected void internalReadEntries( int size, final ObjectInputStream s ) throws IOException, ClassNotFoundException { for ( int i = 0; i < size; i++ ) { @SuppressWarnings("unchecked") K key = (K) s.readObject(); @SuppressWarnings("unchecked") V value = (V) s.readObject(); this.putVal( key, value, false, false ); } } public LinkedTreeMap() { super(); this.accessOrder = false; } public LinkedTreeMap( Comparator comparator ) { this( comparator, false ); } public LinkedTreeMap( boolean accessOrder ) { super(); this.accessOrder = accessOrder; } public LinkedTreeMap( Comparator comparator, boolean accessOrder ) { super( comparator ); this.accessOrder = accessOrder; } public LinkedTreeMap( Map m ) { super(); this.accessOrder = false; this.putMapEntries( m, false ); } public LinkedTreeMap( SortedMap m ) { super( m ); this.accessOrder = false; } public boolean containsValue( Object value ) { for ( LinkedTreeMap.LinkedEntry e = head; e != null; e = e.after ) { V v = e.value; if ( v == value || (value != null && value.equals(v)) ) { return true; } } return false; } public V get( Object key ) { TreeMap.Entry e; if ( (e = this.getEntry( key )) == null ) { return null; } if ( this.accessOrder ) { this.afterNodeAccess(e); } return e.value; } public V getOrDefault( Object key, V defaultValue ) { TreeMap.Entry e; if ( (e = this.getEntry( key ) ) == null ) { return defaultValue; } if ( this.accessOrder ) { this.afterNodeAccess(e); } return super.getOrDefault( key, defaultValue ); } public void clear() { super.clear(); this.head = this.tail = null; } protected boolean removeEldestEntry( Map.Entry eldest ) { return false; } // Linked & Deque operations public boolean contains( Object o ) { if( o instanceof Map.Entry ) { @SuppressWarnings("unchecked") Map.Entry kv = (Map.Entry) o; TreeMap.Entry treeEntry = this.getEntry( kv.getKey() ); return treeEntry != null && kv.getValue().equals( treeEntry.value ); } return false; } public boolean add( Map.Entry e ) { this.addLast( e ); return true; } public boolean addAll( Collection > c ) { if( c.size() == 0 ) { return false; } for ( Map.Entry e : c ) { this.addLast( e.getKey(), e.getValue() ); } return true; } public V addFirst( K key, V value ) { return this.putValFront( key, value, false, true ); } public V addLast( K key, V value ) { return this.put( key, value ); } //@Override public void addFirst( Map.Entry e ) { this.addFirst( e.getKey(), e.getValue() ); } //@Override public void addLast( Map.Entry e ) { this.addLast( e.getKey(), e.getValue() ); } public Map.Entry getFirst() { final Map.Entry f = this.head; if ( f == null ) { throw new NoSuchElementException(); } return f; } public Map.Entry getLast() { final Map.Entry l = this.tail; if ( l == null ) { throw new NoSuchElementException(); } return l; } public Map.Entry removeFirst() { final LinkedEntry f = this.head; if ( f == null ) { throw new NoSuchElementException(); } this.unlinkFirst( f ); return f; } public Map.Entry removeLast() { final LinkedEntry l = this.tail; if ( l == null ) { throw new NoSuchElementException(); } this.unlinkLast( l ); return l; } // Queue operations. 
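
    // ( The queue view treats the insertion-order head as the eldest entry, in the manner of
    //   LinkedHashMap: peek/poll read and unlink from `head`, while offer appends at `tail`.
    //   [ Editorial sketch, illustrative only; not part of the original source. It assumes the
    //     <K, V> type parameters that the extraction stripped from this class declaration. ]
    //
    //       LinkedTreeMap<String, Integer> m = new LinkedTreeMap<>();
    //       m.offer( new KeyValue<>( "b", 2 ) );     // appended at the tail of the chain
    //       m.offer( new KeyValue<>( "a", 1 ) );     // tree-ordered by key, chain-ordered by insertion
    //       m.poll();                                // removes "b"=2, the eldest insertion, not the smallest key
    // )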
public Map.Entry peek() { return this.head; } public Map.Entry element() { return this.getFirst(); } public Map.Entry poll() { final LinkedEntry f = this.head; if( f != null ) { this.unlinkFirst(f); } return f; } public Map.Entry remove() { return this.removeFirst(); } public boolean offer( Map.Entry e ) { return this.add(e); } // Deque operations public boolean offerFirst( Map.Entry e ) { this.addFirst( e ); return true; } public boolean offerLast( Map.Entry e ) { this.addLast(e); return true; } public Map.Entry peekFirst() { return this.head; } public Map.Entry peekLast() { return this.tail; } public Map.Entry pollFirst() { final LinkedEntry f = this.head; if( f != null ) { this.unlinkFirst(f); } return f; } public Map.Entry pollLast() { final LinkedEntry l = this.tail; if( l != null ) { this.unlinkLast(l); } return l; } public void push( Map.Entry e ) { this.addFirst(e); } public Map.Entry pop() { return this.removeFirst(); } public boolean removeFirstOccurrence( Object o ) { if( o instanceof Map.Entry ) { @SuppressWarnings("unchecked") Map.Entry kv = (Map.Entry) o; TreeMap.Entry treeEntry = this.getEntry( kv.getKey() ); if( treeEntry != null && kv.getValue().equals( treeEntry.value ) ) { this.deleteEntry( treeEntry ); return true; } } return false; } public boolean removeLastOccurrence( Object o ) { return this.removeFirstOccurrence( o ); // This is a map, all keys are unique. } private boolean isElementIndex( int index ) { return index >= 0 && index < this.size; } private boolean isPositionIndex( int index ) { return index >= 0 && index <= this.size; } private String outOfBoundsMsg( int index ) { return "Index: "+index+", Size: "+ this.size; } private void checkElementIndex( int index ) { if ( !this.isElementIndex(index) ) { throw new IndexOutOfBoundsException(this.outOfBoundsMsg(index)); } } private void checkPositionIndex(int index) { if ( !this.isPositionIndex(index) ) { throw new IndexOutOfBoundsException(this.outOfBoundsMsg(index)); } } public ListIterator > listIterator( int index ) { this.checkPositionIndex(index); return new LinkedListIterator( index ); } public Iterator > iterator() { return new LinkedEntryIterator(); } public Iterator > descendingIterator(){ return new DescendingIterator(); } public Object[] toArray() { Object[] result = new Object[ this.size ]; int i = 0; for ( LinkedEntry x = this.head; x != null; x = x.after ) { result[ i++ ] = x; } return result; } @SuppressWarnings("unchecked") public T[] toArray( T[] a ) { if ( a.length < this.size ) { a = (T[])java.lang.reflect.Array.newInstance( a.getClass().getComponentType(), this.size ); } int i = 0; Object[] result = a; for ( LinkedEntry x = this.head; x != null; x = x.after ) { result[i++] = x; } if ( a.length > this.size ) { a[ this.size ] = null; } return a; } public boolean containsAll( Collection c ) { for ( Object e : c ) { if ( !this.contains(e) ) { return false; } } return true; } public boolean removeAll( Collection c ) { Objects.requireNonNull(c); boolean modified = false; Iterator it = this.iterator(); while ( it.hasNext() ) { if ( c.contains(it.next()) ) { it.remove(); modified = true; } } return modified; } public boolean retainAll( Collection c ) { Objects.requireNonNull(c); boolean modified = false; Iterator > it = this.iterator(); while (it.hasNext()) { if (!c.contains(it.next())) { it.remove(); modified = true; } } return modified; } @Override public Set keySet() { Set ks = this.keySet; if ( ks == null ) { ks = new LinkedKeySet(); this.keySet = ks; } return ks; } final class LinkedKeySet extends 
AbstractSet { public final int size() { return size; } public final void clear() { LinkedTreeMap.this.clear(); } public final Iterator iterator() { return new LinkedKeyIterator(); } public final boolean contains( Object o ) { return containsKey(o); } public final boolean remove( Object key ) { TreeMap.Entry candidate = getEntry( key ); boolean b = candidate != null; deleteEntry( candidate ); return b; } public final Spliterator spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT ); } public final void forEach( Consumer action ) { if ( action == null ) { throw new NullPointerException(); } int mc = modCount; for ( LinkedTreeMap.LinkedEntry e = head; e != null; e = e.after ) { action.accept(e.key); } if ( modCount != mc ) { throw new ConcurrentModificationException(); } } } @Override public Collection values() { Collection vs = this.values; if ( vs == null ) { vs = new LinkedValues(); this.values = vs; } return vs; } final class LinkedValues extends AbstractCollection { public final int size() { return size; } public final void clear() { LinkedTreeMap.this.clear(); } public final Iterator iterator() { return new LinkedValueIterator(); } public final boolean contains( Object o ) { return containsValue(o); } public final Spliterator spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED ); } public final void forEach( Consumer action ) { if (action == null) { throw new NullPointerException(); } int mc = modCount; for ( LinkedTreeMap.LinkedEntry e = head; e != null; e = e.after ) { action.accept(e.value); } if ( modCount != mc ) { throw new ConcurrentModificationException(); } } } @Override public Set > entrySet() { Set> es; return (es = this.entrySet) == null ? 
(this.entrySet = new LinkedEntrySet()) : es; } public Set > treeEntrySet() { return new EntrySet(); } protected final class LinkedEntrySet extends AbstractSet > { public final int size() { return size; } public final void clear() { LinkedTreeMap.this.clear(); } public final Iterator > iterator() { return new LinkedEntryIterator(); } public final boolean contains( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); TreeMap.Entry candidate = getEntry( key ); return candidate != null && candidate.equals(e); } public final boolean remove( Object o ) { if ( o instanceof Map.Entry ) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); TreeMap.Entry candidate = getEntry( key ); boolean b = candidate != null; deleteEntry( candidate ); return b; } return false; } public final Spliterator> spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT ); } public final void forEach(Consumer> action) { if ( action == null ) { throw new NullPointerException(); } int mc = modCount; for ( LinkedTreeMap.LinkedEntry e = head; e != null; e = e.after ) { action.accept(e); } if ( modCount != mc ) { throw new ConcurrentModificationException(); } } } // Map overrides @Override public void forEach( BiConsumer action ) { if ( action == null ) { throw new NullPointerException(); } int mc = modCount; for ( LinkedTreeMap.LinkedEntry e = head; e != null; e = e.after ) { action.accept(e.key, e.value); } if ( modCount != mc ) { throw new ConcurrentModificationException(); } } @Override public void replaceAll( BiFunction function ) { if ( function == null ) { throw new NullPointerException(); } int mc = modCount; for ( LinkedTreeMap.LinkedEntry e = head; e != null; e = e.after ) { e.value = function.apply(e.key, e.value); } if ( modCount != mc ) { throw new ConcurrentModificationException(); } } // Iterators protected LinkedEntry queryNodeByIndex( int index ) { // assert isElementIndex(index); if ( index < (this.size >> 1) ) { LinkedEntry x = this.head; for ( int i = 0; i < index; i++ ) { x = x.after; } return x; } else { LinkedEntry x = this.tail; for ( int i = size - 1; i > index; i-- ){ x = x.before; } return x; } } protected abstract class LinkedTreeIterator { LinkedTreeMap.LinkedEntry next; LinkedTreeMap.LinkedEntry current; int expectedModCount; LinkedTreeIterator() { this.next = head; this.expectedModCount = modCount; this.current = null; } public final boolean hasNext() { return this.next != null; } final LinkedTreeMap.LinkedEntry nextNode() { LinkedTreeMap.LinkedEntry e = next; if ( modCount != this.expectedModCount ) { throw new ConcurrentModificationException(); } if ( e == null ) { throw new NoSuchElementException(); } this.current = e; this.next = e.after; return e; } public final void remove() { TreeMap.Entry p = this.current; if ( p == null ) { throw new IllegalStateException(); } if ( modCount != this.expectedModCount ) { throw new ConcurrentModificationException(); } this.current = null; deleteEntry( p ); this.expectedModCount = modCount; } } protected final class LinkedKeyIterator extends LinkedTreeIterator implements Iterator { public final K next() { return nextNode().getKey(); } } protected final class LinkedValueIterator extends LinkedTreeIterator implements Iterator { public final V next() { return nextNode().value; } } protected final class LinkedEntryIterator extends LinkedTreeIterator implements Iterator> { public final Map.Entry next() { return nextNode(); } } protected 
class LinkedListIterator implements ListIterator > { private LinkedEntry lastReturned; private LinkedEntry next; private int nextIndex; private int expectedModCount = modCount; LinkedListIterator( int index ) { // assert isPositionIndex(index); next = (index == size) ? null : LinkedTreeMap.this.queryNodeByIndex( index ); nextIndex = index; } public boolean hasNext() { return nextIndex < size; } public Map.Entry next() { checkForComodification(); if (!hasNext()) throw new NoSuchElementException(); lastReturned = next; next = next.after; nextIndex++; return lastReturned; } public boolean hasPrevious() { return nextIndex > 0; } public Map.Entry previous() { checkForComodification(); if (!hasPrevious()) throw new NoSuchElementException(); lastReturned = next = (next == null) ? tail : next.before; nextIndex--; return lastReturned; } public int nextIndex() { return nextIndex; } public int previousIndex() { return nextIndex - 1; } public void remove() { this.checkForComodification(); if ( this.lastReturned == null ) { throw new IllegalStateException(); } LinkedEntry lastNext = this.lastReturned.after; deleteEntry( this.lastReturned ); if ( this.next == this.lastReturned ) { this.next = lastNext; } else { this.nextIndex--; } this.lastReturned = null; this.expectedModCount++; } public void set( Map.Entry e ) { if ( this.lastReturned == null ) { throw new IllegalStateException(); } checkForComodification(); this.lastReturned.extend( e ); } public void add( Map.Entry e ) { this.checkForComodification(); this.lastReturned = null; if ( this.next == null ) { //linkLast(e); LinkedTreeMap.this.addLast( e ); } else { LinkedTreeMap.this.addLast( e ); LinkedEntry lastInserted = LinkedTreeMap.this.detachLastTailInsert(); LinkedTreeMap.this.linkBefore( lastInserted, next ); } this.nextIndex++; this.expectedModCount++; } public void forEachRemaining( Consumer> action ) { Objects.requireNonNull(action); while ( modCount == this.expectedModCount && this.nextIndex < size ) { action.accept( this.next ); this.lastReturned = this.next; this.next = this.next.after; this.nextIndex++; } this.checkForComodification(); } final void checkForComodification() { if ( modCount != this.expectedModCount ) { throw new ConcurrentModificationException(); } } } protected class DescendingIterator implements Iterator > { private final LinkedListIterator itr = new LinkedListIterator(size()); public boolean hasNext() { return this.itr.hasPrevious(); } public Map.Entry next() { return this.itr.previous(); } public void remove() { this.itr.remove(); } } @SuppressWarnings("unchecked") @Override public Object clone() { LinkedTreeMap clone = (LinkedTreeMap ) super.superClone(); clone.head = null; clone.tail = null; clone.putMapEntries( this, false ); return clone; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } public Deque > toQueue() { return new LinkedTreeMapList<>( this ); } public List > toList() { return new LinkedTreeMapList<>( this ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedTreeMapList.java ================================================ package com.pinecone.framework.unit; import java.util.Map; import java.util.AbstractSequentialList; import java.util.List; import java.util.Deque; import java.util.Comparator; import java.util.SortedMap; import java.util.Collection; import java.util.ListIterator; import java.util.Iterator; public class LinkedTreeMapList 
extends AbstractSequentialList > implements List >, Deque > { static final long serialVersionUID = -5024789606714721619L; private transient LinkedTreeMap map; public LinkedTreeMapList( LinkedTreeMap map ) { super(); this.map = map; } public LinkedTreeMapList() { super(); this.map = new LinkedTreeMap<>(); } public LinkedTreeMapList( Comparator comparator ) { super(); this.map = new LinkedTreeMap<>( comparator ); } public LinkedTreeMapList( Comparator comparator, boolean accessOrder ) { super(); this.map = new LinkedTreeMap<>( comparator, accessOrder ); } public LinkedTreeMapList( boolean accessOrder ) { super(); this.map = new LinkedTreeMap<>( accessOrder ); } public LinkedTreeMapList( Map m ) { super(); this.map = new LinkedTreeMap<>( m ); } public LinkedTreeMapList( SortedMap m ) { super(); this.map = new LinkedTreeMap<>( m ); } public LinkedTreeMap getMap() { return this.map; } public int size() { return this.map.size(); } public boolean isEmpty() { return this.map.isEmpty(); } public boolean remove(Object o) { return this.map.remove(o) != null; } public void clear() { this.map.clear(); } // Linked & Deque operations public boolean contains( Object o ) { return this.map.contains(o); } public boolean add( Map.Entry e ) { return this.map.put(e.getKey(), e.getValue())==null; } public boolean addAll( Collection > c ) { return this.map.addAll( c ); } public V addFirst( K key, V value ) { return this.map.addFirst( key, value ); } public V addLast( K key, V value ) { return this.map.addLast( key, value ); } //@Override public void addFirst( Map.Entry e ) { this.map.addFirst( e ); } //@Override public void addLast( Map.Entry e ) { this.map.addLast( e ); } public Map.Entry getFirst() { return this.map.getFirst(); } public Map.Entry getLast() { return this.map.getLast(); } public Map.Entry removeFirst() { return this.map.removeFirst(); } public Map.Entry removeLast() { return this.map.removeLast(); } // Queue operations. 
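
    // ( Every queue/deque call below delegates to the backing LinkedTreeMap, so the list view
    //   and the map view always observe the same entry chain.
    //   [ Editorial sketch, illustrative only; not part of the original source. It assumes the
    //     <K, V> type parameters that the extraction stripped from these declarations. ]
    //
    //       LinkedTreeMap<String, Integer> map = new LinkedTreeMap<>();
    //       map.put( "x", 1 );
    //       LinkedTreeMapList<String, Integer> list = new LinkedTreeMapList<>( map );
    //       list.addLast( "y", 2 );                  // visible through map.get( "y" ) as well
    //       Map.Entry<String, Integer> first = list.getFirst();   // the entry "x"=1
    // )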
public Map.Entry peek() { return this.map.peek(); } public Map.Entry element() { return this.map.element(); } public Map.Entry poll() { return this.map.poll(); } public Map.Entry remove() { return this.map.remove(); } public boolean offer( Map.Entry e ) { return this.map.offer(e); } // Deque operations public boolean offerFirst( Map.Entry e ) { return this.map.offerFirst(e); } public boolean offerLast( Map.Entry e ) { return this.map.offerLast(e); } public Map.Entry peekFirst() { return this.map.peekFirst(); } public Map.Entry peekLast() { return this.map.peekLast(); } public Map.Entry pollFirst() { return this.map.pollFirst(); } public Map.Entry pollLast() { return this.map.pollLast(); } public void push( Map.Entry e ) { this.map.push(e); } public Map.Entry pop() { return this.map.pop(); } public boolean removeFirstOccurrence( Object o ) { return this.map.removeFirstOccurrence(o); } public boolean removeLastOccurrence( Object o ) { return this.map.removeLastOccurrence(o); } public ListIterator > listIterator( int index ) { return this.map.listIterator( index ); } public Iterator > iterator() { return this.map.iterator(); } public Iterator > descendingIterator(){ return this.map.descendingIterator(); } public Object[] toArray() { return this.map.toArray(); } public T[] toArray( T[] a ) { return this.map.toArray(a); } public boolean containsAll( Collection c ) { return this.map.containsAll(c); } public boolean removeAll( Collection c ) { return this.map.removeAll(c); } public boolean retainAll( Collection c ) { return this.map.retainAll(c); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedTreeSet.java ================================================ package com.pinecone.framework.unit; import java.io.InvalidObjectException; import java.io.Serializable; import java.util.*; public class LinkedTreeSet extends AbstractSet implements Set, Cloneable, Serializable { static final long serialVersionUID = -5024744406713321676L; private transient LinkedTreeMap map; private static final Object PRESENT = new Object(); public LinkedTreeSet() { this.map = new LinkedTreeMap<>(); } public LinkedTreeSet( Collection c ) { this.map = new LinkedTreeMap<>(); this.addAll(c); } public LinkedTreeSet( Comparator comparator ) { this.map = new LinkedTreeMap<>( comparator ); } public LinkedTreeSet( Comparator comparator, boolean accessOrder ) { super(); this.map = new LinkedTreeMap<>( comparator, accessOrder ); } public LinkedTreeSet( boolean accessOrder ) { super(); this.map = new LinkedTreeMap<>( accessOrder ); } public Iterator iterator() { return this.map.keySet().iterator(); } public int size() { return this.map.size(); } public boolean isEmpty() { return this.map.isEmpty(); } public boolean contains( Object o ) { return this.map.containsKey(o); } public boolean add(E e) { return this.map.put(e, PRESENT)==null; } public boolean remove(Object o) { return this.map.remove(o)==PRESENT; } public void clear() { this.map.clear(); } @SuppressWarnings("unchecked") public Object clone() { try { LinkedTreeSet newSet = (LinkedTreeSet) super.clone(); newSet.map = (LinkedTreeMap ) this.map.clone(); return newSet; } catch ( CloneNotSupportedException e ) { throw new InternalError(e); } } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { s.defaultWriteObject(); s.writeInt( this.map.size() ); // Write out all elements in the proper order. 
for ( E e : this.map.keySet() ) { s.writeObject(e); } } private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); int size = s.readInt(); if ( size < 0 ) { throw new InvalidObjectException("Illegal size: " + size); } this.map = new LinkedTreeMap<>(); for ( int i = 0; i < size; i++ ) { @SuppressWarnings("unchecked") E e = (E) s.readObject(); this.map.put( e, PRESENT ); } } public Spliterator spliterator() { return LinkedTreeMap.keySpliteratorFor( this.map ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ListDictium.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSON; import java.math.BigDecimal; import java.util.List; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.ListIterator; import java.util.Map; import java.util.Set; import java.util.AbstractSet; import java.util.Spliterator; import java.util.Spliterators; import java.util.NoSuchElementException; public class ListDictium implements Dictium, List { private List mTargetList; public ListDictium( List target ) { this.mTargetList = target; } public ListDictium() { this( new ArrayList<>() ); } public static int asInt32Key( Object key ) { if ( key instanceof Integer ) { return (int) key; } else if ( key instanceof Float || key instanceof Double || key instanceof BigDecimal ) { throw new IllegalArgumentException( "Array does not allow float as key." ); } else if ( key instanceof Number ) { return ((Number) key).intValue(); } else if ( key instanceof String ) { return Integer.parseInt((String) key); } throw new IllegalArgumentException( "Key of Array should be integer or integer-fmt-string." 
); } @Override public int size() { return this.mTargetList.size(); } @Override public boolean isEmpty() { return this.mTargetList.isEmpty(); } @Override public void clear() { this.mTargetList.clear(); } @Override public boolean containsKey( Object key ) { try { int index = ListDictium.asInt32Key( key ); return index >= 0 && index < this.mTargetList.size(); } catch ( IllegalArgumentException e ) { return false; } } @Override public boolean containsValue( Object value ) { return this.contains( value ); } @Override public boolean contains( Object o ) { return this.mTargetList.contains( o ); } @Override public boolean add( V v ) { return this.mTargetList.add( v ); } @Override public boolean containsAll( Collection c ) { return this.mTargetList.containsAll( c ); } @Override public boolean addAll( int index, Collection c ) { return this.mTargetList.addAll( index, c ); } @Override public boolean addAll( Collection c ) { return this.mTargetList.addAll( c ); } @Override public boolean removeAll( Collection c ) { return this.mTargetList.removeAll( c ); } @Override public boolean retainAll( Collection c ) { return this.mTargetList.retainAll( c ); } @Override public V get( Object key ) { try { int index = ListDictium.asInt32Key( key ); if ( index >= 0 && index < this.mTargetList.size() ) { return this.mTargetList.get( index ); } } catch ( IllegalArgumentException e ) { // Do nothing } return null; } @Override public V get( int index ) { return this.mTargetList.get( index ); } @Override public V set( int index, V value ) { while ( this.mTargetList.size() <= index ) { this.mTargetList.add( null ); } return this.mTargetList.set( index, value ); } @Override public void add( int index, V value ) { while ( this.mTargetList.size() <= index ) { this.mTargetList.add( null ); } this.mTargetList.add( index, value ); } @Override public V insert( Object key, V value ) { int index = ListDictium.asInt32Key( key ); return this.set( index, value ); } @Override public V insertIfAbsent( Object key, V value ) { if( !this.containsKey( ListDictium.asInt32Key( key ) ) ){ this.insert( key, value ); } return null; } @Override public V erase( Object key ) { try { int index = ListDictium.asInt32Key( key ); if ( index >= 0 && index < this.mTargetList.size() ) { return this.mTargetList.remove(index); } } catch ( IllegalArgumentException e ) { // Do nothing } return null; } @Override public boolean remove( Object key ) { this.erase( key ); return true; } @Override public V remove( int index ) { return this.mTargetList.remove( index ); } @Override public int indexOf( Object o ) { return this.mTargetList.indexOf( o ); } @Override public int lastIndexOf( Object o ) { return this.mTargetList.lastIndexOf( o ); } @Override public ListIterator listIterator() { return this.mTargetList.listIterator(); } @Override public ListIterator listIterator( int index ) { return this.mTargetList.listIterator( index ); } @Override public List subList( int fromIndex, int toIndex ) { return this.mTargetList.subList( fromIndex, toIndex ); } @Override public Iterator iterator() { return this.mTargetList.iterator(); } @Override public Set > entrySet() { return new ListEntrySet(); } public Set keySet() { return new ListKeyEntrySet(); } @Override public Collection values() { return this.mTargetList; } @Override public Map toMap() { return new ListMap(); } @Override public List toList() { return this.mTargetList; } @Override public boolean hasOwnProperty( Object index ) { return this.containsKey( index ); } @Override public String toString() { return 
this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } @Override public T[] toArray( T[] a ) { return this.mTargetList.toArray( a ); } @Override public Object[] toArray() { return this.mTargetList.toArray(); } protected class ListEntrySet extends AbstractSet > { @Override public Iterator > iterator() { return new ListEntryIterator(); } @Override public int size() { return ListDictium.this.mTargetList.size(); } @Override public void clear() { ListDictium.this.clear(); } @Override public boolean contains(Object o) { if (!(o instanceof Map.Entry)) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); Object v = ListDictium.this.get(key); return v != null && v.equals(e.getValue()); } @Override public boolean remove(Object o) { if (this.contains(o)) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); return ListDictium.this.erase(key) != null; } return false; } @Override public Spliterator> spliterator() { return Spliterators.spliterator(this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT); } } protected abstract class DiListEntryIterator { protected Iterator currentIterator; protected int index; protected ListEntry dummyEntry; public DiListEntryIterator() { this.index = 0; this.currentIterator = ListDictium.this.mTargetList.iterator(); this.dummyEntry = new ListEntry<>( this.index, null ); } public boolean hasNext() { return this.currentIterator.hasNext(); } protected Map.Entry nextNode() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } this.dummyEntry.setKey( this.index++ ); this.dummyEntry.setValue( this.currentIterator.next() ); return this.dummyEntry; } public void remove() { this.currentIterator.remove(); } } protected class ListEntryIterator extends DiListEntryIterator implements Iterator> { @Override public Map.Entry next() { return this.nextNode(); } } protected static boolean valEquals( Object o1, Object o2 ) { return (o1==null ? o2==null : o1.equals(o2)); } protected static class ListEntry implements Map.Entry, Pinenut { Integer key; V value; ListEntry( Integer key, V value ) { this.key = key; this.value = value; } @Override public Integer getKey() { return this.key; } @Override public V getValue() { return this.value; } @Override public V setValue( V value ) { V oldValue = this.value; this.value = value; return oldValue; } public void setKey( Integer key ) { this.key = key; } @Override public boolean equals( Object o ) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; return valEquals(key,e.getKey()) && valEquals(value,e.getValue()); } @Override public int hashCode() { int keyHash = (key==null ? 0 : key.hashCode()); int valueHash = (value==null ? 
0 : value.hashCode()); return keyHash ^ valueHash; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return "{" + StringUtils.jsonQuote( this.key.toString() ) + ":" + JSON.stringify( this.value ) + "}"; } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } } protected class ListKeyIterator extends DiListEntryIterator implements Iterator { @Override public Integer next() { return this.nextNode().getKey(); } } protected class ListKeyEntrySet extends AbstractSet { @Override public Iterator iterator() { return new ListKeyIterator(); } @Override public int size() { return ListDictium.this.mTargetList.size(); } @Override public void clear() { ListDictium.this.clear(); } @Override public boolean contains( Object o ) { if (!(o instanceof Map.Entry)) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); Object v = ListDictium.this.get(key); return v != null && v.equals(e.getValue()); } @Override public boolean remove( Object o ) { if (this.contains(o)) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); return ListDictium.this.erase(key) != null; } return false; } @Override public Spliterator spliterator() { return Spliterators.spliterator(this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT); } } protected class ListMap implements Map, PineUnit { @Override public int size() { return ListDictium.this.size(); } @Override public boolean isEmpty() { return ListDictium.this.isEmpty(); } @Override public void clear() { ListDictium.this.clear(); } @Override public boolean containsKey( Object key ) { return ListDictium.this.containsKey( key ); } @Override public boolean containsValue( Object value ) { return ListDictium.this.containsValue( value ); } @Override public V get( Object key ) { return ListDictium.this.get( key ); } @Override public V put( Integer index, V value ) { return ListDictium.this.set( index, value ); } @Override public V remove( Object key ) { return ListDictium.this.erase( key ); } @Override public void putAll( Map m ) { for( Map.Entry kv: m.entrySet() ) { this.put( kv.getKey(), kv.getValue() ); } } @Override public Set > entrySet() { return new ListEntrySet(); } @Override public Set keySet() { return new ListKeyEntrySet(); } @Override public Collection values() { return ListDictium.this.values(); } @Override public boolean hasOwnProperty( Object index ) { return this.containsKey( index ); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ListedSortedMap.java ================================================ package com.pinecone.framework.unit; import java.util.ListIterator; import java.util.SortedMap; public interface ListedSortedMap extends SortedMap { ListIterator > listIterator(int index ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MapDictium.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.util.json.JSON; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Collection; import java.util.LinkedHashMap; public class MapDictium implements Dictium, Map { private Map mTargetMap; public MapDictium( Map map ) { this.mTargetMap = map; } @SuppressWarnings( 
"unchecked" ) public MapDictium( Map map, boolean bUnchecked ) { this( map ); } public MapDictium() { this( new LinkedHashMap<>() ); } @Override public int size() { return this.mTargetMap.size(); } @Override public boolean isEmpty() { return this.mTargetMap.isEmpty(); } @Override public void clear() { this.mTargetMap.clear(); } @Override public boolean containsKey( Object key ) { return this.mTargetMap.containsKey(key); } @Override public boolean containsValue( Object value ) { return this.mTargetMap.containsValue( value ); } @Override public V get( Object key ) { return this.mTargetMap.get( key ); } @Override public V put( Object key, V value ) { return this.insert( key, value ); } @Override public void putAll( Map m ) { this.mTargetMap.putAll( m ); } @Override public V insert( Object key, V value ) { return this.mTargetMap.put( key, value ); } @Override public V insertIfAbsent( Object key, V value ) { return this.mTargetMap.putIfAbsent( key, value ); } @Override public V erase( Object key ) { return this.mTargetMap.remove( key ); } @Override public V remove( Object key ) { return this.erase( key ); } @Override public boolean remove( Object key, Object value ) { return this.mTargetMap.remove( key, value ); } @Override public Set > entrySet() { return this.mTargetMap.entrySet(); } @Override public Set keySet() { return this.mTargetMap.keySet(); } @Override public Collection values() { return this.mTargetMap.values(); } @Override public Map toMap() { return this.mTargetMap; } @Override public List toList() { return new ArrayList<>( this.mTargetMap.values() ); } @Override public boolean hasOwnProperty( Object index ) { return this.containsKey( index ); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this.mTargetMap ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Mapnut.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import java.util.Map; public interface Mapnut extends PineUnit, Map { // WARNING, Modified outside will provokes unpredictable results. [ Readonly for performance purpose, in principle ] // Java has not the const function, this inconvenient... // Equals `Map::Entry getEntryByKey( Object compatibleKey )` Map.Entry getEntryByKey( Object compatibleKey ); // Jesus christ... Even it is not full-safe outside, but we need this! // Equals `const Map::Entry getEntryByKey( Object compatibleKey ) const` Map.Entry getEntryCopyByKey( Object compatibleKey ); // Ah, this one is more safer. 
default long megaSize(){ return this.size(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiScopeMap.java ================================================ package com.pinecone.framework.unit; import java.util.*; public interface MultiScopeMap extends ScopeMap { List > getParents (); Map thisScope (); MultiScopeMap setParents ( List > that ); MultiScopeMap setThisScope ( Map that ); MultiScopeMap addParent ( MultiScopeMap that ); String getName (); MultiScopeMap setName ( String name ); default boolean isAnonymous () { return this.getName().isEmpty(); } @Override default boolean isProgenitor () { return this.getParents() == null; } @Override default void purge() { this.setParents( null ); this.clear(); } @Override default void depurate() { List > p = this.getParents(); if( p != null ) { for ( MultiScopeMap m : p ) { m.depurate(); } } this.clear(); } @Override default void overrideTo ( Map neo ) { Map self = this.thisScope(); for ( Map.Entry e : self.entrySet() ){ neo.putIfAbsent( e.getKey(), e.getValue() ); } List > p = this.getParents(); if( p != null ) { for ( MultiScopeMap m : p ) { m.overrideTo( neo ); } } } @Override default boolean isScopeEmpty () { boolean b = this.isEmpty(); if( b ) { List > p = this.getParents(); if( p != null ) { for ( MultiScopeMap m : p ) { b = m.isScopeEmpty(); if( !b ) { break; } } } } return b; } @Override @SuppressWarnings("unchecked") default ScopeMap[] ancestors (){ List> l = new ArrayList<>(); ScopeTrees.groupByNodes( this, l ); return l.toArray( (ScopeMap[]) new MultiScopeMap[0] ); } @Override @SuppressWarnings("unchecked") default ScopeMap[] scopes (){ ArrayList > l = new ArrayList<>(); l.add( this ); ScopeTrees.groupByNodes( this, l ); return l.toArray( (ScopeMap[]) new MultiScopeMap[0] ); } @Override @SuppressWarnings("unchecked") default ScopeMap getAll ( Object key, List ret ) { V v = this.thisScope().get( key ); if( v != null ) { ret.add( v ); } ScopeTrees.search( this, ( Object...args )->{ MultiScopeMap currentMap = (MultiScopeMap) args[0]; if( currentMap != this ) { V t = currentMap.thisScope().get( key ); if( t != null ) { ret.add( t ); } } return false; } ); return this; } @SuppressWarnings("unchecked") default V query ( Object key, String szParentNS ) { final V[] v = (V[]) new Object[1]; v[0] = this.thisScope().get(key); if (v[0] != null) { return v[0]; } ScopeTrees.search(this, (Object... args) -> { MultiScopeMap currentMap = (MultiScopeMap) args[0]; if (currentMap != this) { V t = currentMap.thisScope().get(key); if (t != null && currentMap.getName().equals(szParentNS)) { v[0] = t; return true; } } return false; }); return v[0]; } @SuppressWarnings("unchecked") default MultiScopeMap getScopeByNS ( String szParentNS ) { final Object[] v = new Object[1]; ScopeTrees.search(this, (Object... 
args) -> { MultiScopeMap currentMap = (MultiScopeMap) args[0]; if ( currentMap != this ) { if ( currentMap.getName().equals(szParentNS) ) { v[0] = currentMap; return true; } } return false; }); return (MultiScopeMap )v[0]; } @Override default MultiScopeMap removeAll ( Object key ) { this.thisScope().remove( key ); List > p = this.getParents(); if( p != null ) { for ( MultiScopeMap m : p ) { m.removeAll( key ); } } return this; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiScopeMaptron.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.util.json.JSON; import java.io.Serializable; import java.util.*; public class MultiScopeMaptron implements PineUnit, Map, MultiScopeMap, Cloneable, Serializable, Iterable > { protected String mszName ; protected List > mParents ; protected Map mThisMap ; protected transient Set > entrySet ; protected transient Set scKeySet ; protected transient Collection scValues ; public MultiScopeMaptron() { this( true, null ); } public MultiScopeMaptron( String name ) { this( true, null ); this.setName( name ); } public MultiScopeMaptron( Map thisMap, List > prototypes, String name ){ this.mThisMap = thisMap; this.mParents = prototypes; this.mszName = name; if( this.mThisMap == null ) { this.mThisMap = new LinkedHashMap<>(); } } public MultiScopeMaptron( Map thisMap, List > prototypes ){ this( thisMap, prototypes, "" ); } public MultiScopeMaptron( boolean bLinked, List > prototypes ){ this( bLinked ? new LinkedHashMap<>() : new HashMap<>(), prototypes ); } public MultiScopeMaptron( Map thisMap ){ this( thisMap, null ); } @Override public String getName () { return this.mszName; } @Override public MultiScopeMap setName ( String name ) { this.mszName = name; return this; } @Override public List > getParents () { return this.mParents; } @Override public Map thisScope () { return this.mThisMap; } @Override public MultiScopeMap setParents ( List > that ) { this.mParents = that; return this; } @Override public MultiScopeMap setThisScope ( Map that ) { this.mThisMap = that; return this; } @Override public MultiScopeMap addParent ( MultiScopeMap that ) { if( this.getParents() == null ) { this.mParents = new ArrayList<>(); } this.mParents.add( that ); return this; } @Override public ScopeMap elevate ( Map child ) { MultiScopeMaptron sup = new MultiScopeMaptron<>( this.mThisMap, this.mParents ); this.setThisScope ( child ); ArrayList > a = new ArrayList<>(); a.add( sup ); this.setParents ( a ); return this; } /** Basic Map **/ @Override public int size() { return this.mThisMap.size(); } @Override public boolean isEmpty() { return this.mThisMap.isEmpty(); } @Override public boolean containsKey( Object key ) { boolean result = this.mThisMap.containsKey(key); if ( !result && this.mParents != null ) { for ( MultiScopeMap m : this.mParents ) { result = m.containsKey( key ); if( result ) { break; } } } return result; } @Override public boolean containsValue( Object value ) { boolean result = this.mThisMap.containsValue(value); if ( !result && this.mParents != null ) { for ( MultiScopeMap m : this.mParents ) { result = m.containsValue( value ); if( result ) { break; } } } return result; } // For Multiple-Inheritance Scope, for ambiguous sibling-super-key, it will only find in the nearest parent. 
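
    // ( For example: with parents added in order [ p1, p2 ] and both defining "k", the get()
    //   below returns p1's value, since the loop stops at the first parent yielding non-null.
    //   [ Editorial sketch, illustrative only; not part of the original source. It assumes the
    //     <K, V> type parameters that the extraction stripped from these declarations. ]
    //
    //       MultiScopeMaptron<String, Integer> p1 = new MultiScopeMaptron<>( "p1" );
    //       p1.put( "k", 1 );
    //       MultiScopeMaptron<String, Integer> p2 = new MultiScopeMaptron<>( "p2" );
    //       p2.put( "k", 2 );
    //       MultiScopeMaptron<String, Integer> child = new MultiScopeMaptron<>();
    //       child.addParent( p1 ).addParent( p2 );
    //       child.get( "k" );                        // -> 1 ( the nearest parent wins )
    // )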
@Override public V get( Object key ) { V val = this.mThisMap.get( key ); if ( val == null && this.mParents != null ) { for ( MultiScopeMap m : this.mParents ) { val = m.get( key ); if( val != null ) { break; } } } return val; } @Override public void putAll( Map m ) { this.mThisMap.putAll( m ); } public MultiScopeMaptron xPutAll(Map m ) { this.putAll(m); return this; } @Override public void clear() { this.mThisMap.clear(); } public MultiScopeMaptron xClear() { this.clear(); return this; } @Override public V remove( Object key ) { V v = this.mThisMap.remove(key); if ( v == null && this.mParents != null ) { for ( MultiScopeMap m : this.mParents ) { v = m.remove( key ); if( v != null ) { break; } } } return v; } public MultiScopeMaptron xRemove( Object key ) { this.remove(key); return this; } @Override public Set keySet() { return this.mThisMap.keySet(); } @Override public Collection values() { return this.mThisMap.values(); } @Override public Set> entrySet() { return this.mThisMap.entrySet(); } @Override public Iterator > iterator() { return this.mThisMap.entrySet().iterator(); } @Override public V put( K key, V value ) { return this.mThisMap.put( key, value ); } @Override public V putIfAbsent( K key, V value ) { return this.mThisMap.putIfAbsent( key, value ); } @Override public boolean hasOwnProperty ( Object key ) { return this.mThisMap.containsKey( key ); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this.mThisMap ); } @Override public Iterator > scopeIterator() { return null; } @Override public Set > scopeEntrySet() { Set> es; return (es = this.entrySet) == null ? (this.entrySet = new ScopeEntrySet()) : es; } @Override public Set scopeKeySet() { return null; } @Override public Collection scopeValues() { return null; } protected final class ScopeEntrySet extends AbstractSet > { public final int size() { throw new UnsupportedOperationException("Iterator only."); } public final void clear() { MultiScopeMaptron.this.clear(); } public final Iterator > iterator() { return new ScopeEntryIterator(); } public final boolean contains( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); V v = MultiScopeMaptron.this.get(key); return v != null && v.equals(e.getValue()); } public final boolean remove( Object o ) { if ( this.contains(o) ) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); return MultiScopeMaptron.this.remove(key) != null; } return false; } public final Spliterator> spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT ); } } protected abstract class ScopeIterator { protected MultiScopeMap[] parentsStack; protected int stackAt; protected Iterator > currentIterator; public ScopeIterator() { this.parentsStack = (MultiScopeMap[]) MultiScopeMaptron.this.ancestors(); this.stackAt = 0; this.currentIterator = MultiScopeMaptron.this.mThisMap.entrySet().iterator(); } private boolean parentsHasNext() { boolean b = this.stackAt < this.parentsStack.length; if( b ) { MultiScopeMap parentMap = this.parentsStack[ this.stackAt ]; if( parentMap != null && parentMap.thisScope() != null ) { return true; } } return b; } public boolean hasNext() { if ( this.currentIterator.hasNext() ) { return true; } else { while ( parentsHasNext() ) { MultiScopeMap parentMap = this.parentsStack[ this.stackAt ]; ++this.stackAt; if ( parentMap != null ) { this.currentIterator = 
parentMap.thisScope().entrySet().iterator(); if ( this.currentIterator.hasNext() ) { return true; } } } return false; } } protected Map.Entry nextNode() { if (!hasNext()) { throw new NoSuchElementException(); } return this.currentIterator.next(); } public void remove() { this.currentIterator.remove(); } } final class ScopeKeySet extends AbstractSet { public final int size() { throw new UnsupportedOperationException("Iterator only."); } public final void clear() { MultiScopeMaptron.this.clear(); } public final Iterator iterator() { return new ScopeKeyIterator(); } public final boolean contains( Object o ) { return containsKey(o); } public final boolean remove( Object key ) { return MultiScopeMaptron.this.remove(key) != null; } public final Spliterator spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT ); } } protected final class ScopeKeyIterator extends ScopeIterator implements Iterator { public final K next() { return nextNode().getKey(); } } final class ScopeValues extends AbstractCollection { public final int size() { throw new IllegalStateException("Iterator only."); } public final void clear() { MultiScopeMaptron.this.clear(); } public final Iterator iterator() { return new ScopeValueIterator(); } public final boolean contains( Object o ) { return containsValue(o); } public final Spliterator spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED ); } } protected final class ScopeValueIterator extends ScopeIterator implements Iterator { public final V next() { return (V)nextNode().getValue(); } } protected final class ScopeEntryIterator extends ScopeIterator implements Iterator> { public final Map.Entry next() { return nextNode(); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiValueMap.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.unit.multi.MultiCollectionProxyMap; import java.util.List; public interface MultiValueMap extends MultiCollectionProxyMap > { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiValueMapper.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import java.util.Collection; import java.util.Map; import java.util.Set; public interface MultiValueMapper extends PineUnit { V getFirst( K k ); V add( K k, V v ); V set( K k, V v ); void setAll( Map m ); V erase( Object key, V value ); Map toSingleValueMap(); V get( Object k, V v ); @Override default boolean hasOwnProperty( Object key ) { return this.containsKey( key ); } int size(); boolean isEmpty(); boolean containsKey( Object key ); boolean containsValue( Object value ); Collection get(Object key ); Collection puts( K key, Collection value ); Collection remove( Object key ); void putsAll( Map > m ); void clear(); Set keySet(); Collection > values(); Set > > entrySet(); Collection > collection() ; Collection collectionValues(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiValueMaptron.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.unit.multi.MultiCollectionProxyMap; import com.pinecone.framework.util.json.JSON; import java.io.Serializable; import 
java.util.Collection; import java.util.Map; import java.util.LinkedHashMap; import java.util.ArrayList; import java.util.Iterator; import java.util.Set; public class MultiValueMaptron > extends AbstractMultiValueMap implements MultiCollectionProxyMap, Serializable { private static final long serialVersionUID = 3801124242820219131L; private final Map mTargetMap; public MultiValueMaptron( Map otherMap, boolean bAssimilate ) { if( bAssimilate ) { this.mTargetMap = otherMap; } else { this.mTargetMap = new LinkedHashMap<>( otherMap ); } } public MultiValueMaptron() { this( new LinkedHashMap<>(), true ); } protected Map getTargetMap(){ return this.mTargetMap; } @SuppressWarnings( "unchecked" ) protected U newCollection() { return (U) new ArrayList(); } @Override public V add( K key, V value ) { U values = (U)this.mTargetMap.get( key ); if ( values == null ) { values = this.newCollection(); this.mTargetMap.put( key, values ); } if( ((U)values).add( value ) ){ return value; } return null; } @Override public V getFirst( K key ) { U values = (U)this.mTargetMap.get( key ); return values != null ? values.iterator().next() : null; } @Override public V set( K key, V value ) { U values = this.newCollection(); boolean b = values.add( value ); this.mTargetMap.put( key, values ); if( b ) { return value; } return null; } @Override public void setAll( Map values ) { Iterator > iter = values.entrySet().iterator(); while( iter.hasNext() ) { Entry entry = (Entry)iter.next(); this.set( entry.getKey(), entry.getValue() ); } } @Override public Map toSingleValueMap() { LinkedHashMap singleValueMap = new LinkedHashMap<>(this.mTargetMap.size()); Iterator > iter = this.mTargetMap.entrySet().iterator(); while( iter.hasNext() ) { Entry entry = (Entry)iter.next(); singleValueMap.put( entry.getKey(), ((U)entry.getValue()).iterator().next() ); } return singleValueMap; } @Override public int size() { return this.mTargetMap.size(); } @Override public boolean isEmpty() { return this.mTargetMap.isEmpty(); } @Override public boolean containsKey( Object key ) { return this.mTargetMap.containsKey(key); } @Override public boolean containsValue( Object value ) { return this.mTargetMap.containsValue(value); } @Override public U get( Object key ) { return (U)this.mTargetMap.get(key); } @Override public U put( K key, U value ) { return (U)this.mTargetMap.put( key, value ); } @Override public U remove( Object key ) { return (U)this.mTargetMap.remove(key); } @Override public void putAll( Map m ) { this.mTargetMap.putAll(m); } @Override public void clear() { this.mTargetMap.clear(); } @Override public Set keySet() { return this.mTargetMap.keySet(); } @Override public Collection values() { return this.mTargetMap.values(); } @Override public Set > entrySet() { return this.mTargetMap.entrySet(); } @Override public boolean equals( Object obj ) { return this.mTargetMap.equals(obj); } @Override public int hashCode() { return this.mTargetMap.hashCode(); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this.mTargetMap ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/PrecedeMultiMaptron.java ================================================ package com.pinecone.framework.unit; import java.util.*; public class PrecedeMultiMaptron extends MultiScopeMaptron implements PrecedeMultiScopeMap { protected MultiScopeMap mPrecedeScope; public PrecedeMultiMaptron() { this( true, null ); } public 
PrecedeMultiMaptron( String name ) { this( true, null ); this.setName( name ); } public PrecedeMultiMaptron( Map thisMap, List > prototypes, String name, MultiScopeMap precedeScope ){ super( thisMap, prototypes, name ); this.mPrecedeScope = precedeScope; if( this.mPrecedeScope == null ) { this.mPrecedeScope = new MultiScopeMaptron<>(); } } public PrecedeMultiMaptron( Map thisMap, List > prototypes, String name, Map precedeScope ){ super( thisMap, prototypes, name ); this.mPrecedeScope = new MultiScopeMaptron<>( precedeScope ); } public PrecedeMultiMaptron( Map thisMap, List > prototypes, String name ){ this( thisMap, prototypes, name, (MultiScopeMap ) null ); } public PrecedeMultiMaptron( Map thisMap, List > prototypes ){ this( thisMap, prototypes, "" ); } public PrecedeMultiMaptron( boolean bLinked, List > prototypes ){ this( bLinked ? new LinkedHashMap<>() : new HashMap<>(), prototypes ); } public PrecedeMultiMaptron( Map thisMap ){ this( thisMap, null ); } @Override public MultiScopeMap getPrecedeScope() { return this.mPrecedeScope; } @Override public MultiScopeMap setPrecedeScope ( MultiScopeMap that ){ this.mPrecedeScope = that; return this; } @Override @SuppressWarnings("unchecked") public ScopeMap[] scopes (){ ArrayList > l = new ArrayList<>(); l.add( this.getPrecedeScope() ); l.add( this ); ScopeTrees.groupByNodes( this, l ); return l.toArray( (ScopeMap[]) new MultiScopeMap[0] ); } @Override public boolean containsKey( Object key ) { boolean result = this.getPrecedeScope().containsKey( key ); if( !result ) { result = super.containsKey( key ); } return result; } @Override public boolean containsValue( Object value ) { boolean result = this.getPrecedeScope().containsValue( value ); if( !result ) { result = super.containsValue( value ); } return result; } @Override public V get( Object key ) { V val = this.getPrecedeScope().get( key ); if( val == null ) { val = super.get( key ); } return val; } @Override public MultiScopeMap removeAll ( Object key ) { this.getPrecedeScope().removeAll( key ); return super.removeAll( key ); } @Override public void purge() { this.getPrecedeScope().clear(); super.purge(); } @Override public void depurate() { this.getPrecedeScope().clear(); super.depurate(); } @Override public void overrideTo ( Map neo ) { Map p = this.getPrecedeScope(); for ( Map.Entry e : p.entrySet() ){ neo.putIfAbsent( e.getKey(), e.getValue() ); } super.overrideTo( neo ); } @Override public void onlyOverrideFamilyTo ( Map neo ) { super.overrideTo( neo ); } @Override public boolean isScopeEmpty () { boolean b = this.getPrecedeScope().isScopeEmpty(); if( b ) { return super.isScopeEmpty(); } return b; } @Override public MultiScopeMap getAll ( Object key, List ret ) { V v = this.getPrecedeScope().get( key ); if( v != null ) { ret.add( v ); } super.getAll( key, ret ); return this; } @Override public V query ( Object key, String szParentNS ) { V v = this.getPrecedeScope().query( key, szParentNS ); if ( v != null ) { return v; } return super.query( key, szParentNS ); } @Override public MultiScopeMap getScopeByNS ( String szParentNS ) { MultiScopeMap s = this.getPrecedeScope().getScopeByNS( szParentNS ); if ( s != null ) { return s; } return super.getScopeByNS( szParentNS ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/PrecedeMultiScopeMap.java ================================================ package com.pinecone.framework.unit; import java.util.Map; public interface PrecedeMultiScopeMap extends MultiScopeMap {
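/* Added illustrative sketch (not part of the original source), assuming the resolution
 * order implemented by PrecedeMultiMaptron above: the precede scope is always consulted
 * before this scope and its ancestors, so precede entries shadow family entries.
 *
 *     PrecedeMultiMaptron env = new PrecedeMultiMaptron( "env" );
 *     env.put( "mode", "family" );
 *     env.getPrecedeScope().put( "mode", "precede" );
 *     env.get( "mode" );             // "precede": the precede scope wins
 *     env.containsKey( "missing" );  // false: absent from both scopes
 */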
MultiScopeMap getPrecedeScope(); MultiScopeMap setPrecedeScope ( MultiScopeMap that ); default MultiScopeMap setPrecedeScope ( Map that ) { return this.setPrecedeScope( new MultiScopeMaptron<>( that ) ); } void onlyOverrideFamilyTo ( Map neo ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ScopeMap.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import java.util.*; public interface ScopeMap extends PineUnit, Map { Iterator > scopeIterator() ; Set > scopeEntrySet() ; Set scopeKeySet() ; Collection scopeValues() ; boolean isProgenitor() ; // [Purge] Clear itself and its ancestors, nothing left. void purge(); // [Purify] Clear the elements of itself and its ancestors, preserving the ancestor tree itself. void depurate(); // Override and apply all ancestors and itself to a new map. void overrideTo ( Map neo ) ; boolean isScopeEmpty(); // Elevate self to serve as a new parent, appending the given child to the current this-scope. ScopeMap elevate ( Map child ) ; ScopeMap[] ancestors (); ScopeMap[] scopes (); ScopeMap getAll ( Object key, List ret ); @SuppressWarnings("unchecked") default V[] getAll ( Object key ) { ArrayList a = new ArrayList<>(); this.getAll( key, a ); return (V[]) a.toArray(); } ScopeMap removeAll ( Object key ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ScopeTrees.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.PineRuntimeException; import com.pinecone.framework.system.functions.Function; import java.util.ArrayDeque; import java.util.Deque; import java.util.List; public abstract class ScopeTrees { public static void search ( MultiScopeMap that, Function fn ) { Deque> stack = new ArrayDeque<>(); stack.push( that ); while ( !stack.isEmpty() ) { MultiScopeMap currentMap = stack.pop(); try{ if( (boolean) fn.invoke( currentMap ) ) { break; } } catch ( Exception e ) { throw new PineRuntimeException( e ); } List > parents = currentMap.getParents(); if( parents != null ) { for ( MultiScopeMap parent : parents ) { stack.push( parent ); } } } } public static void groupByNodes( MultiScopeMap that, List > list ) { Deque> stack = new ArrayDeque<>(); stack.push( that ); while ( !stack.isEmpty() ) { MultiScopeMap currentMap = stack.pop(); if( currentMap != that ) { list.add( currentMap ); } List > parents = currentMap.getParents(); if( parents != null ) { for ( MultiScopeMap parent : parents ) { stack.push( parent ); } } } } public static void groupByNodes( UniScopeMap that, List > list ) { UniScopeMap p = that.parent(); while ( p != null ) { list.add( p ); p = p.parent(); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/SharedList.java ================================================ package com.pinecone.framework.unit; import java.io.Serializable; import java.util.*; import java.util.stream.Collectors; import com.pinecone.framework.system.prototype.PineUnit; /** * Pinecone Ursus For Java SharedList * SharedList Author: Genius (https://geniusay.com) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ********************************************************** * Thanks for genius's contribution.
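* Usage sketch (illustrative only, based on the builder methods defined below):
* <pre>{@code
*     List<Integer> base = List.of( 1, 2, 3, 4, 5 );
*     SharedList<Integer> slice  = SharedList.SharedListBuilder.slice( 1, 3, base );  // view of 2, 3, 4
*     SharedList<Integer> merged = SharedList.SharedListBuilder.merge( base, base );  // logical length 10, no copying
*     merged.get( 7 );  // resolves into the second shared segment: 3
* }</pre>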
* ********************************************************** */ public class SharedList extends AbstractList implements List, Serializable, PineUnit { public interface SharedListBuilder{ static SharedList slice(int length, List list){ return slice(0, length, list); } static SharedList slice(int start, int length, List list){ checkStartAndLength(start, length, list); return freezeList(start, length, list); } static SharedList merge(List list){ listNotNull(list); return freedomList(list); } @SafeVarargs static SharedList merge(List...lists){ List> sharedLists = Arrays.stream(lists).map(SharedListBuilder::freezeList).collect(Collectors.toList()); return new SharedList<>(0,new ArrayList<>(), sharedLists); } static SharedList freedomList(List list){ return freedomList(0, freezeList(list)); } static SharedList freedomList(int start, List list){ return new SharedList<>(0, freezeList(start, list)); } private static SharedList freezeList(List list){ return freezeList(0, list); } private static SharedList freezeList(int start, List list){ return freezeList(start, list.size() - start, list); } private static SharedList freezeList(int start, int length, List list){ checkStartAndLength(start, length, list); return new SharedList<>(start, length, list); } } private static final int MAX_END_OFFSET = Integer.MAX_VALUE; private final int endPtr; private final int startPtr; private int size; private final List elementData; private final List sharedSizeList; private final List> sharedLists; private SharedList(int startPtr, SharedList sharedList) { this(startPtr, MAX_END_OFFSET, new ArrayList<>(), new ArrayList<>(List.of(sharedList))); } private SharedList(int startPtr, List elementData) { this(startPtr, MAX_END_OFFSET, elementData, new ArrayList<>()); } private SharedList(int startPtr, List elementData, List> sharedLists) { this(startPtr, MAX_END_OFFSET, elementData, sharedLists); } private SharedList(int startPtr, int endPtr, List elementData) { this(startPtr, endPtr, elementData, new ArrayList<>()); } private SharedList(int startPtr, int endPtr, List elementData, List> sharedLists) { this.endPtr = endPtr; this.startPtr = startPtr; this.elementData = elementData; this.sharedSizeList = new ArrayList<>(); this.sharedLists = new ArrayList<>(); this.size = 0; initSharedList(sharedLists); incrementSizeAndCheckExceed(elementData.size()); } // When initializing the shared lists, stop accumulating the remaining sizes and sharedSizeList entries once a shared list overruns the current endPtr private void initSharedList(List> sharedLists){ for (SharedList sharedList : sharedLists) { if(sharedList.isEmpty()){ continue; } this.sharedLists.add(sharedList); int sharedSize = sharedList.size(); addSharedSizeList(sharedSize); if(incrementSizeAndCheckExceed(sharedSize)){ break; } } } private void addSharedSizeList(int sharedSize){ int size = Math.min(endPtr, sharedSizeList.isEmpty()?sharedSize:sharedSize+sharedSizeList.get(sharedSizeList.size()-1)); sharedSizeList.add(size); } private boolean incrementSizeAndCheckExceed(int num){ int nextSize; if((nextSize = size + num) > endPtr){ size = endPtr; return true; } size = nextSize; return false; } private int selfSizeIndex(){ return sharedSizeList.size(); } /** * The index relative to startPtr. * @param index * @return */ private int offsetIndex(int index){ return startPtr + index; } /** * Maps a logical index onto the offset within the corresponding shared list. * @param index * @return */ private int invokeIndex(int index, int sharedIndex){ return quickInvokeIndex(index, (sharedIndex == 0?0:sharedSizeList.get(sharedIndex-1))); } private int quickInvokeIndex(int index, int offset){ return offsetIndex(index) - offset; } /** *
When sharedIndex is the first segment, checks whether index lies within [0, sharedSizeList.get(sharedIndex)); when it is the last segment, checks whether index lies within [sharedSizeList.get(sharedIndex-1), selfSize()). * @param index * @param sharedIndex * @return */ private boolean inSizeRange(int index, int sharedIndex){ int l = sharedIndex == 0?0:sharedSizeList.get(sharedIndex-1); int r = sharedIndex == sharedSizeList.size()?selfSize():sharedSizeList.get(sharedIndex); return index >= l && index < r; } // Weak cache: the most recently resolved list index private int lastAccess = -1; private int findListIndex(int index){ indexOutOfSizeThrow(index); int selfIndex = selfSizeIndex(); if(sharedLists.isEmpty() && inSizeRange(index, selfIndex)){ return selfIndex; } if(lastAccess != -1 && inSizeRange(index, lastAccess)){ return lastAccess; } lastAccess = -1; int l = 0; int r = sharedSizeList.size()-1; while(l < r){ int mid = (l + r) >> 1; // recompute the midpoint on every round of the binary search if(inSizeRange(index, mid)){ lastAccess = mid; return mid; }else if(index >= sharedSizeList.get(mid)){ l = mid + 1; }else{ r = mid - 1; } } lastAccess = l; return l; } private List findList(int listIndex){ SharedList sharedList = listIndex == selfSizeIndex()?this:sharedLists.get(listIndex); if(sharedList == this){ return this.elementData; } return sharedList; } @Override public T get(int index) { int listIndex = findListIndex(index); int ptr = invokeIndex(index, listIndex); return findList(listIndex).get(ptr); } @Override public T set(int index, T element) { int listIndex = findListIndex(index); int ptr = invokeIndex(index, listIndex); return findList(listIndex).set(ptr, element); } @Override public boolean add(T t) { incrementSizeThrowOutOfSize(1); return this.elementData.add(t); } @Override public int indexOf(Object o) { return super.indexOf(o); } @Override public int lastIndexOf(Object o) { return super.lastIndexOf(o); } public SharedList subList(){ return SharedListBuilder.slice(0, this.size(), this); } public SharedList subList(int start, int length){ return SharedListBuilder.slice(start, length, this); } @Override public int size() { return size; } public int selfSize(){ return this.elementData.size(); } private boolean indexOutOfSize(int index){ return index < 0 || index >= size(); } private void indexOutOfSizeThrow(int index){ if(indexOutOfSize(index)){ throw new IndexOutOfBoundsException(String.format("index %s out of range [0, %s)", index, size())); } } private void incrementSizeThrowOutOfSize(int num){ if (incrementSizeAndCheckExceed(num)) { throw new IndexOutOfBoundsException(String.format("size %s + %s out of end ptr %s", size(), num, endPtr)); } } private static void checkStartAndLength(int start, int length, List list){ listNotNull(list); if(start < 0 || length < 0){ throw new IndexOutOfBoundsException("start and length must be >= 0"); } int totalLength; if((totalLength = start + length) > list.size()){ throw new IndexOutOfBoundsException(String.format("start + length %s must be in range [0, %s)", totalLength, list.size())); } } private static void listNotNull(List list){ if(Objects.isNull(list)){ throw new NullPointerException("list is null"); } } public Iterator iterator(){ return new Itr(); } private Iterator skipIterator(int skipNum){ return new Itr(skipNum); } private class Itr implements Iterator { private int cursor; private int nowSharedListIndex = -1; private Iterator currentSharedItr = null; private Iterator selfItr = null; int sum = 0; private boolean selfFlag = false; private final boolean selfElementIsShared = elementData instanceof SharedList; private int lastCursor = -1; public Itr() { } public Itr(int cursor) { this.cursor = cursor; } @Override public boolean hasNext() {
return !indexOutOfSize(cursor); } /** * Optimized traversal: rather than binary-searching for every index one by one, iterate sequentially; during traversal, once the current index reaches or passes the end of the current segment, switch to the next sharedList. * @return T */ @Override public T next() { int i = cursor++; if(currentSharedItr!=null && currentSharedItr.hasNext()){ return currentSharedItr.next(); } for(;;){ if(selfFlag || sharedLists.isEmpty()){ int ptr = quickInvokeIndex(i, sum); if(selfElementIsShared){ if(selfItr == null) { selfItr = ((SharedList) elementData).skipIterator(ptr); } return selfItr.next(); }else{ return elementData.get(lastCursor = ptr); } } nowSharedListIndex++; if(nowSharedListIndex>=sharedLists.size()){ selfFlag = true; continue; } SharedList sharedList = sharedLists.get(nowSharedListIndex); currentSharedItr = sharedList.skipIterator(quickInvokeIndex(i, sum)); sum += sharedList.size(); return currentSharedItr.next(); } } } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("["); for (T t : this) { sb.append(t).append(","); } if( sb.length() > 1 ) { sb.deleteCharAt(sb.length()-1); } // guard the empty case before trimming the trailing comma sb.append("]"); return sb.toString(); } @Override public boolean containsKey( Object elm ) { try { if( elm instanceof Number ) { int nElm = ( (Number)elm ).intValue(); int nLength = this.size(); if( nElm < 0 || nLength == 0 ){ return false; } return nLength > nElm; } return this.containsKey( Integer.parseInt(elm.toString()) ); } catch ( NumberFormatException e ){ return false; } } @Override public boolean hasOwnProperty( Object elm ) { return this.containsKey( elm ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/SingletonSupplier.java ================================================ package com.pinecone.framework.unit; import java.util.function.Supplier; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.Assert; public class SingletonSupplier implements Supplier { @Nullable private final Supplier instanceSupplier; @Nullable private final Supplier defaultSupplier; @Nullable private volatile T singletonInstance; public SingletonSupplier(@Nullable T instance, Supplier defaultSupplier) { this.instanceSupplier = null; this.defaultSupplier = defaultSupplier; this.singletonInstance = instance; } public SingletonSupplier(@Nullable Supplier instanceSupplier, Supplier defaultSupplier) { this.instanceSupplier = instanceSupplier; this.defaultSupplier = defaultSupplier; } private SingletonSupplier(Supplier supplier) { this.instanceSupplier = supplier; this.defaultSupplier = null; } private SingletonSupplier(T singletonInstance) { this.instanceSupplier = null; this.defaultSupplier = null; this.singletonInstance = singletonInstance; } @Nullable @Override public T get() { T instance = this.singletonInstance; if (instance == null) { synchronized(this) { instance = this.singletonInstance; if (instance == null) { if (this.instanceSupplier != null) { instance = this.instanceSupplier.get(); } if (instance == null && this.defaultSupplier != null) { instance = this.defaultSupplier.get(); } this.singletonInstance = instance; } } } return instance; } public T obtain() { T instance = this.get(); Assert.state(instance != null, "No instance from Supplier"); return instance; } public static SingletonSupplier of(T instance) { return new SingletonSupplier<>(instance); } @Nullable public static SingletonSupplier ofNullable( @Nullable T instance ) { return instance != null ?
new SingletonSupplier<>(instance) : null; } public static SingletonSupplier of( Supplier supplier ) { return new SingletonSupplier<>(supplier); } @Nullable public static SingletonSupplier ofNullable( @Nullable Supplier supplier ) { return supplier != null ? new SingletonSupplier<>(supplier) : null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/TreeMap.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSON; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.*; import java.util.function.Consumer; import java.io.Serializable; import java.util.function.BiConsumer; import java.util.function.BiFunction; public class TreeMap extends AbstractMap implements NavigableMap, Cloneable, Serializable, Mapnut { protected final Comparator comparator; protected transient Entry root; protected transient int size = 0; protected transient int modCount = 0; public TreeMap() { this.comparator = null; } public TreeMap( Comparator comparator ) { this.comparator = comparator; } public TreeMap( Map m ) { this.comparator = null; this.putAll(m); } public TreeMap( SortedMap m ) { this.comparator = m.comparator(); try { this.buildFromSorted(m.size(), m.entrySet().iterator(), null, null); } catch ( IOException | ClassNotFoundException cannotHappen ) { } } // Query Operations public int size() { return this.size; } @Override public boolean hasOwnProperty( Object key ){ return this.containsKey( key ); } public boolean containsKey( Object key ) { return this.getEntry(key) != null; } public boolean containsValue( Object value ) { for ( Entry e = this.getFirstEntry(); e != null; e = successor(e) ) if ( valEquals(value, e.value) ) { return true; } return false; } public V get( Object key ) { Entry p = this.getEntry(key); return (p==null ? null : p.value); } @Override public Map.Entry getEntryByKey( Object compatibleKey ) { return this.getEntry( compatibleKey ); } @Override public Map.Entry getEntryCopyByKey( Object compatibleKey ) { Map.Entry auth = this.getEntryByKey( compatibleKey ); if( auth != null ) { return new KeyValue<>( auth.getKey(), auth.getValue() ); } return null; } public Comparator comparator() { return comparator; } /** * @throws NoSuchElementException {@inheritDoc} */ public K firstKey() { return key(getFirstEntry()); } /** * @throws NoSuchElementException {@inheritDoc} */ public K lastKey() { return key(getLastEntry()); } /** * Copies all of the mappings from the specified map to this map. * These mappings replace any mappings that this map had for any * of the keys currently in the specified map. 
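* <p>Added illustrative note: when this map is empty and the given map is a
* {@code SortedMap} with an equal comparator (and not a {@code ListedSortedMap}),
* the entries are bulk loaded in linear time through {@code buildFromSorted};
* otherwise each entry is inserted one by one. A minimal sketch:
* <pre>{@code
*     TreeMap<String, Integer> dst = new TreeMap<>();
*     dst.putAll( new java.util.TreeMap<>( Map.of( "a", 1, "b", 2 ) ) );  // takes the sorted bulk-load path
* }</pre>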
* * @param map mappings to be stored in this map * @throws ClassCastException if the class of a key or value in * the specified map prevents it from being stored in this map * @throws NullPointerException if the specified map is null or * the specified map contains a null key and this map does not * permit null keys */ public void putAll( Map map ) { this.putMapEntries( map, true ); } protected void putMapEntries( Map map, boolean evict ) { int mapSize = map.size(); if ( this.size == 0 && mapSize != 0 && map instanceof SortedMap && !( map instanceof ListedSortedMap ) ) { Comparator c = ((SortedMap)map).comparator(); if ( c == this.comparator || (c != null && c.equals(this.comparator)) ) { ++this.modCount; try { this.buildFromSorted( mapSize, map.entrySet().iterator(), null, null ); } catch (IOException | ClassNotFoundException cannotHappen) { } return; } } for ( Map.Entry e : map.entrySet() ) { K key = e.getKey(); V value = e.getValue(); this.putVal( key, value, false, evict ); } } /** * Returns this map's entry for the given key, or {@code null} if the map * does not contain an entry for the key. * * @return this map's entry for the given key, or {@code null} if the map * does not contain an entry for the key * @throws ClassCastException if the specified key cannot be compared * with the keys currently in the map * @throws NullPointerException if the specified key is null * and this map uses natural ordering, or its comparator * does not permit null keys */ protected final Entry getEntry(Object key) { // Offload comparator-based version for sake of performance if ( this.comparator != null ) { return this.getEntryUsingComparator(key); } if ( key == null ) { throw new NullPointerException(); } @SuppressWarnings("unchecked") Comparable k = (Comparable) key; Entry p = this.root; while ( p != null ) { int cmp = k.compareTo(p.key); if ( cmp < 0 ) { p = p.left; } else if ( cmp > 0 ) { p = p.right; } else { return p; } } return null; } protected final Entry getEntryUsingComparator( Object key ) { @SuppressWarnings("unchecked") K k = (K) key; Comparator cpr = comparator; if ( cpr != null ) { Entry p = this.root; while ( p != null ) { int cmp = cpr.compare(k, p.key); if ( cmp < 0 ) { p = p.left; } else if ( cmp > 0 ) { p = p.right; } else { return p; } } } return null; } /** * Gets the entry corresponding to the specified key; if no such entry * exists, returns the entry for the least key greater than the specified * key; if no such entry exists (i.e., the greatest key in the Tree is less * than the specified key), returns {@code null}. */ protected final Entry getCeilingEntry(K key) { Entry p = root; while (p != null) { int cmp = compare(key, p.key); if (cmp < 0) { if (p.left != null) p = p.left; else return p; } else if (cmp > 0) { if (p.right != null) { p = p.right; } else { Entry parent = p.parent; Entry ch = p; while (parent != null && ch == parent.right) { ch = parent; parent = parent.parent; } return parent; } } else return p; } return null; } /** * Gets the entry corresponding to the specified key; if no such entry * exists, returns the entry for the greatest key less than the specified * key; if no such entry exists, returns {@code null}. 
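* For example (illustrative): with keys {2, 4, 6}, {@code getFloorEntry(5)} and
* {@code getFloorEntry(4)} both yield the entry for 4, while {@code getFloorEntry(1)}
* yields {@code null}.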
*/ protected final Entry getFloorEntry(K key) { Entry p = root; while (p != null) { int cmp = compare(key, p.key); if (cmp > 0) { if (p.right != null) p = p.right; else return p; } else if (cmp < 0) { if (p.left != null) { p = p.left; } else { Entry parent = p.parent; Entry ch = p; while (parent != null && ch == parent.left) { ch = parent; parent = parent.parent; } return parent; } } else return p; } return null; } /** * Gets the entry for the least key greater than the specified * key; if no such entry exists, returns the entry for the least * key greater than the specified key; if no such entry exists * returns {@code null}. */ protected final Entry getHigherEntry(K key) { Entry p = root; while (p != null) { int cmp = compare(key, p.key); if (cmp < 0) { if (p.left != null) p = p.left; else return p; } else { if (p.right != null) { p = p.right; } else { Entry parent = p.parent; Entry ch = p; while (parent != null && ch == parent.right) { ch = parent; parent = parent.parent; } return parent; } } } return null; } /** * Returns the entry for the greatest key less than the specified key; if * no such entry exists (i.e., the least key in the Tree is greater than * the specified key), returns {@code null}. */ protected final Entry getLowerEntry(K key) { Entry p = root; while (p != null) { int cmp = compare(key, p.key); if (cmp > 0) { if (p.right != null) p = p.right; else return p; } else { if (p.left != null) { p = p.left; } else { Entry parent = p.parent; Entry ch = p; while (parent != null && ch == parent.left) { ch = parent; parent = parent.parent; } return parent; } } } return null; } /** Overridable Method, for unified customize.*/ protected Entry spawnNode( K key, V value, Entry parent ) { return new Entry<>( key, value, parent ); } protected void afterNodeAccess( Entry p ) { } protected void afterNodeInsertion( boolean evict ) { } protected void afterNodeRemoval( Entry p ) { } protected V putVal( K key, V value, boolean onlyIfAbsent, boolean evict ) { Entry t = this.root; if ( t == null ) { this.compare( key, key ); // type (and possibly null) check this.root = this.spawnNode( key, value, null ); this.size = 1; ++this.modCount; return null; } int cmp; Entry parent; // split comparator and comparable paths Comparator cpr = this.comparator; Entry legacy = null; if ( cpr != null ) { do { parent = t; cmp = cpr.compare(key, t.key); if (cmp < 0) { t = t.left; } else if ( cmp > 0 ) { t = t.right; } else { legacy = t; break; } } while ( t != null ); } else { if ( key == null ) { throw new NullPointerException(); } @SuppressWarnings("unchecked") Comparable k = (Comparable) key; do { parent = t; cmp = k.compareTo(t.key); if ( cmp < 0 ) { t = t.left; } else if ( cmp > 0 ) { t = t.right; } else { legacy = t; break; } } while ( t != null ); } if ( legacy != null ) { // existing mapping for key V oldValue = legacy.value; if ( !onlyIfAbsent || oldValue == null ) { legacy.setValue( value ); } this.afterNodeAccess( legacy ); return oldValue; } Entry e = this.spawnNode( key, value, parent ); if ( cmp < 0 ) { parent.left = e; } else { parent.right = e; } this.fixAfterInsertion(e); ++this.size; ++this.modCount; this.afterNodeInsertion( evict ); return null; } public V put( K key, V value ) { return this.putVal( key, value, false, true ); } @Override public V putIfAbsent(K key, V value) { return this.putVal( key, value, true, true ); } public V remove( Object key ) { Entry p = this.getEntry(key); if ( p == null ) { return null; } V oldValue = p.value; this.deleteEntry(p); return oldValue; } public void 
clear() { this.modCount++; this.size = 0; this.root = null; } protected TreeMap superClone() { TreeMap clone; try { clone = (TreeMap) super.clone(); } catch ( CloneNotSupportedException e ) { throw new InternalError(e); } // Put clone into "virgin" state (except for comparator) clone.root = null; clone.size = 0; clone.modCount = 0; clone.entrySet = null; clone.navigableKeySet = null; clone.descendingMap = null; return clone; } /** * Returns a shallow copy of this {@code TreeMap} instance. (The keys and * values themselves are not cloned.) * * @return a shallow copy of this map */ public Object clone() { TreeMap clone = this.superClone(); // Initialize clone with our mappings try { clone.buildFromSorted(size, entrySet().iterator(), null, null); } catch ( IOException | ClassNotFoundException cannotHappen ) { } return clone; } // NavigableMap API methods public Map.Entry firstEntry() { return exportEntry(getFirstEntry()); } public Map.Entry lastEntry() { return exportEntry(getLastEntry()); } public Map.Entry pollFirstEntry() { Entry p = getFirstEntry(); Map.Entry result = exportEntry(p); if (p != null) deleteEntry(p); return result; } public Map.Entry pollLastEntry() { Entry p = getLastEntry(); Map.Entry result = exportEntry(p); if (p != null) deleteEntry(p); return result; } public Map.Entry lowerEntry(K key) { return exportEntry(getLowerEntry(key)); } public K lowerKey(K key) { return keyOrNull(getLowerEntry(key)); } public Map.Entry floorEntry(K key) { return exportEntry(getFloorEntry(key)); } public K floorKey(K key) { return keyOrNull(getFloorEntry(key)); } public Map.Entry ceilingEntry(K key) { return exportEntry(getCeilingEntry(key)); } public K ceilingKey(K key) { return keyOrNull(getCeilingEntry(key)); } public Map.Entry higherEntry(K key) { return exportEntry(getHigherEntry(key)); } public K higherKey(K key) { return keyOrNull(getHigherEntry(key)); } // Views /** * Fields initialized to contain an instance of the entry set view * the first time this view is requested. Views are stateless, so * there's no reason to create more than one. */ protected transient Set > entrySet; protected transient KeySet navigableKeySet; protected transient NavigableMap descendingMap; /** * Returns a {@link Set} view of the keys contained in this map. * *
The set's iterator returns the keys in ascending order. * The set's spliterator is * late-binding, * fail-fast, and additionally reports {@link Spliterator#SORTED} * and {@link Spliterator#ORDERED} with an encounter order that is ascending * key order. The spliterator's comparator (see * {@link java.util.Spliterator#getComparator()}) is {@code null} if * the tree map's comparator (see {@link #comparator()}) is {@code null}. * Otherwise, the spliterator's comparator is the same as or imposes the * same total ordering as the tree map's comparator. * *
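* An illustrative sketch of the view's live behavior (not part of the original doc):
* <pre>{@code
*     TreeMap<String, Integer> m = new TreeMap<>();
*     m.put( "a", 1 ); m.put( "b", 2 );
*     m.keySet().remove( "a" );    // removes the mapping from the map itself
*     m.containsKey( "a" );        // false
* }</pre>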
The set is backed by the map, so changes to the map are * reflected in the set, and vice-versa. If the map is modified * while an iteration over the set is in progress (except through * the iterator's own {@code remove} operation), the results of * the iteration are undefined. The set supports element removal, * which removes the corresponding mapping from the map, via the * {@code Iterator.remove}, {@code Set.remove}, * {@code removeAll}, {@code retainAll}, and {@code clear} * operations. It does not support the {@code add} or {@code addAll} * operations. */ public Set keySet() { return navigableKeySet(); } /** * @since 1.6 */ public NavigableSet navigableKeySet() { KeySet nks = navigableKeySet; return (nks != null) ? nks : (navigableKeySet = new KeySet<>(this)); } /** * @since 1.6 */ public NavigableSet descendingKeySet() { return descendingMap().navigableKeySet(); } /** * Returns a {@link Collection} view of the values contained in this map. * *
The collection's iterator returns the values in ascending order * of the corresponding keys. The collection's spliterator is * late-binding, * fail-fast, and additionally reports {@link Spliterator#ORDERED} * with an encounter order that is ascending order of the corresponding * keys. * *
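* An illustrative sketch (not part of the original doc): removing through the values
* view drops the first mapping, in key order, whose value matches:
* <pre>{@code
*     TreeMap<String, Integer> m = new TreeMap<>();
*     m.put( "a", 1 ); m.put( "b", 1 );
*     m.values().remove( 1 );      // removes the entry for "a", the first match
* }</pre>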
The collection is backed by the map, so changes to the map are * reflected in the collection, and vice-versa. If the map is * modified while an iteration over the collection is in progress * (except through the iterator's own {@code remove} operation), * the results of the iteration are undefined. The collection * supports element removal, which removes the corresponding * mapping from the map, via the {@code Iterator.remove}, * {@code Collection.remove}, {@code removeAll}, * {@code retainAll} and {@code clear} operations. It does not * support the {@code add} or {@code addAll} operations. */ public Collection values() { Collection vs = values; if (vs == null) { vs = new Values(); values = vs; } return vs; } @Override public Set > entrySet() { Set> es = this.entrySet; return (es != null) ? es : (this.entrySet = new EntrySet()); } /** * @since 1.6 */ public NavigableMap descendingMap() { NavigableMap km = descendingMap; return (km != null) ? km : (descendingMap = new DescendingSubMap<>(this, true, null, true, true, null, true)); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromKey} or {@code toKey} is * null and this map uses natural ordering, or its comparator * does not permit null keys * @throws IllegalArgumentException {@inheritDoc} * @since 1.6 */ public NavigableMap subMap(K fromKey, boolean fromInclusive, K toKey, boolean toInclusive) { return new AscendingSubMap<>(this, false, fromKey, fromInclusive, false, toKey, toInclusive); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code toKey} is null * and this map uses natural ordering, or its comparator * does not permit null keys * @throws IllegalArgumentException {@inheritDoc} * @since 1.6 */ public NavigableMap headMap(K toKey, boolean inclusive) { return new AscendingSubMap<>(this, true, null, true, false, toKey, inclusive); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromKey} is null * and this map uses natural ordering, or its comparator * does not permit null keys * @throws IllegalArgumentException {@inheritDoc} * @since 1.6 */ public NavigableMap tailMap(K fromKey, boolean inclusive) { return new AscendingSubMap<>(this, false, fromKey, inclusive, true, null, true); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromKey} or {@code toKey} is * null and this map uses natural ordering, or its comparator * does not permit null keys * @throws IllegalArgumentException {@inheritDoc} */ public SortedMap subMap(K fromKey, K toKey) { return subMap(fromKey, true, toKey, false); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code toKey} is null * and this map uses natural ordering, or its comparator * does not permit null keys * @throws IllegalArgumentException {@inheritDoc} */ public SortedMap headMap(K toKey) { return headMap(toKey, false); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromKey} is null * and this map uses natural ordering, or its comparator * does not permit null keys * @throws IllegalArgumentException {@inheritDoc} */ public SortedMap tailMap(K fromKey) { return tailMap(fromKey, true); } @Override public boolean replace( K key, V oldValue, V newValue ) { Entry p = this.getEntry(key); if ( p != null && Objects.equals(oldValue, p.value) ) { p.value = newValue; this.afterNodeAccess( p ); return true; } return false; } @Override public V replace( K key, V value ) { Entry p = 
getEntry(key); if ( p != null ) { V oldValue = p.value; p.value = value; this.afterNodeAccess( p ); return oldValue; } return null; } @Override public void forEach(BiConsumer action) { Objects.requireNonNull(action); int expectedModCount = modCount; for (Entry e = getFirstEntry(); e != null; e = successor(e)) { action.accept(e.key, e.value); if (expectedModCount != modCount) { throw new ConcurrentModificationException(); } } } @Override public void replaceAll(BiFunction function) { Objects.requireNonNull(function); int expectedModCount = modCount; for (Entry e = getFirstEntry(); e != null; e = successor(e)) { e.value = function.apply(e.key, e.value); if (expectedModCount != modCount) { throw new ConcurrentModificationException(); } } } // View class support class Values extends AbstractCollection { public Iterator iterator() { return new ValueIterator(getFirstEntry()); } public int size() { return TreeMap.this.size(); } public boolean contains(Object o) { return TreeMap.this.containsValue(o); } public boolean remove(Object o) { for (Entry e = getFirstEntry(); e != null; e = successor(e)) { if (valEquals(e.getValue(), o)) { deleteEntry(e); return true; } } return false; } public void clear() { TreeMap.this.clear(); } public Spliterator spliterator() { return new ValueSpliterator<>(TreeMap.this, null, null, 0, -1, 0); } } class EntrySet extends AbstractSet> { public Iterator> iterator() { return new EntryIterator(getFirstEntry()); } public boolean contains(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry entry = (Map.Entry) o; Object value = entry.getValue(); Entry p = getEntry(entry.getKey()); return p != null && valEquals(p.getValue(), value); } public boolean remove(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry entry = (Map.Entry) o; Object value = entry.getValue(); Entry p = getEntry(entry.getKey()); if (p != null && valEquals(p.getValue(), value)) { deleteEntry(p); return true; } return false; } public int size() { return TreeMap.this.size(); } public void clear() { TreeMap.this.clear(); } public Spliterator> spliterator() { return new EntrySpliterator<>(TreeMap.this, null, null, 0, -1, 0); } } /* * Unlike Values and EntrySet, the KeySet class is static, * delegating to a NavigableMap to allow use by SubMaps, which * outweighs the ugliness of needing type-tests for the following * Iterator methods that are defined appropriately in main versus * submap classes. 
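Added illustrative consequence: since KeySet delegates purely to a NavigableMap,
one view class serves both the full map and its submap views, e.g. (sketch):

    TreeMap<String, Integer> m = new TreeMap<>();
    NavigableSet<String> full = m.navigableKeySet();                      // KeySet over the TreeMap
    NavigableSet<String> part = m.headMap( "k", true ).navigableKeySet(); // KeySet over a NavigableSubMap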
*/ Iterator keyIterator() { return new KeyIterator(getFirstEntry()); } Iterator descendingKeyIterator() { return new DescendingKeyIterator(getLastEntry()); } static final class KeySet extends AbstractSet implements NavigableSet { private final NavigableMap m; KeySet(NavigableMap map) { m = map; } public Iterator iterator() { if (m instanceof TreeMap) return ((TreeMap)m).keyIterator(); else return ((TreeMap.NavigableSubMap)m).keyIterator(); } public Iterator descendingIterator() { if (m instanceof TreeMap) return ((TreeMap)m).descendingKeyIterator(); else return ((TreeMap.NavigableSubMap)m).descendingKeyIterator(); } public int size() { return m.size(); } public boolean isEmpty() { return m.isEmpty(); } public boolean contains(Object o) { return m.containsKey(o); } public void clear() { m.clear(); } public E lower(E e) { return m.lowerKey(e); } public E floor(E e) { return m.floorKey(e); } public E ceiling(E e) { return m.ceilingKey(e); } public E higher(E e) { return m.higherKey(e); } public E first() { return m.firstKey(); } public E last() { return m.lastKey(); } public Comparator comparator() { return m.comparator(); } public E pollFirst() { Map.Entry e = m.pollFirstEntry(); return (e == null) ? null : e.getKey(); } public E pollLast() { Map.Entry e = m.pollLastEntry(); return (e == null) ? null : e.getKey(); } public boolean remove(Object o) { int oldSize = size(); m.remove(o); return size() != oldSize; } public NavigableSet subSet(E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) { return new KeySet<>(m.subMap(fromElement, fromInclusive, toElement, toInclusive)); } public NavigableSet headSet(E toElement, boolean inclusive) { return new KeySet<>(m.headMap(toElement, inclusive)); } public NavigableSet tailSet(E fromElement, boolean inclusive) { return new KeySet<>(m.tailMap(fromElement, inclusive)); } public SortedSet subSet(E fromElement, E toElement) { return subSet(fromElement, true, toElement, false); } public SortedSet headSet(E toElement) { return headSet(toElement, false); } public SortedSet tailSet(E fromElement) { return tailSet(fromElement, true); } public NavigableSet descendingSet() { return new KeySet<>(m.descendingMap()); } public Spliterator spliterator() { return keySpliteratorFor(m); } } /** * Base class for TreeMap Iterators */ abstract class PrivateEntryIterator implements Iterator { Entry next; Entry lastReturned; int expectedModCount; PrivateEntryIterator(Entry first) { expectedModCount = modCount; lastReturned = null; next = first; } public final boolean hasNext() { return next != null; } final Entry nextEntry() { Entry e = next; if (e == null) throw new NoSuchElementException(); if (modCount != expectedModCount) throw new ConcurrentModificationException(); next = successor(e); lastReturned = e; return e; } final Entry prevEntry() { Entry e = next; if (e == null) throw new NoSuchElementException(); if (modCount != expectedModCount) throw new ConcurrentModificationException(); next = predecessor(e); lastReturned = e; return e; } public void remove() { if (lastReturned == null) throw new IllegalStateException(); if (modCount != expectedModCount) throw new ConcurrentModificationException(); // deleted entries are replaced by their successors if (lastReturned.left != null && lastReturned.right != null) next = lastReturned; deleteEntry(lastReturned); expectedModCount = modCount; lastReturned = null; } } final class EntryIterator extends PrivateEntryIterator> { EntryIterator(Entry first) { super(first); } public Map.Entry next() { return 
nextEntry(); } } final class ValueIterator extends PrivateEntryIterator { ValueIterator(Entry first) { super(first); } public V next() { return nextEntry().value; } } final class KeyIterator extends PrivateEntryIterator { KeyIterator(Entry first) { super(first); } public K next() { return nextEntry().key; } } final class DescendingKeyIterator extends PrivateEntryIterator { DescendingKeyIterator(Entry first) { super(first); } public K next() { return prevEntry().key; } public void remove() { if (lastReturned == null) throw new IllegalStateException(); if (modCount != expectedModCount) throw new ConcurrentModificationException(); deleteEntry(lastReturned); lastReturned = null; expectedModCount = modCount; } } // Little utilities /** * Compares two keys using the correct comparison method for this TreeMap. */ @SuppressWarnings("unchecked") final int compare(Object k1, Object k2) { return this.comparator == null ? ((Comparable)k1).compareTo((K)k2) : this.comparator.compare((K)k1, (K)k2); } /** * Test two values for equality. Differs from o1.equals(o2) only in * that it copes with {@code null} o1 properly. */ static final boolean valEquals(Object o1, Object o2) { return (o1==null ? o2==null : o1.equals(o2)); } /** * Return SimpleImmutableEntry for entry, or null if null */ static Map.Entry exportEntry(TreeMap.Entry e) { return (e == null) ? null : new AbstractMap.SimpleImmutableEntry<>(e); } /** * Return key for entry, or null if null */ static K keyOrNull(TreeMap.Entry e) { return (e == null) ? null : e.key; } /** * Returns the key corresponding to the specified Entry. * @throws NoSuchElementException if the Entry is null */ static K key(Entry e) { if (e==null) throw new NoSuchElementException(); return e.key; } // SubMaps /** * Dummy value serving as unmatchable fence key for unbounded * SubMapIterators */ private static final Object UNBOUNDED = new Object(); /** * @serial include */ abstract static class NavigableSubMap extends AbstractMap implements NavigableMap, java.io.Serializable { private static final long serialVersionUID = -2102997345730753016L; /** * The backing map. */ final TreeMap m; /** * Endpoints are represented as triples (fromStart, lo, * loInclusive) and (toEnd, hi, hiInclusive). If fromStart is * true, then the low (absolute) bound is the start of the * backing map, and the other values are ignored. Otherwise, * if loInclusive is true, lo is the inclusive bound, else lo * is the exclusive bound. Similarly for the upper bound. 
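Added worked illustration of the encoding, matching the factory calls in TreeMap:

    headMap(k, inclusive)     -> (fromStart=true,  lo=null, loInclusive=true,      toEnd=false, hi=k,    hiInclusive=inclusive)
    tailMap(k, inclusive)     -> (fromStart=false, lo=k,    loInclusive=inclusive, toEnd=true,  hi=null, hiInclusive=true)
    subMap(a, true, b, false) -> (fromStart=false, lo=a,    loInclusive=true,      toEnd=false, hi=b,    hiInclusive=false)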
*/ final K lo, hi; final boolean fromStart, toEnd; final boolean loInclusive, hiInclusive; NavigableSubMap(TreeMap m, boolean fromStart, K lo, boolean loInclusive, boolean toEnd, K hi, boolean hiInclusive) { if (!fromStart && !toEnd) { if (m.compare(lo, hi) > 0) throw new IllegalArgumentException("fromKey > toKey"); } else { if (!fromStart) // type check m.compare(lo, lo); if (!toEnd) m.compare(hi, hi); } this.m = m; this.fromStart = fromStart; this.lo = lo; this.loInclusive = loInclusive; this.toEnd = toEnd; this.hi = hi; this.hiInclusive = hiInclusive; } // internal utilities final boolean tooLow(Object key) { if (!fromStart) { int c = m.compare(key, lo); if (c < 0 || (c == 0 && !loInclusive)) return true; } return false; } final boolean tooHigh(Object key) { if (!toEnd) { int c = m.compare(key, hi); if (c > 0 || (c == 0 && !hiInclusive)) return true; } return false; } final boolean inRange(Object key) { return !tooLow(key) && !tooHigh(key); } final boolean inClosedRange(Object key) { return (fromStart || m.compare(key, lo) >= 0) && (toEnd || m.compare(hi, key) >= 0); } final boolean inRange(Object key, boolean inclusive) { return inclusive ? inRange(key) : inClosedRange(key); } /* * Absolute versions of relation operations. * Subclasses map to these using like-named "sub" * versions that invert senses for descending maps */ final TreeMap.Entry absLowest() { TreeMap.Entry e = (fromStart ? m.getFirstEntry() : (loInclusive ? m.getCeilingEntry(lo) : m.getHigherEntry(lo))); return (e == null || tooHigh(e.key)) ? null : e; } final TreeMap.Entry absHighest() { TreeMap.Entry e = (toEnd ? m.getLastEntry() : (hiInclusive ? m.getFloorEntry(hi) : m.getLowerEntry(hi))); return (e == null || tooLow(e.key)) ? null : e; } final TreeMap.Entry absCeiling(K key) { if (tooLow(key)) return absLowest(); TreeMap.Entry e = m.getCeilingEntry(key); return (e == null || tooHigh(e.key)) ? null : e; } final TreeMap.Entry absHigher(K key) { if (tooLow(key)) return absLowest(); TreeMap.Entry e = m.getHigherEntry(key); return (e == null || tooHigh(e.key)) ? null : e; } final TreeMap.Entry absFloor(K key) { if (tooHigh(key)) return absHighest(); TreeMap.Entry e = m.getFloorEntry(key); return (e == null || tooLow(e.key)) ? null : e; } final TreeMap.Entry absLower(K key) { if (tooHigh(key)) return absHighest(); TreeMap.Entry e = m.getLowerEntry(key); return (e == null || tooLow(e.key)) ? null : e; } /** Returns the absolute high fence for ascending traversal */ final TreeMap.Entry absHighFence() { return (toEnd ? null : (hiInclusive ? m.getHigherEntry(hi) : m.getCeilingEntry(hi))); } /** Return the absolute low fence for descending traversal */ final TreeMap.Entry absLowFence() { return (fromStart ? null : (loInclusive ? m.getLowerEntry(lo) : m.getFloorEntry(lo))); } // Abstract methods defined in ascending vs descending classes // These relay to the appropriate absolute versions abstract TreeMap.Entry subLowest(); abstract TreeMap.Entry subHighest(); abstract TreeMap.Entry subCeiling(K key); abstract TreeMap.Entry subHigher(K key); abstract TreeMap.Entry subFloor(K key); abstract TreeMap.Entry subLower(K key); /** Returns ascending iterator from the perspective of this submap */ abstract Iterator keyIterator(); abstract Spliterator keySpliterator(); /** Returns descending iterator from the perspective of this submap */ abstract Iterator descendingKeyIterator(); // public methods public boolean isEmpty() { return (fromStart && toEnd) ? 
m.isEmpty() : entrySet().isEmpty(); } public int size() { return (fromStart && toEnd) ? m.size() : entrySet().size(); } public final boolean containsKey(Object key) { return inRange(key) && m.containsKey(key); } public final V put(K key, V value) { if (!inRange(key)) throw new IllegalArgumentException("key out of range"); return m.put(key, value); } public final V get(Object key) { return !inRange(key) ? null : m.get(key); } public final V remove(Object key) { return !inRange(key) ? null : m.remove(key); } public final Map.Entry ceilingEntry(K key) { return exportEntry(subCeiling(key)); } public final K ceilingKey(K key) { return keyOrNull(subCeiling(key)); } public final Map.Entry higherEntry(K key) { return exportEntry(subHigher(key)); } public final K higherKey(K key) { return keyOrNull(subHigher(key)); } public final Map.Entry floorEntry(K key) { return exportEntry(subFloor(key)); } public final K floorKey(K key) { return keyOrNull(subFloor(key)); } public final Map.Entry lowerEntry(K key) { return exportEntry(subLower(key)); } public final K lowerKey(K key) { return keyOrNull(subLower(key)); } public final K firstKey() { return key(subLowest()); } public final K lastKey() { return key(subHighest()); } public final Map.Entry firstEntry() { return exportEntry(subLowest()); } public final Map.Entry lastEntry() { return exportEntry(subHighest()); } public final Map.Entry pollFirstEntry() { TreeMap.Entry e = subLowest(); Map.Entry result = exportEntry(e); if (e != null) m.deleteEntry(e); return result; } public final Map.Entry pollLastEntry() { TreeMap.Entry e = subHighest(); Map.Entry result = exportEntry(e); if (e != null) m.deleteEntry(e); return result; } // Views transient NavigableMap descendingMapView; transient EntrySetView entrySetView; transient KeySet navigableKeySetView; public final NavigableSet navigableKeySet() { KeySet nksv = navigableKeySetView; return (nksv != null) ? 
nksv : (navigableKeySetView = new TreeMap.KeySet<>(this)); } public final Set keySet() { return navigableKeySet(); } public NavigableSet descendingKeySet() { return descendingMap().navigableKeySet(); } public final SortedMap subMap(K fromKey, K toKey) { return subMap(fromKey, true, toKey, false); } public final SortedMap headMap(K toKey) { return headMap(toKey, false); } public final SortedMap tailMap(K fromKey) { return tailMap(fromKey, true); } // View classes abstract class EntrySetView extends AbstractSet> { private transient int size = -1, sizeModCount; public int size() { if (fromStart && toEnd) return m.size(); if (size == -1 || sizeModCount != m.modCount) { sizeModCount = m.modCount; size = 0; Iterator i = iterator(); while (i.hasNext()) { size++; i.next(); } } return size; } public boolean isEmpty() { TreeMap.Entry n = absLowest(); return n == null || tooHigh(n.key); } public boolean contains(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry entry = (Map.Entry) o; Object key = entry.getKey(); if (!inRange(key)) return false; TreeMap.Entry node = m.getEntry(key); return node != null && valEquals(node.getValue(), entry.getValue()); } public boolean remove(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry entry = (Map.Entry) o; Object key = entry.getKey(); if (!inRange(key)) return false; TreeMap.Entry node = m.getEntry(key); if (node!=null && valEquals(node.getValue(), entry.getValue())) { m.deleteEntry(node); return true; } return false; } } /** * Iterators for SubMaps */ abstract class SubMapIterator implements Iterator { TreeMap.Entry lastReturned; TreeMap.Entry next; final Object fenceKey; int expectedModCount; SubMapIterator(TreeMap.Entry first, TreeMap.Entry fence) { expectedModCount = m.modCount; lastReturned = null; next = first; fenceKey = fence == null ? 
UNBOUNDED : fence.key; } public final boolean hasNext() { return next != null && next.key != fenceKey; } final TreeMap.Entry nextEntry() { TreeMap.Entry e = next; if (e == null || e.key == fenceKey) throw new NoSuchElementException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); next = successor(e); lastReturned = e; return e; } final TreeMap.Entry prevEntry() { TreeMap.Entry e = next; if (e == null || e.key == fenceKey) throw new NoSuchElementException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); next = predecessor(e); lastReturned = e; return e; } final void removeAscending() { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); // deleted entries are replaced by their successors if (lastReturned.left != null && lastReturned.right != null) next = lastReturned; m.deleteEntry(lastReturned); lastReturned = null; expectedModCount = m.modCount; } final void removeDescending() { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); m.deleteEntry(lastReturned); lastReturned = null; expectedModCount = m.modCount; } } final class SubMapEntryIterator extends SubMapIterator> { SubMapEntryIterator(TreeMap.Entry first, TreeMap.Entry fence) { super(first, fence); } public Map.Entry next() { return nextEntry(); } public void remove() { removeAscending(); } } final class DescendingSubMapEntryIterator extends SubMapIterator> { DescendingSubMapEntryIterator(TreeMap.Entry last, TreeMap.Entry fence) { super(last, fence); } public Map.Entry next() { return prevEntry(); } public void remove() { removeDescending(); } } // Implement minimal Spliterator as KeySpliterator backup final class SubMapKeyIterator extends SubMapIterator implements Spliterator { SubMapKeyIterator(TreeMap.Entry first, TreeMap.Entry fence) { super(first, fence); } public K next() { return nextEntry().key; } public void remove() { removeAscending(); } public Spliterator trySplit() { return null; } public void forEachRemaining(Consumer action) { while (hasNext()) action.accept(next()); } public boolean tryAdvance(Consumer action) { if (hasNext()) { action.accept(next()); return true; } return false; } public long estimateSize() { return Long.MAX_VALUE; } public int characteristics() { return Spliterator.DISTINCT | Spliterator.ORDERED | Spliterator.SORTED; } public final Comparator getComparator() { return NavigableSubMap.this.comparator(); } } final class DescendingSubMapKeyIterator extends SubMapIterator implements Spliterator { DescendingSubMapKeyIterator(TreeMap.Entry last, TreeMap.Entry fence) { super(last, fence); } public K next() { return prevEntry().key; } public void remove() { removeDescending(); } public Spliterator trySplit() { return null; } public void forEachRemaining(Consumer action) { while (hasNext()) action.accept(next()); } public boolean tryAdvance(Consumer action) { if (hasNext()) { action.accept(next()); return true; } return false; } public long estimateSize() { return Long.MAX_VALUE; } public int characteristics() { return Spliterator.DISTINCT | Spliterator.ORDERED; } } } /** * @serial include */ static final class AscendingSubMap extends NavigableSubMap { private static final long serialVersionUID = 912986545866124060L; AscendingSubMap(TreeMap m, boolean fromStart, K lo, boolean loInclusive, boolean toEnd, K hi, boolean hiInclusive) { super(m, fromStart, lo, 
loInclusive, toEnd, hi, hiInclusive); } public Comparator comparator() { return m.comparator(); } public NavigableMap subMap(K fromKey, boolean fromInclusive, K toKey, boolean toInclusive) { if (!inRange(fromKey, fromInclusive)) throw new IllegalArgumentException("fromKey out of range"); if (!inRange(toKey, toInclusive)) throw new IllegalArgumentException("toKey out of range"); return new AscendingSubMap<>(m, false, fromKey, fromInclusive, false, toKey, toInclusive); } public NavigableMap headMap(K toKey, boolean inclusive) { if (!inRange(toKey, inclusive)) throw new IllegalArgumentException("toKey out of range"); return new AscendingSubMap<>(m, fromStart, lo, loInclusive, false, toKey, inclusive); } public NavigableMap tailMap(K fromKey, boolean inclusive) { if (!inRange(fromKey, inclusive)) throw new IllegalArgumentException("fromKey out of range"); return new AscendingSubMap<>(m, false, fromKey, inclusive, toEnd, hi, hiInclusive); } public NavigableMap descendingMap() { NavigableMap mv = descendingMapView; return (mv != null) ? mv : (descendingMapView = new DescendingSubMap<>(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive)); } Iterator keyIterator() { return new SubMapKeyIterator(absLowest(), absHighFence()); } Spliterator keySpliterator() { return new SubMapKeyIterator(absLowest(), absHighFence()); } Iterator descendingKeyIterator() { return new DescendingSubMapKeyIterator(absHighest(), absLowFence()); } final class AscendingEntrySetView extends EntrySetView { public Iterator> iterator() { return new SubMapEntryIterator(absLowest(), absHighFence()); } } public Set> entrySet() { EntrySetView es = entrySetView; return (es != null) ? es : (entrySetView = new AscendingEntrySetView()); } TreeMap.Entry subLowest() { return absLowest(); } TreeMap.Entry subHighest() { return absHighest(); } TreeMap.Entry subCeiling(K key) { return absCeiling(key); } TreeMap.Entry subHigher(K key) { return absHigher(key); } TreeMap.Entry subFloor(K key) { return absFloor(key); } TreeMap.Entry subLower(K key) { return absLower(key); } } /** * @serial include */ static final class DescendingSubMap extends NavigableSubMap { private static final long serialVersionUID = 912986545866120460L; DescendingSubMap(TreeMap m, boolean fromStart, K lo, boolean loInclusive, boolean toEnd, K hi, boolean hiInclusive) { super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive); } private final Comparator reverseComparator = Collections.reverseOrder(m.comparator); public Comparator comparator() { return reverseComparator; } public NavigableMap subMap(K fromKey, boolean fromInclusive, K toKey, boolean toInclusive) { if (!inRange(fromKey, fromInclusive)) throw new IllegalArgumentException("fromKey out of range"); if (!inRange(toKey, toInclusive)) throw new IllegalArgumentException("toKey out of range"); return new DescendingSubMap<>(m, false, toKey, toInclusive, false, fromKey, fromInclusive); } public NavigableMap headMap(K toKey, boolean inclusive) { if (!inRange(toKey, inclusive)) throw new IllegalArgumentException("toKey out of range"); return new DescendingSubMap<>(m, false, toKey, inclusive, toEnd, hi, hiInclusive); } public NavigableMap tailMap(K fromKey, boolean inclusive) { if (!inRange(fromKey, inclusive)) throw new IllegalArgumentException("fromKey out of range"); return new DescendingSubMap<>(m, fromStart, lo, loInclusive, false, fromKey, inclusive); } public NavigableMap descendingMap() { NavigableMap mv = descendingMapView; return (mv != null) ? 
mv : (descendingMapView = new AscendingSubMap<>(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive)); } Iterator keyIterator() { return new DescendingSubMapKeyIterator(absHighest(), absLowFence()); } Spliterator keySpliterator() { return new DescendingSubMapKeyIterator(absHighest(), absLowFence()); } Iterator descendingKeyIterator() { return new SubMapKeyIterator(absLowest(), absHighFence()); } final class DescendingEntrySetView extends EntrySetView { public Iterator> iterator() { return new DescendingSubMapEntryIterator(absHighest(), absLowFence()); } } public Set> entrySet() { EntrySetView es = entrySetView; return (es != null) ? es : (entrySetView = new DescendingEntrySetView()); } TreeMap.Entry subLowest() { return absHighest(); } TreeMap.Entry subHighest() { return absLowest(); } TreeMap.Entry subCeiling(K key) { return absFloor(key); } TreeMap.Entry subHigher(K key) { return absLower(key); } TreeMap.Entry subFloor(K key) { return absCeiling(key); } TreeMap.Entry subLower(K key) { return absHigher(key); } } /** * This class exists solely for the sake of serialization * compatibility with previous releases of TreeMap that did not * support NavigableMap. It translates an old-version SubMap into * a new-version AscendingSubMap. This class is never otherwise * used. * * @serial include */ private class SubMap extends AbstractMap implements SortedMap, java.io.Serializable { private static final long serialVersionUID = -6520786458950516097L; private boolean fromStart = false, toEnd = false; private K fromKey, toKey; private Object readResolve() { return new AscendingSubMap<>(TreeMap.this, fromStart, fromKey, true, toEnd, toKey, false); } public Set> entrySet() { throw new InternalError(); } public K lastKey() { throw new InternalError(); } public K firstKey() { throw new InternalError(); } public SortedMap subMap(K fromKey, K toKey) { throw new InternalError(); } public SortedMap headMap(K toKey) { throw new InternalError(); } public SortedMap tailMap(K fromKey) { throw new InternalError(); } public Comparator comparator() { throw new InternalError(); } } // Red-black mechanics protected static final boolean RED = false; protected static final boolean BLACK = true; protected static class Entry implements Map.Entry, Pinenut { K key; V value; Entry left; Entry right; Entry parent; boolean color = BLACK; Entry( K key, V value, Entry parent ) { this.key = key; this.value = value; this.parent = parent; } /** * Returns the key. * * @return the key */ public K getKey() { return key; } /** * Returns the value associated with the key. * * @return the value associated with the key */ public V getValue() { return value; } /** * Replaces the value currently associated with the key with the given * value. * * @return the value associated with the key before this method was * called */ public V setValue(V value) { V oldValue = this.value; this.value = value; return oldValue; } public boolean equals(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; return valEquals(key,e.getKey()) && valEquals(value,e.getValue()); } @Override public int hashCode() { int keyHash = (key==null ? 0 : key.hashCode()); int valueHash = (value==null ? 
0 : value.hashCode()); return keyHash ^ valueHash; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return "{" + StringUtils.jsonQuote( this.key.toString() ) + ":" + JSON.stringify( this.value ) + "}"; } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } } /** * Returns the first Entry in the TreeMap (according to the TreeMap's * key-sort function). Returns null if the TreeMap is empty. */ final Entry getFirstEntry() { Entry p = root; if (p != null) while (p.left != null) p = p.left; return p; } /** * Returns the last Entry in the TreeMap (according to the TreeMap's * key-sort function). Returns null if the TreeMap is empty. */ final Entry getLastEntry() { Entry p = root; if (p != null) while (p.right != null) p = p.right; return p; } /** * Returns the successor of the specified Entry, or null if no such. */ static TreeMap.Entry successor(Entry t) { if (t == null) return null; else if (t.right != null) { Entry p = t.right; while (p.left != null) p = p.left; return p; } else { Entry p = t.parent; Entry ch = t; while (p != null && ch == p.right) { ch = p; p = p.parent; } return p; } } /** * Returns the predecessor of the specified Entry, or null if no such. */ static Entry predecessor(Entry t) { if (t == null) return null; else if (t.left != null) { Entry p = t.left; while (p.right != null) p = p.right; return p; } else { Entry p = t.parent; Entry ch = t; while (p != null && ch == p.left) { ch = p; p = p.parent; } return p; } } /** * Balancing operations. * * Implementations of rebalancings during insertion and deletion are * slightly different than the CLR version. Rather than using dummy * nilnodes, we use a set of accessors that deal properly with null. They * are used to avoid messiness surrounding nullness checks in the main * algorithms. */ private static boolean colorOf(Entry p) { return (p == null ? BLACK : p.color); } private static Entry parentOf(Entry p) { return (p == null ? null: p.parent); } private static void setColor(Entry p, boolean c) { if (p != null) p.color = c; } private static Entry leftOf(Entry p) { return (p == null) ? null: p.left; } private static Entry rightOf(Entry p) { return (p == null) ? 
null: p.right; } /** From CLR */ private void rotateLeft(Entry p) { if (p != null) { Entry r = p.right; p.right = r.left; if (r.left != null) r.left.parent = p; r.parent = p.parent; if (p.parent == null) root = r; else if (p.parent.left == p) p.parent.left = r; else p.parent.right = r; r.left = p; p.parent = r; } } /** From CLR */ private void rotateRight(Entry p) { if (p != null) { Entry l = p.left; p.left = l.right; if (l.right != null) l.right.parent = p; l.parent = p.parent; if (p.parent == null) root = l; else if (p.parent.right == p) p.parent.right = l; else p.parent.left = l; l.right = p; p.parent = l; } } /** From CLR */ protected void fixAfterInsertion(Entry x) { x.color = RED; while (x != null && x != root && x.parent.color == RED) { if (parentOf(x) == leftOf(parentOf(parentOf(x)))) { Entry y = rightOf(parentOf(parentOf(x))); if (colorOf(y) == RED) { setColor(parentOf(x), BLACK); setColor(y, BLACK); setColor(parentOf(parentOf(x)), RED); x = parentOf(parentOf(x)); } else { if (x == rightOf(parentOf(x))) { x = parentOf(x); rotateLeft(x); } setColor(parentOf(x), BLACK); setColor(parentOf(parentOf(x)), RED); rotateRight(parentOf(parentOf(x))); } } else { Entry y = leftOf(parentOf(parentOf(x))); if (colorOf(y) == RED) { setColor(parentOf(x), BLACK); setColor(y, BLACK); setColor(parentOf(parentOf(x)), RED); x = parentOf(parentOf(x)); } else { if (x == leftOf(parentOf(x))) { x = parentOf(x); rotateRight(x); } setColor(parentOf(x), BLACK); setColor(parentOf(parentOf(x)), RED); rotateLeft(parentOf(parentOf(x))); } } } root.color = BLACK; } protected Entry onlyDeleteEntry( Entry p ) { this.modCount++; this.size--; // If strictly internal, copy successor's element to p and then make p // point to successor. if ( p.left != null && p.right != null ) { Entry s = successor(p); p.key = s.key; p.value = s.value; p = s; // CAUTION: `p` is replaced by its successor here, so external references to the removed node become stale (problematic for linked-entry optimizations). } // p has 2 children // Start fixup at replacement node, if it exists. Entry replacement = ( p.left != null ? p.left : p.right ); if ( replacement != null ) { // Link replacement to parent replacement.parent = p.parent; if ( p.parent == null ) { root = replacement; } else if ( p == p.parent.left ) { p.parent.left = replacement; } else { p.parent.right = replacement; } // Null out links so they are OK to use by fixAfterDeletion. p.left = p.right = p.parent = null; // Fix replacement if ( p.color == BLACK ) { this.fixAfterDeletion(replacement); } } else if ( p.parent == null ) { // return if we are the only node. this.root = null; } else { // No children. Use self as phantom replacement and unlink.
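// Deletion recap: an internal node with two children is first reduced to its in-order successor (copied above); a one-child node is spliced out and rebalanced from its replacement; a childless BLACK node, as in this branch, serves as its own phantom replacement -- fix up first, then unlink below.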
if ( p.color == BLACK ) { this.fixAfterDeletion( p ); } if ( p.parent != null ) { if ( p == p.parent.left ) { p.parent.left = null; } else if ( p == p.parent.right ) { p.parent.right = null; } p.parent = null; } } return p; } protected void deleteEntry( Entry p ) { p = this.onlyDeleteEntry( p ); this.afterNodeRemoval( p ); } /** From CLR */ private void fixAfterDeletion(Entry x) { while (x != root && colorOf(x) == BLACK) { if (x == leftOf(parentOf(x))) { Entry sib = rightOf(parentOf(x)); if (colorOf(sib) == RED) { setColor(sib, BLACK); setColor(parentOf(x), RED); rotateLeft(parentOf(x)); sib = rightOf(parentOf(x)); } if (colorOf(leftOf(sib)) == BLACK && colorOf(rightOf(sib)) == BLACK) { setColor(sib, RED); x = parentOf(x); } else { if (colorOf(rightOf(sib)) == BLACK) { setColor(leftOf(sib), BLACK); setColor(sib, RED); rotateRight(sib); sib = rightOf(parentOf(x)); } setColor(sib, colorOf(parentOf(x))); setColor(parentOf(x), BLACK); setColor(rightOf(sib), BLACK); rotateLeft(parentOf(x)); x = root; } } else { // symmetric Entry sib = leftOf(parentOf(x)); if (colorOf(sib) == RED) { setColor(sib, BLACK); setColor(parentOf(x), RED); rotateRight(parentOf(x)); sib = leftOf(parentOf(x)); } if ( colorOf(rightOf(sib)) == BLACK && colorOf(leftOf(sib) ) == BLACK) { setColor(sib, RED); x = parentOf(x); } else { if (colorOf(leftOf(sib)) == BLACK) { setColor(rightOf(sib), BLACK); setColor(sib, RED); rotateLeft(sib); sib = leftOf(parentOf(x)); } setColor(sib, colorOf(parentOf(x))); setColor(parentOf(x), BLACK); setColor(leftOf(sib), BLACK); rotateRight(parentOf(x)); x = root; } } } setColor(x, BLACK); } private static final long serialVersionUID = 919286545866124006L; @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } protected void internalWriteEntries( ObjectOutputStream s ) throws IOException { for ( Map.Entry e : entrySet() ) { s.writeObject(e.getKey()); s.writeObject(e.getValue()); } } protected void internalReadEntries( int size, final ObjectInputStream s ) throws IOException, ClassNotFoundException { this.buildFromSorted( size, null, s, null ); } protected void writeObject( ObjectOutputStream s ) throws IOException { // Write out the Comparator and any hidden stuff s.defaultWriteObject(); // Write out size (number of Mappings) s.writeInt( this.size ); // Write out keys and values (alternating) this.internalWriteEntries( s ); } protected void readObject( final ObjectInputStream s ) throws IOException, ClassNotFoundException { // Read in the Comparator and any hidden stuff s.defaultReadObject(); // Read in size int size = s.readInt(); this.internalReadEntries( size, s ); } /** Intended to be called only from TreeSet.readObject */ void readTreeSet(int size, ObjectInputStream s, V defaultVal) throws IOException, ClassNotFoundException { buildFromSorted(size, null, s, defaultVal); } /** Intended to be called only from TreeSet.addAll */ void addAllForTreeSet(SortedSet set, V defaultVal) { try { buildFromSorted(set.size(), set.iterator(), null, defaultVal); } catch (IOException | ClassNotFoundException cannotHappen) { } } /** * Linear time tree building algorithm from sorted data. Can accept keys * and/or values from iterator or stream. This leads to too many * parameters, but seems better than alternatives. The four formats * that this method accepts are: * * 1) An iterator of Map.Entries. (it != null, defaultVal == null). * 2) An iterator of keys. (it != null, defaultVal != null). 
* 3) A stream of alternating serialized keys and values. * (it == null, defaultVal == null). * 4) A stream of serialized keys. (it == null, defaultVal != null). * * It is assumed that the comparator of the TreeMap is already set prior * to calling this method. * * @param size the number of keys (or key-value pairs) to be read from * the iterator or stream * @param it If non-null, new entries are created from entries * or keys read from this iterator. * @param str If non-null, new entries are created from keys and * possibly values read from this stream in serialized form. * Exactly one of it and str should be non-null. * @param defaultVal if non-null, this default value is used for * each value in the map. If null, each value is read from * iterator or stream, as described above. * @throws java.io.IOException propagated from stream reads. This cannot * occur if str is null. * @throws ClassNotFoundException propagated from readObject. * This cannot occur if str is null. */ private void buildFromSorted( int size, Iterator it, ObjectInputStream str, V defaultVal ) throws IOException, ClassNotFoundException { this.size = size; this.root = this.buildFromSorted( 0, 0, size-1, computeRedLevel(size), it, str, defaultVal ); } /** * Recursive "helper method" that does the real work of the * previous method. Identically named parameters have * identical definitions. Additional parameters are documented below. * It is assumed that the comparator and size fields of the TreeMap are * already set prior to calling this method. (It ignores both fields.) * * @param level the current level of tree. Initial call should be 0. * @param lo the first element index of this subtree. Initial should be 0. * @param hi the last element index of this subtree. Initial should be * size-1. * @param redLevel the level at which nodes should be red. * Must be equal to computeRedLevel for tree of this size. */ @SuppressWarnings("unchecked") private final Entry buildFromSorted( int level, int lo, int hi, int redLevel, Iterator it, ObjectInputStream str, V defaultVal ) throws IOException, ClassNotFoundException { /* * Strategy: The root is the middlemost element. To get to it, we * have to first recursively construct the entire left subtree, * so as to grab all of its elements. We can then proceed with right * subtree. * * The lo and hi arguments are the minimum and maximum * indices to pull out of the iterator or stream for current subtree. * They are not actually indexed, we just proceed sequentially, * ensuring that items are extracted in corresponding order. */ if (hi < lo) { return null; } int mid = (lo + hi) >>> 1; Entry left = null; if ( lo < mid ) { left = this.buildFromSorted(level+1, lo, mid - 1, redLevel, it, str, defaultVal); } // extract key and/or value from iterator or stream K key; V value; if ( it != null ) { if ( defaultVal == null ) { Map.Entry entry = (Map.Entry)it.next(); key = (K)entry.getKey(); value = (V)entry.getValue(); } else { key = (K)it.next(); value = defaultVal; } } else { // use stream key = (K) str.readObject(); value = (defaultVal != null ? 
defaultVal : (V) str.readObject()); } Entry middle = this.spawnNode( key, value, null ); // color nodes in non-full bottommost level red if ( level == redLevel ) { middle.color = RED; } if ( left != null ) { middle.left = left; left.parent = middle; } if ( mid < hi ) { Entry right = this.buildFromSorted(level+1, mid+1, hi, redLevel, it, str, defaultVal); middle.right = right; right.parent = middle; } return middle; } /** * Finds the level down to which to assign all nodes BLACK. This is the * last `full' level of the complete binary tree produced by buildTree. * The remaining nodes are colored RED. (This makes a `nice' set of * color assignments wrt future insertions.) This level number is * computed by finding the number of splits needed to reach the zeroth * node. * * @param size the (non-negative) number of keys in the tree to be built */ private static int computeRedLevel(int size) { return 31 - Integer.numberOfLeadingZeros(size + 1); } /** * Currently, we support Spliterator-based versions only for the * full map, in either plain or descending form, otherwise relying * on defaults because size estimation for submaps would dominate * costs. The type tests needed to check these for key views are * not very nice but avoid disrupting existing class * structures. Callers must use plain default spliterators if this * returns null. */ static Spliterator keySpliteratorFor(NavigableMap m) { if (m instanceof TreeMap) { @SuppressWarnings("unchecked") TreeMap t = (TreeMap) m; return t.keySpliterator(); } if (m instanceof DescendingSubMap) { @SuppressWarnings("unchecked") DescendingSubMap dm = (DescendingSubMap) m; TreeMap tm = dm.m; if (dm == tm.descendingMap) { @SuppressWarnings("unchecked") TreeMap t = (TreeMap) tm; return t.descendingKeySpliterator(); } } @SuppressWarnings("unchecked") NavigableSubMap sm = (NavigableSubMap) m; return sm.keySpliterator(); } final Spliterator keySpliterator() { return new KeySpliterator<>(this, null, null, 0, -1, 0); } final Spliterator descendingKeySpliterator() { return new DescendingKeySpliterator<>(this, null, null, 0, -2, 0); } /** * Base class for spliterators. Iteration starts at a given * origin and continues up to but not including a given fence (or * null for end). At top-level, for ascending cases, the first * split uses the root as left-fence/right-origin. From there, * right-hand splits replace the current fence with its left * child, also serving as origin for the split-off spliterator. * Left-hands are symmetric. Descending versions place the origin * at the end and invert ascending split rules. This base class * is non-committal about directionality, or whether the top-level * spliterator covers the whole tree. This means that the actual * split mechanics are located in subclasses. Some of the subclass * trySplit methods are identical (except for return types), but * not nicely factorable. * * Currently, subclass versions exist only for the full map * (including descending keys via its descendingMap). Others are * possible but currently not worthwhile because submaps require * O(n) computations to determine size, which substantially limits * potential speed-ups of using custom Spliterators versus default * mechanics. * * To bootstrap initialization, external constructors use * negative size estimates: -1 for ascend, -2 for descend.
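* For instance, keySpliterator() above returns new KeySpliterator<>(this, null, null, 0, -1, 0): the -1 estimate makes getEstimate() lazily bind current to getFirstEntry() and snapshot size and modCount on first use.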
*/ static class TreeMapSpliterator { final TreeMap tree; TreeMap.Entry current; // traverser; initially first node in range TreeMap.Entry fence; // one past last, or null int side; // 0: top, -1: is a left split, +1: right int est; // size estimate (exact only for top-level) int expectedModCount; // for CME checks TreeMapSpliterator(TreeMap tree, TreeMap.Entry origin, TreeMap.Entry fence, int side, int est, int expectedModCount) { this.tree = tree; this.current = origin; this.fence = fence; this.side = side; this.est = est; this.expectedModCount = expectedModCount; } final int getEstimate() { // force initialization int s; TreeMap t; if ((s = est) < 0) { if ((t = tree) != null) { current = (s == -1) ? t.getFirstEntry() : t.getLastEntry(); s = est = t.size; expectedModCount = t.modCount; } else s = est = 0; } return s; } public final long estimateSize() { return (long)getEstimate(); } } static final class KeySpliterator extends TreeMapSpliterator implements Spliterator { KeySpliterator(TreeMap tree, TreeMap.Entry origin, TreeMap.Entry fence, int side, int est, int expectedModCount) { super(tree, origin, fence, side, est, expectedModCount); } public KeySpliterator trySplit() { if (est < 0) getEstimate(); // force initialization int d = side; TreeMap.Entry e = current, f = fence, s = ((e == null || e == f) ? null : // empty (d == 0) ? tree.root : // was top (d > 0) ? e.right : // was right (d < 0 && f != null) ? f.left : // was left null); if (s != null && s != e && s != f && tree.compare(e.key, s.key) < 0) { // e not already past s side = 1; return new KeySpliterator<> (tree, e, current = s, -1, est >>>= 1, expectedModCount); } return null; } public void forEachRemaining(Consumer action) { if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization TreeMap.Entry f = fence, e, p, pl; if ((e = current) != null && e != f) { current = f; // exhaust do { action.accept(e.key); if ((p = e.right) != null) { while ((pl = p.left) != null) p = pl; } else { while ((p = e.parent) != null && e == p.right) e = p; } } while ((e = p) != null && e != f); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); } } public boolean tryAdvance(Consumer action) { TreeMap.Entry e; if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization if ((e = current) == null || e == fence) return false; current = successor(e); action.accept(e.key); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); return true; } public int characteristics() { return (side == 0 ? Spliterator.SIZED : 0) | Spliterator.DISTINCT | Spliterator.SORTED | Spliterator.ORDERED; } public final Comparator getComparator() { return tree.comparator; } } static final class DescendingKeySpliterator extends TreeMapSpliterator implements Spliterator { DescendingKeySpliterator(TreeMap tree, TreeMap.Entry origin, TreeMap.Entry fence, int side, int est, int expectedModCount) { super(tree, origin, fence, side, est, expectedModCount); } public DescendingKeySpliterator trySplit() { if (est < 0) getEstimate(); // force initialization int d = side; TreeMap.Entry e = current, f = fence, s = ((e == null || e == f) ? null : // empty (d == 0) ? tree.root : // was top (d < 0) ? e.left : // was left (d > 0 && f != null) ? 
f.right : // was right null); if (s != null && s != e && s != f && tree.compare(e.key, s.key) > 0) { // e not already past s side = 1; return new DescendingKeySpliterator<> (tree, e, current = s, -1, est >>>= 1, expectedModCount); } return null; } public void forEachRemaining(Consumer action) { if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization TreeMap.Entry f = fence, e, p, pr; if ((e = current) != null && e != f) { current = f; // exhaust do { action.accept(e.key); if ((p = e.left) != null) { while ((pr = p.right) != null) p = pr; } else { while ((p = e.parent) != null && e == p.left) e = p; } } while ((e = p) != null && e != f); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); } } public boolean tryAdvance(Consumer action) { TreeMap.Entry e; if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization if ((e = current) == null || e == fence) return false; current = predecessor(e); action.accept(e.key); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); return true; } public int characteristics() { return (side == 0 ? Spliterator.SIZED : 0) | Spliterator.DISTINCT | Spliterator.ORDERED; } } static final class ValueSpliterator extends TreeMapSpliterator implements Spliterator { ValueSpliterator(TreeMap tree, TreeMap.Entry origin, TreeMap.Entry fence, int side, int est, int expectedModCount) { super(tree, origin, fence, side, est, expectedModCount); } public ValueSpliterator trySplit() { if (est < 0) getEstimate(); // force initialization int d = side; TreeMap.Entry e = current, f = fence, s = ((e == null || e == f) ? null : // empty (d == 0) ? tree.root : // was top (d > 0) ? e.right : // was right (d < 0 && f != null) ? f.left : // was left null); if (s != null && s != e && s != f && tree.compare(e.key, s.key) < 0) { // e not already past s side = 1; return new ValueSpliterator<> (tree, e, current = s, -1, est >>>= 1, expectedModCount); } return null; } public void forEachRemaining(Consumer action) { if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization TreeMap.Entry f = fence, e, p, pl; if ((e = current) != null && e != f) { current = f; // exhaust do { action.accept(e.value); if ((p = e.right) != null) { while ((pl = p.left) != null) p = pl; } else { while ((p = e.parent) != null && e == p.right) e = p; } } while ((e = p) != null && e != f); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); } } public boolean tryAdvance(Consumer action) { TreeMap.Entry e; if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization if ((e = current) == null || e == fence) return false; current = successor(e); action.accept(e.value); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); return true; } public int characteristics() { return (side == 0 ? Spliterator.SIZED : 0) | Spliterator.ORDERED; } } static final class EntrySpliterator extends TreeMapSpliterator implements Spliterator> { EntrySpliterator(TreeMap tree, TreeMap.Entry origin, TreeMap.Entry fence, int side, int est, int expectedModCount) { super(tree, origin, fence, side, est, expectedModCount); } public EntrySpliterator trySplit() { if (est < 0) getEstimate(); // force initialization int d = side; TreeMap.Entry e = current, f = fence, s = ((e == null || e == f) ? null : // empty (d == 0) ? 
tree.root : // was top (d > 0) ? e.right : // was right (d < 0 && f != null) ? f.left : // was left null); if (s != null && s != e && s != f && tree.compare(e.key, s.key) < 0) { // e not already past s side = 1; return new EntrySpliterator<> (tree, e, current = s, -1, est >>>= 1, expectedModCount); } return null; } public void forEachRemaining(Consumer> action) { if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization TreeMap.Entry f = fence, e, p, pl; if ((e = current) != null && e != f) { current = f; // exhaust do { action.accept(e); if ((p = e.right) != null) { while ((pl = p.left) != null) p = pl; } else { while ((p = e.parent) != null && e == p.right) e = p; } } while ((e = p) != null && e != f); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); } } public boolean tryAdvance(Consumer> action) { TreeMap.Entry e; if (action == null) throw new NullPointerException(); if (est < 0) getEstimate(); // force initialization if ((e = current) == null || e == fence) return false; current = successor(e); action.accept(e); if (tree.modCount != expectedModCount) throw new ConcurrentModificationException(); return true; } public int characteristics() { return (side == 0 ? Spliterator.SIZED : 0) | Spliterator.DISTINCT | Spliterator.SORTED | Spliterator.ORDERED; } @Override public Comparator> getComparator() { // Adapt or create a key-based comparator if (tree.comparator != null) { return Map.Entry.comparingByKey(tree.comparator); } else { return (Comparator> & Serializable) (e1, e2) -> { @SuppressWarnings("unchecked") Comparable k1 = (Comparable) e1.getKey(); return k1.compareTo(e2.getKey()); }; } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/UniScopeMap.java ================================================ package com.pinecone.framework.unit; import java.util.ArrayList; import java.util.List; import java.util.Map; public interface UniScopeMap extends ScopeMap { UniScopeMap parent(); Map thisScope(); UniScopeMap setParent ( UniScopeMap that ); UniScopeMap setThisScope ( Map that ); @Override default boolean isProgenitor() { return this.parent() == null; } @Override default void purge() { this.setParent( null ); this.clear(); } @Override default void depurate() { UniScopeMap p = this.parent(); while ( p != null ) { p.clear(); p = p.parent(); } this.clear(); } @Override default void overrideTo ( Map neo ) { neo.putAll( this.thisScope() ); UniScopeMap p = this.parent(); while ( p != null ) { Map pm = p.thisScope(); for( Map.Entry o : pm.entrySet() ) { neo.putIfAbsent( o.getKey(), o.getValue() ); } p = p.parent(); } } @Override default boolean isScopeEmpty () { boolean b = this.isEmpty(); if( b ) { UniScopeMap p = this.parent(); while ( p != null ) { b = p.isEmpty(); if( !b ) { return b; } p = p.parent(); } } return b; } @Override @SuppressWarnings("unchecked") default ScopeMap[] ancestors (){ ArrayList > l = new ArrayList<>(); ScopeTrees.groupByNodes( this, l ); return l.toArray( (ScopeMap[]) new UniScopeMap[0] ); } @Override @SuppressWarnings("unchecked") default ScopeMap[] scopes (){ ArrayList > l = new ArrayList<>(); l.add( this ); ScopeTrees.groupByNodes( this, l ); return l.toArray( (ScopeMap[]) new UniScopeMap[0] ); } default UniScopeMap progenitor () { if( this.parent() == null ) { return this; } else { // Walk to the root of the scope chain: stop at the node whose parent is null, rather than overrunning to null. UniScopeMap p = this.parent(); while ( p.parent() != null ) { p = p.parent(); } return p; } } @Override default ScopeMap getAll ( Object key, List
ret ) { V v = this.thisScope().get( key ); if( v != null ) { ret.add( v ); } UniScopeMap p = this.parent(); while ( p != null ) { Map pm = p.thisScope(); v = pm.get( key ); if( v != null ) { ret.add( v ); } p = p.parent(); } return this; } @Override default ScopeMap removeAll ( Object key ) { this.thisScope().remove( key ); UniScopeMap p = this.parent(); while ( p != null ) { Map pm = p.thisScope(); pm.remove( key ); p = p.parent(); } return this; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/UniScopeMaptron.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.util.json.JSON; import java.io.Serializable; import java.util.*; public class UniScopeMaptron implements PineUnit, Map, UniScopeMap, Cloneable, Serializable, Iterable > { protected UniScopeMap mParent; // This is the [[prototype]] link, same as Javascript. protected Map mThisMap; protected transient Set > entrySet; protected transient Set scKeySet; protected transient Collection scValues; public UniScopeMaptron() { this( true, null ); } public UniScopeMaptron( Map thisMap, UniScopeMap prototype ){ this.mThisMap = thisMap; this.mParent = prototype; } public UniScopeMaptron( boolean bLinked, UniScopeMap prototype ){ this( bLinked ? new LinkedHashMap<>() : new HashMap<>(), prototype ); } public UniScopeMaptron( Map thisMap ){ this( thisMap, null ); } /** Scope Map **/ @Override public UniScopeMap parent() { return this.mParent; } @Override public Map thisScope(){ return this.mThisMap; } @Override public UniScopeMap setParent ( UniScopeMap that ) { this.mParent = that; return this; } @Override public UniScopeMap setThisScope ( Map that ) { this.mThisMap = that; return this; } @Override public ScopeMap elevate ( Map child ) { UniScopeMaptron sup = new UniScopeMaptron<>( this.mThisMap, this.mParent ); this.setThisScope( child ); this.setParent ( sup ); return this; } /** Basic Map **/ @Override public int size() { return this.mThisMap.size(); } @Override public boolean isEmpty() { return this.mThisMap.isEmpty(); } @Override public boolean containsKey( Object key ) { boolean result = this.mThisMap.containsKey(key); if ( !result && this.mParent != null ) { result = this.mParent.containsKey( key ); } return result; } @Override public boolean containsValue( Object value ) { boolean result = this.mThisMap.containsValue(value); if ( !result && this.mParent != null ) { result = this.mParent.containsValue( value ); } return result; } @Override public V get( Object key ) { V val = this.mThisMap.get( key ); if ( val == null && this.mParent != null ) { val = this.mParent.get( key ); } return val; } @Override public void putAll( Map m ) { this.mThisMap.putAll( m ); } public UniScopeMaptron xPutAll(Map m ) { this.putAll(m); return this; } @Override public void clear() { this.mThisMap.clear(); } public UniScopeMaptron xClear() { this.clear(); return this; } @Override public V remove( Object key ) { V v = this.mThisMap.remove(key); if( v == null && this.mParent != null ) { v = this.mParent.remove( key ); } return v; } public UniScopeMaptron xRemove(Object key) { this.remove(key); return this; } @Override public Set keySet() { return this.mThisMap.keySet(); } @Override public Collection values() { return this.mThisMap.values(); } @Override public Set > entrySet() { return this.mThisMap.entrySet(); } @Override public Iterator > iterator() { return 
this.mThisMap.entrySet().iterator(); } @Override public V put( K key, V value ) { return this.mThisMap.put( key, value ); } @Override public V putIfAbsent( K key, V value ) { return this.mThisMap.putIfAbsent( key, value ); } @Override public boolean hasOwnProperty ( Object key ) { return this.mThisMap.containsKey( key ); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this.mThisMap ); } @Override public Iterator > scopeIterator() { return new ScopeEntryIterator(); } @Override public Set > scopeEntrySet() { Set> es; return (es = this.entrySet) == null ? (this.entrySet = new ScopeEntrySet()) : es; } @Override public Set scopeKeySet() { Set ks = this.scKeySet; if ( ks == null ) { ks = new ScopeKeySet(); this.scKeySet = ks; } return ks; } @Override public Collection scopeValues() { Collection vs = this.scValues; if ( vs == null ) { vs = new ScopeValues(); this.scValues = vs; } return vs; } protected final class ScopeEntrySet extends AbstractSet > { public final int size() { throw new UnsupportedOperationException("Iterator only."); } public final void clear() { UniScopeMaptron.this.clear(); } public final Iterator > iterator() { return new ScopeEntryIterator(); } public final boolean contains( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); V v = UniScopeMaptron.this.get(key); return v != null && v.equals(e.getValue()); } public final boolean remove( Object o ) { if ( this.contains(o) ) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); return UniScopeMaptron.this.remove(key) != null; } return false; } public final Spliterator> spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT ); } } protected abstract class ScopeIterator { protected Iterator> thisMapIterator; protected UniScopeMap currentScope; ScopeIterator() { this.thisMapIterator = mThisMap.entrySet().iterator(); this.currentScope = UniScopeMaptron.this; } public boolean hasNext() { while ( !this.thisMapIterator.hasNext() && this.currentScope.parent() != null ) { this.currentScope = this.currentScope.parent(); this.thisMapIterator = this.currentScope.thisScope().entrySet().iterator(); } return this.thisMapIterator.hasNext(); } protected Map.Entry nextNode() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } return this.thisMapIterator.next(); } public void remove() { this.thisMapIterator.remove(); } } final class ScopeKeySet extends AbstractSet { public final int size() { throw new UnsupportedOperationException("Iterator only."); } public final void clear() { UniScopeMaptron.this.clear(); } public final Iterator iterator() { return new ScopeKeyIterator(); } public final boolean contains( Object o ) { return containsKey(o); } public final boolean remove( Object key ) { return UniScopeMaptron.this.remove(key) != null; } public final Spliterator spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT ); } } protected final class ScopeKeyIterator extends ScopeIterator implements Iterator { public final K next() { return nextNode().getKey(); } } final class ScopeValues extends AbstractCollection { public final int size() { throw new UnsupportedOperationException("Iterator only."); } public final void clear() { UniScopeMaptron.this.clear(); } public final Iterator iterator() { return new ScopeValueIterator(); } public final boolean contains( 
Object o ) { return containsValue(o); } public final Spliterator spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED ); } } protected final class ScopeValueIterator extends ScopeIterator implements Iterator { public final V next() { return (V)nextNode().getValue(); } } protected final class ScopeEntryIterator extends ScopeIterator implements Iterator> { public final Map.Entry next() { return nextNode(); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Units.java ================================================ package com.pinecone.framework.unit; import com.pinecone.framework.system.prototype.ObjectiveEvaluator; import com.pinecone.framework.system.prototype.Objectom; import com.pinecone.framework.util.lang.DynamicFactory; import java.lang.reflect.Array; import java.lang.reflect.InvocationTargetException; import java.util.Collection; import java.util.Map; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.TreeMap; @SuppressWarnings("unchecked") public final class Units { public final static List EmptyList = List.of(); public final static Collection EmptyCollection = Units.EmptyList; public final static Set EmptySet = Set.of(); public final static Map EmptyMap = Map.of(); public static Collection emptyCollection() { return Units.EmptyCollection; } public static List emptyList() { return Units.EmptyList; } public static Set emptySet() { return Units.EmptySet; } public static Map emptyMap() { return Units.EmptyMap; } public static Collection spawnExtendParent( Collection parent ) { return Units.spawnExtendParent( parent, ArrayList.class ); } public static Map spawnExtendParent( Map parent ) { return Units.spawnExtendParent( parent, TreeMap.class ); } @SuppressWarnings( "unchecked" ) public static C spawnExtendParent( Object parent, Class basic ) { Object subList = null; try{ subList = parent.getClass().getDeclaredConstructor().newInstance(); } catch ( IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e ) { try{ subList = basic.getDeclaredConstructor().newInstance(); } catch ( IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e1 ) { throw new IllegalArgumentException( "Illegal 'basic' class given.", e1 ); } } return (C)subList; } @SuppressWarnings( "unchecked" ) public static C newInstance( Class clazz, Object...args ) { Object subList = null; try{ if( args.length == 0 ) { subList = clazz.getDeclaredConstructor().newInstance(); } else { subList = DynamicFactory.DefaultFactory.newInstance( clazz, null, args ); } } catch ( IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e1 ) { throw new IllegalArgumentException( "Illegal 'clazz' class given.", e1 ); } return (C)subList; } /** * getFromMapStructure * Retrieves a value from any map-like object, mimicking member access in dynamic languages (e.g. Javascript/PHP/Python/etc.). * @param mapLiked Any object that structurally supports map-style operations (get/set/index/query/etc.). * @param key The string key (numeric or textual) used to retrieve the value from the map-like object. * @param bIncludeIterable if true, iterable objects are indexed by enumeration position, with the key parsed as that index. * @param bIncludeAnyPotentialMapLiked if true, any other potentially map-like object is queried as a bean via property access. * @return null if not found, otherwise the value associated with the key. */ public static Object getFromMapStructure ( Object mapLiked, String key, boolean bIncludeIterable, boolean bIncludeAnyPotentialMapLiked ) { if( mapLiked instanceof Map ) { return ((Map) mapLiked).get( key ); } else if( mapLiked instanceof Objectom ) { return ((Objectom) mapLiked).get( key ); } else if( mapLiked instanceof List ) { try{ return ((List) mapLiked).get( Integer.parseInt( key ) ); } catch ( NumberFormatException e ) { return null; } } else if( mapLiked.getClass().isArray() ) { try{ return Array.get( mapLiked, Integer.parseInt( key ) ); } catch ( NumberFormatException e ) { return null; } } else if( mapLiked instanceof Iterable && bIncludeIterable ) { try{ int k = Integer.parseInt( key ); int i = 0; for( Object v : (Iterable) mapLiked ) { if( i == k ) { return v; } ++i; } return null; } catch ( NumberFormatException e ) { return null; } } else if( mapLiked.getClass().isPrimitive() ) { return null; } else if( mapLiked.getClass().isEnum() ) { return null; } else if( mapLiked instanceof Number ) { return null; } else if( mapLiked instanceof String ) { return null; } if( bIncludeAnyPotentialMapLiked ) { return ObjectiveEvaluator.MapStructures.classGet( mapLiked, key ); } return null; } public static Object getValueFromMapStructureRecursively( Object mapLiked, String key, String szSplitRegex, boolean bIncludeIterable, boolean bIncludeAnyPotentialMapLiked ) { String[] keys = key.split( szSplitRegex ); Object value = mapLiked; for ( String k : keys ) { value = Units.getFromMapStructure( value, k, bIncludeIterable, bIncludeAnyPotentialMapLiked ); } return value; } public static Object getValueFromMapStructureRecursively( Object mapLiked, String key ) { return Units.getValueFromMapStructureRecursively( mapLiked, key, "\\.|\\/", true, true ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/DataSharer.java ================================================ package com.pinecone.framework.unit.affinity; import com.pinecone.framework.system.prototype.ObjectiveBean; import com.pinecone.framework.system.prototype.Objectom; import com.pinecone.framework.system.prototype.Pinenut; public interface DataSharer extends Pinenut { Object share ( Objectom that, boolean ignoreIfNoSetter ) ; default Object share( Objectom that ) { return this.share( that, true ); } default Object shareFromBean( Object that ) { return this.share( new ObjectiveBean( that ) ); } default Object share( Object that ) { return this.share( Objectom.wrap( that ) ); } static Objectom warp( Object target, boolean isBean ) { if( target instanceof Objectom ) { return (Objectom)target; } else { if( isBean ) { return new ObjectiveBean( target ); } else { return Objectom.wrap( target ); } } } static Object share( Objectom target, Objectom shared, boolean ignoreIfNoSetter ) { for ( Object key : shared.keys() ) { try{ target.set( key, shared.get( key ) ); } catch ( IllegalArgumentException e ) { if( !ignoreIfNoSetter ) { throw e; } } } return target.prototype().proto(); } static Object share( Objectom target, Objectom shared ) { return DataSharer.share( target, shared, true ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/GenericObjectomSharer.java ================================================ package com.pinecone.framework.unit.affinity; import
com.pinecone.framework.system.prototype.ObjectiveBean; import com.pinecone.framework.system.prototype.Objectom; public class GenericObjectomSharer implements DataSharer { protected Objectom mWarped; public GenericObjectomSharer( Object target, boolean isBean ) { this.mWarped = DataSharer.warp( target, isBean ); } @Override public Object share( Objectom that, boolean ignoreIfNoSetter ) { return DataSharer.share( this.mWarped, that, ignoreIfNoSetter ); } @Override public Object share( Objectom that ) { return this.share( that, true ); } @Override public Object shareFromBean( Object that ) { return this.share( new ObjectiveBean( that ) ); } @Override public Object share( Object that ) { return this.share( Objectom.wrap( that ) ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/ObjectOverrider.java ================================================ package com.pinecone.framework.unit.affinity; import com.pinecone.framework.system.prototype.Pinenut; import java.util.List; import java.util.Map; public interface ObjectOverrider extends Pinenut { void override ( Object instance, Object prototype, boolean bRecursive ) ; void overrideObject ( Map instance, Map parentScope, boolean bRecursive ) ; void overrideList ( List instanceList, List templateList, boolean bRecursive ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/RecursiveUnitOverrider.java ================================================ package com.pinecone.framework.unit.affinity; import java.util.List; import java.util.Map; public class RecursiveUnitOverrider implements ObjectOverrider { public RecursiveUnitOverrider() { } @SuppressWarnings( "unchecked" ) public void override ( Object instance, Object prototype, boolean bRecursive ) { if ( instance != null && prototype != null ) { if ( instance instanceof Map && prototype instanceof Map ) { this.overrideObject( (Map) instance, (Map) prototype, bRecursive ); } else if ( instance instanceof List && prototype instanceof List ) { this.overrideList( (List) instance, (List) prototype, bRecursive ); } } } public void overrideObject ( Map instance, Map parentScope, boolean bRecursive ) { for ( Map.Entry kv : parentScope.entrySet() ) { K key = kv.getKey(); V templateValue = kv.getValue(); if ( !instance.containsKey( key ) ) { instance.put( key, templateValue ); } else { Object instanceValue = instance.get( key ); this.override( instanceValue, templateValue, bRecursive ); } } } public void overrideList ( List instanceList, List templateList, boolean bRecursive ) { for ( int i = 0; i < templateList.size(); ++i ) { V templateElement = templateList.get( i ); if ( i < instanceList.size() ) { V instanceElement = instanceList.get( i ); this.override( instanceElement, templateElement, bRecursive ); } else { instanceList.add( templateElement ); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/ArchBloomDistinctAudit.java ================================================ package com.pinecone.framework.unit.distinct; import com.pinecone.framework.unit.Units; import java.util.BitSet; import java.util.Iterator; import java.util.Set; import java.util.Map; import java.util.HashSet; import java.util.Collection; public abstract class ArchBloomDistinctAudit implements DistinctAudit { protected Collection > mIterators ; protected Collection > mIteratorsCopy ; protected int 
mBitSize ; protected Class mConflictMapType ; protected Collection mDistinctions ; protected DistinctType mDistinctType ; protected ArchBloomDistinctAudit( int bitSize, Collection > iterators, Collection > iteratorsCopy, Collection distinctions, Class conflictMapType, DistinctType distinctType ) { this.mBitSize = bitSize; this.mIterators = iterators; this.mIteratorsCopy = iteratorsCopy; this.mDistinctions = distinctions; this.mConflictMapType = conflictMapType; this.mDistinctType = distinctType; } protected Map > newConflictMap() { return Units.newInstance( this.mConflictMapType ); } @Override public boolean hasOwnElement( E element ) { return this.hasOwnElement( -1, element ); } protected abstract boolean hasOwnElement( int id, E element ); protected void filterFromIterator( int id, Iterator iterator ){ while ( iterator.hasNext() ) { E element = iterator.next(); boolean owned = this.hasOwnElement( id, element ); if ( this.mDistinctType == DistinctType.SymmetricDistinct && !owned ) { this.mDistinctions.add(element); } else if ( this.mDistinctType == DistinctType.SymmetricHomogeneity && owned ) { this.mDistinctions.add(element); } } } protected void addBitSet( Iterator iterator, BitSet bitset, Map > conflictMap ) { while ( iterator.hasNext() ) { E element = iterator.next(); int hash = element.hashCode(); int index = Math.abs( hash % this.mBitSize ); if ( bitset.get( index ) ) { conflictMap.computeIfAbsent(index, k -> new HashSet<>()).add( element ); } else { bitset.set( index ); conflictMap.computeIfAbsent(index, k -> new HashSet<>()).add( element ); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/DistinctAudit.java ================================================ package com.pinecone.framework.unit.distinct; import com.pinecone.framework.system.prototype.Pinenut; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; public interface DistinctAudit extends Pinenut { boolean hasOwnElement( E element ); Collection audit(); Collection audit( Iterator neoIter, Iterator neoIterCopy ); default Collection audit( Collection neo ) { return this.audit( neo.iterator(), neo.iterator() ); } static Collection> toIterators( Collection > collections ) { List> iterators = new ArrayList<>(); for ( Collection collection : collections ) { iterators.add(collection.iterator()); } return iterators; } static int getMaxSize( Collection> collections ) { int maxSize = 0; for ( Collection collection : collections ) { maxSize = Math.max( maxSize, collection.size() ); } return maxSize; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/DistinctType.java ================================================ package com.pinecone.framework.unit.distinct; public enum DistinctType { SymmetricDistinct ( "SymmetricDistinct" ), SymmetricHomogeneity ( "SymmetricHomogeneity" ); private final String value; DistinctType( String value ){ this.value = value; } public String getName(){ return this.value; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/GenericDistinctAudit.java ================================================ package com.pinecone.framework.unit.distinct; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.unit.Units; import java.util.Iterator; import java.util.Set; import java.util.Collection; import 
java.util.ArrayList; import java.util.HashSet; public class GenericDistinctAudit implements DistinctAudit { protected Collection > mIterators; protected Collection mDistinctions; protected DistinctType mDistinctType; protected Set mCommonElements; protected Set mDistinctElements; protected Collection mDuplicateElements; protected Class mSetType; protected Set newSet() { return Units.newInstance( this.mSetType ); } public GenericDistinctAudit( Collection> iterators, Collection distinctions, DistinctType distinctType, Class setType ) { this.mIterators = iterators; this.mDistinctions = distinctions; this.mDistinctType = distinctType; this.mSetType = setType; this.mCommonElements = this.newSet(); this.mDistinctElements = this.newSet(); if( !( this.mDistinctions instanceof Set ) ) { this.mDuplicateElements = new ArrayList<>(); } } public GenericDistinctAudit( Collection> iterators, Collection distinctions, DistinctType distinctType ) { this( iterators, distinctions, distinctType, HashSet.class ); } public GenericDistinctAudit( Collection> iterators, DistinctType distinctType ) { this( iterators, new ArrayList<>(), distinctType ); } public GenericDistinctAudit( Collection > collections, DistinctType distinctType, Collection distinctions, Class setType ) { this( DistinctAudit.toIterators(collections), distinctions, distinctType, setType ); } public GenericDistinctAudit( Collection > collections, DistinctType distinctType, Collection distinctions ) { this( DistinctAudit.toIterators(collections), distinctions, distinctType ); } public GenericDistinctAudit( DistinctType distinctType, Collection > collections ) { this( collections, distinctType, new ArrayList<>() ); } @Override public boolean hasOwnElement( E element ) { return this.mCommonElements.contains( element ); } protected void addInnerSet( Iterator iterator ) { Set currentSet = this.newSet(); while ( iterator.hasNext() ) { E elem = iterator.next(); if( currentSet.contains( elem ) ) { if( this.mDuplicateElements != null ) { this.mDuplicateElements.add( elem ); } continue; } else { currentSet.add( elem ); } if( this.mDistinctElements.contains( elem ) ) { this.mCommonElements.add( elem ); this.mDistinctElements.remove( elem ); } else if( !this.mCommonElements.contains( elem ) ){ this.mDistinctElements.add( elem ); } // else if( !this.mDistinctElements.contains( elem ) /*&& !this.mCommonElements.contains( elem )*/ ) { // this.mCommonElements.add( elem ); // } // else { // this.mDistinctElements.add( elem ); // } } } protected Collection applyInnerSetToDistinctions() { if( this.mDuplicateElements == null ) { if ( this.mDistinctType == DistinctType.SymmetricDistinct ) { return this.mDistinctElements; } else if ( this.mDistinctType == DistinctType.SymmetricHomogeneity ) { return this.mCommonElements; } } else { if ( this.mDistinctType == DistinctType.SymmetricDistinct ) { this.mDistinctions.addAll( this.mDistinctElements ); } else if ( this.mDistinctType == DistinctType.SymmetricHomogeneity ) { this.mDistinctions.addAll( this.mCommonElements ); } for( E e : this.mDuplicateElements ) { if ( this.mDistinctElements.contains( e ) ) { this.mDistinctions.add( e ); } else if ( this.mCommonElements.contains( e ) ) { this.mDistinctions.add( e ); } } } return this.mDistinctions; } @Override public Collection audit() { for ( Iterator iterator : this.mIterators ) { this.addInnerSet( iterator ); } return this.applyInnerSetToDistinctions(); } @Override public Collection audit( Iterator neoIter, @Nullable Iterator dummy ) { this.addInnerSet( neoIter ); 
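// Incremental re-audit: the new iterator is folded into the running common/distinct sets above; the output collection is then cleared and rebuilt so results from earlier audit() calls are not duplicated.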
this.mDistinctions.clear(); return this.applyInnerSetToDistinctions(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/GenericPrototypeDistinctAudit.java ================================================ package com.pinecone.framework.unit.distinct; import java.util.HashSet; import java.util.Iterator; import java.util.Collection; import java.util.ArrayList; public class GenericPrototypeDistinctAudit extends GenericDistinctAudit { protected Iterator mMasterProtoIterator; public GenericPrototypeDistinctAudit( Iterator masterProtoIterator, Collection> iterators, Collection distinctions, DistinctType distinctType, Class setType ) { super( iterators, distinctions, distinctType, setType ); this.mMasterProtoIterator = masterProtoIterator; while ( this.mMasterProtoIterator.hasNext() ) { E elem = this.mMasterProtoIterator.next(); this.mCommonElements.add( elem ); } } public GenericPrototypeDistinctAudit( Iterator masterProtoIterator, Collection> iterators, Collection distinctions, DistinctType distinctType ) { this( masterProtoIterator, iterators, distinctions, distinctType, HashSet.class ); } public GenericPrototypeDistinctAudit( Iterator masterProtoIterator, Collection> iterators, DistinctType distinctType ) { this( masterProtoIterator, iterators, new ArrayList<>(), distinctType ); } public GenericPrototypeDistinctAudit( Iterator masterProtoIterator, Collection > collections, DistinctType distinctType, Collection distinctions, Class setType ) { this( masterProtoIterator, DistinctAudit.toIterators(collections), distinctions, distinctType, setType ); } public GenericPrototypeDistinctAudit( Iterator masterProtoIterator, Collection > collections, DistinctType distinctType, Collection distinctions ) { this( masterProtoIterator, DistinctAudit.toIterators(collections), distinctions, distinctType ); } public GenericPrototypeDistinctAudit( Iterator masterProtoIterator, DistinctType distinctType, Collection > collections ) { this( masterProtoIterator, collections, distinctType, new ArrayList<>() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/MegaBloomDistinctAudit.java ================================================ package com.pinecone.framework.unit.distinct; import java.util.BitSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Map; import java.util.HashMap; import java.util.Collection; import java.util.ArrayList; public class MegaBloomDistinctAudit extends ArchBloomDistinctAudit implements DistinctAudit { protected List mBitSets = new ArrayList<>() ; protected List > > mConflictMaps = new ArrayList<>() ; /** * Constructs * * @param bitSize The size of the bit array used for the bloom filter. * @param iterators A collection of iterators whose elements will be compared against the master prototype. * @param iteratorsCopy A collection of iterators that serve as copies for auditing purposes. * @param distinctions A collection to store the resulting distinctions found during the audit. * @param distinctType The type of distinction to perform, either finding symmetric distinct elements or symmetric homogeneous elements. 
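* @param conflictMapType The Map implementation class, instantiated via Units.newInstance, that backs each filter's conflict buckets.
* <p>A minimal usage sketch (hypothetical data), through the collection-based convenience constructor below:
* <pre>{@code
* Collection<String> a = List.of( "x", "y" );
* Collection<String> b = List.of( "y", "z" );
* Collection<String> oneSided = new MegaBloomDistinctAudit<String>( List.of( a, b ), DistinctType.SymmetricDistinct ).audit();
* // expected: [ "x", "z" ], the elements present in only one collection
* }</pre>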
*/ public MegaBloomDistinctAudit( int bitSize, Collection > iterators, Collection > iteratorsCopy, Collection distinctions, Class conflictMapType, DistinctType distinctType ) { super( bitSize, iterators, iteratorsCopy, distinctions, conflictMapType, distinctType ); } public MegaBloomDistinctAudit( int bitSize, Collection > iterators, Collection > iteratorsCopy, Collection distinctions, DistinctType distinctType ) { this( bitSize, iterators, iteratorsCopy, distinctions, HashMap.class, distinctType ); } public MegaBloomDistinctAudit( int bitSize, Collection > iterators, Collection > iteratorsCopy, DistinctType distinctType ) { this( bitSize, iterators, iteratorsCopy, new ArrayList<>(), distinctType ); } public MegaBloomDistinctAudit( Collection > iterators, Collection > iteratorsCopy, DistinctType distinctType ) { this( (int)1e6, iterators, iteratorsCopy, distinctType ); } public MegaBloomDistinctAudit( Collection > collections, DistinctType distinctType, Collection distinctions ) { this( (int)( DistinctAudit.getMaxSize( collections ) * (float)1.5 ), DistinctAudit.toIterators( collections ), DistinctAudit.toIterators( collections ), distinctions, HashMap.class, distinctType ); } public MegaBloomDistinctAudit( int bitSize, Collection > collections, DistinctType distinctType ) { this( bitSize, DistinctAudit.toIterators( collections ), DistinctAudit.toIterators( collections ), new ArrayList<>(), distinctType ); } public MegaBloomDistinctAudit( Collection > collections, DistinctType distinctType ) { this( collections, distinctType, new ArrayList<>() ); } @Override protected boolean hasOwnElement( int id, E element ) { int hash = element.hashCode(); int index = Math.abs(hash % this.mBitSize); boolean owned = false; for ( int j = 0; j < this.mIteratorsCopy.size(); ++j ) { if ( id < 0 || id != j ) { BitSet bitmap = this.mBitSets.get(j); Map > hashMap = this.mConflictMaps.get(j); if ( bitmap.get(index) && hashMap.containsKey(index) && hashMap.get(index).contains(element) ) { owned = true; break; } } } return owned; } @Override public Collection audit() { for ( int i = 0; i < this.mIterators.size(); ++i ) { this.mBitSets.add( new BitSet( this.mBitSize ) ); this.mConflictMaps.add( this.newConflictMap() ); } Iterator > iters = this.mIterators.iterator(); int i = 0; while ( iters.hasNext() ) { Iterator iterator = iters.next(); BitSet bitset = this.mBitSets.get(i); Map > conflictMap = this.mConflictMaps.get(i); this.addBitSet( iterator, bitset, conflictMap ); ++i; } iters = this.mIteratorsCopy.iterator(); i = 0; while ( iters.hasNext() ) { Iterator iterator = iters.next(); this.filterFromIterator( i, iterator ); ++i; } return this.mDistinctions; } @Override public Collection audit( Iterator neoIter, Iterator neoIterCopy ) { BitSet newBitset = new BitSet( this.mBitSize ); Map > newConflictMap = this.newConflictMap(); this.addBitSet( neoIter, newBitset, newConflictMap ); this.mBitSets.add( newBitset ); this.mConflictMaps.add( newConflictMap ); int id = this.mBitSets.size() - 1; while ( neoIterCopy.hasNext() ) { this.filterFromIterator( id, neoIterCopy ); } return this.mDistinctions; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/MegaMergeDistinctAudit.java ================================================ package com.pinecone.framework.unit.distinct; import com.pinecone.framework.system.NotImplementedException; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.unit.Units; import java.util.Iterator; 
import java.util.Set; import java.util.Collection; import java.util.HashSet; /** * MegaMergeDistinctAudit * These two iterators should each contain only unique elements. * @param <E> The element type. */ public class MegaMergeDistinctAudit implements DistinctAudit { protected Iterator mIterator1; protected Iterator mIterator2; protected int mSegmentSize; protected Set mDistinctSet; protected Class mSetType; protected Set mResultSet; protected Set newSet() { return Units.newInstance( this.mSetType ); } protected Set newSet( Object...args ) { return Units.newInstance( this.mSetType, args ); } public MegaMergeDistinctAudit( Iterator iterator1, Iterator iterator2, int segmentSize, Class setType ) { this.mIterator1 = iterator1; this.mIterator2 = iterator2; this.mSegmentSize = segmentSize; this.mSetType = setType; this.mDistinctSet = this.newSet(); this.mResultSet = this.newSet(); } public MegaMergeDistinctAudit( Iterator iterator1, Iterator iterator2, int segmentSize ) { this( iterator1, iterator2, segmentSize, HashSet.class ); } @Override public Collection audit() { while ( this.mIterator1.hasNext() || this.mIterator2.hasNext() ) { Set segment1 = this.getNextSegment( this.mIterator1, this.mSegmentSize ); Set segment2 = this.getNextSegment( this.mIterator2, this.mSegmentSize ); Set processedSegment = this.xorSets( segment1, segment2 ); this.mResultSet = this.mergeResults( this.mResultSet, processedSegment ); } return this.mResultSet; } protected Set getNextSegment( Iterator iterator, int segmentSize ) { Set segment = this.newSet(); int count = 0; while ( iterator.hasNext() && count < segmentSize ) { segment.add( iterator.next() ); count++; } return segment; } protected Set xorSets( Set set1, Set set2 ) { Set result = this.newSet(set1); for ( E element : set2 ) { if ( !result.add(element) ) { result.remove(element); } } return result; } protected Set mergeResults( Set resultSet, Set processedSegment ) { return this.xorSets( resultSet, processedSegment ); } @Override public boolean hasOwnElement( E element ) { throw new NotImplementedException(); } @Override public Collection audit( Iterator neoIter, @Nullable Iterator dummy ) { throw new NotImplementedException(); } }
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/MegaPrototypeBloomDistinctAudit.java ================================================
package com.pinecone.framework.unit.distinct; import java.util.BitSet; import java.util.Iterator; import java.util.Set; import java.util.Map; import java.util.HashMap; import java.util.Collection; import java.util.ArrayList; public class MegaPrototypeBloomDistinctAudit extends ArchBloomDistinctAudit implements DistinctAudit { protected Iterator mMasterProtoIterator; protected BitSet mMasterBitSet; protected Map > mMasterConflictMap; public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator masterProtoIterator, Collection> iterators, Collection> iteratorsCopy, Collection distinctions, Class conflictMapType, DistinctType distinctType ) { super( bitSize, iterators, iteratorsCopy, distinctions, conflictMapType, distinctType ); this.mMasterProtoIterator = masterProtoIterator; this.mMasterBitSet = new BitSet(bitSize); this.mMasterConflictMap = this.newConflictMap(); this.addBitSet( this.mMasterProtoIterator, this.mMasterBitSet, this.mMasterConflictMap ); } /** * Constructs a MegaPrototypeBloomDistinctAudit. * * @param bitSize The size of the bit array used for the bloom filter. * @param masterProtoIterator The master prototype iterator that will be used as the reference for comparison.
* @param iterators A collection of iterators whose elements will be compared against the master prototype. * @param iteratorsCopy A collection of iterators that serve as copies for auditing purposes. * @param distinctions A collection to store the resulting distinctions found during the audit. * @param distinctType The type of distinction to perform, either finding symmetric distinct elements or symmetric homogeneous elements. */ public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator masterProtoIterator, Collection> iterators, Collection> iteratorsCopy, Collection distinctions, DistinctType distinctType ) { this( bitSize, masterProtoIterator, iterators, iteratorsCopy, distinctions, HashMap.class, distinctType); } public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator masterProtoIterator, Collection> iterators, Collection> iteratorsCopy, DistinctType distinctType ) { this( bitSize, masterProtoIterator, iterators, iteratorsCopy, new ArrayList<>(), distinctType); } public MegaPrototypeBloomDistinctAudit( Iterator masterProtoIterator, Collection> iterators, Collection> iteratorsCopy, DistinctType distinctType ) { this( (int) 1e6, masterProtoIterator, iterators, iteratorsCopy, distinctType ); } public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator masterProtoIterator, Collection> collections, DistinctType distinctType ) { this( bitSize, masterProtoIterator, DistinctAudit.toIterators(collections), DistinctAudit.toIterators(collections), new ArrayList<>(), HashMap.class, distinctType); } public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator masterProtoIterator, Collection> collections, DistinctType distinctType, Collection distinctions ) { this( bitSize, masterProtoIterator, DistinctAudit.toIterators(collections), DistinctAudit.toIterators(collections), distinctions, HashMap.class, distinctType); } public MegaPrototypeBloomDistinctAudit( Iterator masterProtoIterator, Collection> collections, DistinctType distinctType, Collection distinctions ) { this( (int) (DistinctAudit.getMaxSize(collections) * 1.5), masterProtoIterator, collections, distinctType, distinctions ); } public MegaPrototypeBloomDistinctAudit( Iterator masterProtoIterator, Collection> collections, DistinctType distinctType ) { this( (int) (DistinctAudit.getMaxSize(collections) * 1.5), masterProtoIterator, collections, distinctType ); } @Override protected boolean hasOwnElement( int id, E element ) { int hash = element.hashCode(); int index = Math.abs( hash % this.mBitSize ); return this.mMasterBitSet.get(index) && this.mMasterConflictMap.containsKey(index) && this.mMasterConflictMap.get(index).contains(element); } @Override public Collection audit() { Iterator > iters = this.mIteratorsCopy.iterator(); int i = 0; while ( iters.hasNext() ) { this.filterFromIterator( i, iters.next() ); ++i; } return this.mDistinctions; } @Override public Collection audit( Iterator neoIter, Iterator neoIterCopy ) { BitSet newBitset = new BitSet( this.mBitSize ); Map > newConflictMap = this.newConflictMap(); this.addBitSet( neoIter, newBitset, newConflictMap ); this.filterFromIterator( -1, neoIterCopy ); return this.mDistinctions; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiCollectionMap.java ================================================ package com.pinecone.framework.unit.multi; import java.util.Collection; public interface MultiCollectionMap extends MultiCollectionProxyMap > { } ================================================ FILE: 
Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiCollectionMaptron.java ================================================ package com.pinecone.framework.unit.multi; import com.pinecone.framework.unit.MultiValueMaptron; import java.util.Collection; import java.util.LinkedHashMap; import java.util.ArrayList; import java.util.Map; import java.util.Set; public class MultiCollectionMaptron extends MultiValueMaptron > implements MultiCollectionMap { private static final long serialVersionUID = 1897280134591921341L; public MultiCollectionMaptron( int initialCapacity ) { this( new LinkedHashMap<>( initialCapacity ) ); } public MultiCollectionMaptron( Map > otherMap, boolean bAssimilate ) { super( otherMap, bAssimilate ); } public MultiCollectionMaptron( Map > otherMap ) { this( otherMap, false ); } public MultiCollectionMaptron() { this( new LinkedHashMap<>(), true ); } @Override protected Collection newCollection() { return new ArrayList<>(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiCollectionProxyMap.java ================================================ package com.pinecone.framework.unit.multi; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.unit.MultiValueMapper; import java.util.Map; import java.util.Collection; public interface MultiCollectionProxyMap > extends Map, MultiValueMapper { @Override default V erase( Object key, V value ) { Collection more = this.get( key ); if( more.size() == 1 ) { return this.remove( key ).iterator().next(); } if( more.remove( value ) ){ return value; } return null; } @Override default V get( Object k, V v ) { Collection more = this.get( k ); if( more.contains( v ) ){ return v; } return null; } @Override @SuppressWarnings( "unchecked" ) default Collection puts( K key, Collection value ){ return this.put( key, (U)value ); } @Override @SuppressWarnings( "unchecked" ) default void putsAll( Map > m ) { this.putAll( (Map) m ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiHashSetMaptron.java ================================================ package com.pinecone.framework.unit.multi; import com.pinecone.framework.unit.MultiValueMaptron; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Set; import java.util.Map; public class MultiHashSetMaptron extends MultiValueMaptron > implements MultiSetMap { public MultiHashSetMaptron() { this( new LinkedHashMap<>() ); } public MultiHashSetMaptron( int initialCapacity ) { this( new LinkedHashMap<>( initialCapacity ) ); } public MultiHashSetMaptron( Map > otherMap, boolean bAssimilate ) { super( otherMap, bAssimilate ); } public MultiHashSetMaptron( Map > otherMap ) { this( otherMap, false ); } @Override protected Set newCollection() { return new LinkedHashSet<>(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiListMaptron.java ================================================ package com.pinecone.framework.unit.multi; import com.pinecone.framework.unit.MultiValueMaptron; import com.pinecone.framework.unit.MultiValueMap; import java.util.LinkedHashMap; import java.util.ArrayList; import java.util.List; import java.util.Map; public class MultiListMaptron extends MultiValueMaptron > implements MultiValueMap { private static final long serialVersionUID = 3801124242820219131L; public 
MultiListMaptron( Map > otherMap, boolean bAssimilate ) { super( otherMap, bAssimilate ); } public MultiListMaptron( Map > otherMap ) { this( otherMap, false ); } public MultiListMaptron( int initialCapacity ) { this( new LinkedHashMap<>( initialCapacity ) ); } public MultiListMaptron() { this( new LinkedHashMap<>() ); } @Override protected List newCollection() { return new ArrayList<>(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiSetMap.java ================================================ package com.pinecone.framework.unit.multi; import java.util.Set; public interface MultiSetMap extends MultiCollectionProxyMap > { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiSetMaptron.java ================================================ package com.pinecone.framework.unit.multi; import com.pinecone.framework.unit.LinkedTreeSet; import com.pinecone.framework.unit.MultiValueMaptron; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; public class MultiSetMaptron extends MultiValueMaptron > implements MultiSetMap { private static final long serialVersionUID = 1367280134591921341L; public MultiSetMaptron( Map > otherMap, boolean bAssimilate ) { super( otherMap, bAssimilate ); } public MultiSetMaptron( Map > otherMap ) { this( otherMap, false ); } public MultiSetMaptron( int initialCapacity ) { this( new LinkedHashMap<>( initialCapacity ) ); } public MultiSetMaptron() { this( new LinkedHashMap<>() ); } @Override protected Set newCollection() { return new LinkedTreeSet<>(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/CollectedEntryDecoder.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Collection; import java.util.Map; public interface CollectedEntryDecoder extends Pinenut { Map decode( Collection > collection ) ; Map evolve( Map regressed ) ; Class getListClass(); CollectedEntryDecoder setListClass( Class listClass ); Class getMapClass(); CollectedEntryDecoder setMapClass( Class mapClass ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/CollectedEntryEncoder.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Map; public interface CollectedEntryEncoder extends Pinenut { Collection > encode(); // To single layer map. 
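/*
 * Key form (illustrative; refers to the Generic* implementations elsewhere in this
 * package, with their default "::" separator and "$" type indicator): a nested
 * { a: { b: x } } flattens to an entry keyed "::a::b" with value x; list and set
 * members additionally carry a "$list" / "$set" tag on the owning segment.
 */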
Map regress( Class stereotypedClass ); default Map regress() { return this.regress( LinkedHashMap.class ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/FamilyEntryNameEncoder.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.name.Namespace; public interface FamilyEntryNameEncoder extends Pinenut { FamilyEntryNameEncoder DefaultEncoder = new GenericNamespaceFamilyEntryNameEncoder(); String getSeparator(); boolean isNameForValue(); default String encode( UnitFamilyNode node ) { return this.encode( node, this.getSeparator(), this.isNameForValue() ); } default String encode( UnitFamilyNode node, boolean bNameForValue ) { return this.encode( node, this.getSeparator(), bNameForValue ); } String encode( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) ; default Namespace encodeNS( UnitFamilyNode node ) { return this.encodeNS( node, this.getSeparator(), this.isNameForValue() ); } default Namespace encodeNS( UnitFamilyNode node, boolean bNameForValue ) { return this.encodeNS( node, this.getSeparator(), bNameForValue ); } Namespace encodeNS( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/FamilyIterator.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Iterator; public interface FamilyIterator extends Iterator >, Pinenut { @Override UnitFamilyNode next(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/GenericCollectedEntryDecoder.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.unit.Units; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.LinkedHashSet; import java.util.List; import java.util.ArrayList; public class GenericCollectedEntryDecoder implements CollectedEntryDecoder { protected String mszSeparator; protected String mszListType; protected String mszSetType; protected String mszTypeIndicator; protected Class mListClass; protected Class mMapClass; public GenericCollectedEntryDecoder( String separator, String typeIndicator, String listType, String setType, Class listClass, Class mapClass ) { this.mszSeparator = separator; this.mszTypeIndicator = typeIndicator; this.mszListType = typeIndicator + listType; this.mszSetType = typeIndicator + setType; this.mListClass = listClass; this.mMapClass = mapClass; } public GenericCollectedEntryDecoder( String separator, String typeIndicator, String listType, String setType ) { this( separator, typeIndicator, listType, setType, ArrayList.class, LinkedHashMap.class ); } public GenericCollectedEntryDecoder( String separator, String typeIndicator ) { this( separator, typeIndicator, "list", "set" ); } public GenericCollectedEntryDecoder() { this( "::", "$" ); } @Override public Class getListClass() { return this.mListClass; } @Override public CollectedEntryDecoder setListClass( Class listClass ) { this.mListClass = listClass; return this; } @Override public Class getMapClass() { return this.mMapClass; } 
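/*
 * Decoding sketch (illustrative, default "::" separator and "$" indicator): an entry
 * keyed "::tags$list" with value "a" is rebuilt as { tags: [ "a" ] }. The empty first
 * path segment comes from the leading separator emitted by the family-entry name
 * encoders, which is why addAndBootstrap below starts scanning at index 1.
 */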
@Override public CollectedEntryDecoder setMapClass(Class mapClass) { this.mMapClass = mapClass; return this; } protected Map newMap() { return Units.newInstance( this.mMapClass ); } protected List newList() { return Units.newInstance( this.mListClass ); } @Override public Map decode( Collection > collection ) { Map result = this.newMap(); for ( Map.Entry entry : collection ) { Object key = entry.getKey(); V value = entry.getValue(); /* Parent-type case scenarios are ignored. */ this.addAndBootstrap( result, key, value ); } return result; } @Override public Map evolve( Map regressed ) { Map result = this.newMap(); for ( Map.Entry entry : regressed.entrySet() ) { Object key = entry.getKey(); V value = entry.getValue(); /* Parent-type case scenarios are ignored. */ this.addAndBootstrap( result, key, value ); } return result; } /* Parent-type case scenarios are ignored. `V` is the unit's element type. */ @SuppressWarnings( "unchecked" ) protected void addAndBootstrap( Map result, Object key, V value ) { String szKey = key.toString(); String[] debris = szKey.split( this.mszSeparator ); Object current = result; /* debris[0] is the empty segment before the leading separator. */ for ( int i = 1; i < debris.length - 1; ++i ) { String part = debris[i]; if ( part.endsWith( this.mszListType ) ) { part = part.substring( 0, part.length() - this.mszListType.length() ); current = this.affirmListExists( current, part ); } else if ( part.endsWith( this.mszSetType ) ) { part = part.substring( 0, part.length() - this.mszSetType.length() ); current = this.affirmSetExists( current, part ); } else { current = this.affirmMapExists( current, part ); } } String lastPart = debris[ debris.length - 1 ]; if ( lastPart.endsWith( this.mszListType ) ) { lastPart = lastPart.substring( 0, lastPart.length() - this.mszListType.length() ); current = this.affirmListExists( current, lastPart ); ((List) current).add( value ); } else if ( lastPart.endsWith( this.mszSetType ) ) { lastPart = lastPart.substring( 0, lastPart.length() - this.mszSetType.length() ); current = this.affirmSetExists( current, lastPart ); ((Set) current).add( value ); } else { if ( current instanceof Map ) { ((Map) current).put( lastPart, value ); } else if ( current instanceof List ) { ((List) current).add( value ); } else if ( current instanceof Set ) { ((Set) current).add( value ); } } } protected Object affirmLastList( Collection collection, Object last ) { if ( collection.isEmpty() || !( last instanceof List ) ) { List neo = this.newList(); collection.add( neo ); return neo; } return last; } protected Object affirmListExists( Object current, String part ) { if ( current instanceof Map ) { Map map = (Map) current; if ( !map.containsKey(part) ) { List neo = this.newList(); map.put( part, neo ); return neo; } return map.get( part ); } else if ( current instanceof List ) { List list = (List) current; return this.affirmLastList( list, list.isEmpty() ? null : list.get( list.size() - 1 ) ); /* Guard: the list may be empty. */ } else if ( current instanceof Set ) { Set set = (Set) current; return this.affirmLastList( set, this.setLastElement( set ) ); } return null; } protected Object setLastElement( Set that ) { Object lastElement = null; Iterator iterator = that.iterator(); while ( iterator.hasNext() ) { lastElement = iterator.next(); } return lastElement; } // The set must preserve insertion order (i.e. a linked set).
protected Object affirmLastSet( Collection collection, Object last ) { if ( collection.isEmpty() || !( last instanceof Set ) ) { Set newSet = new LinkedHashSet<>(); collection.add( newSet ); return newSet; } return last; } protected Object affirmSetExists( Object current, String part ) { if ( current instanceof Map ) { Map map = (Map) current; if ( !map.containsKey( part ) ) { Set neo = new LinkedHashSet<>(); map.put( part, neo ); return neo; } return map.get( part ); } else if ( current instanceof List ) { List list = (List) current; return this.affirmLastSet( list, list.isEmpty() ? null : list.get( list.size() - 1 ) ); /* Guard: the list may be empty. */ } else if ( current instanceof Set ) { Set set = (Set) current; return this.affirmLastSet( set, this.setLastElement( set ) ); } return null; } protected Object affirmLastMap( Collection collection, Object last ) { if ( collection.isEmpty() || !( last instanceof Map) ) { Map neo = this.newMap(); collection.add( neo ); return neo; } return last; } protected Object affirmMapExists( Object current, String part ) { if ( current instanceof Map ) { Map map = (Map) current; if ( !map.containsKey( part ) ) { Map neo = this.newMap(); map.put( part, neo ); return neo; } return map.get( part ); } else if ( current instanceof List ) { List list = (List) current; return this.affirmLastMap( list, list.isEmpty() ? null : list.get( list.size() - 1 ) ); /* Guard: the list may be empty. */ } else if ( current instanceof Set ) { Set set = (Set) current; return this.affirmLastMap( set, this.setLastElement( set ) ); } return null; } }
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/GenericCollectedEntryEncoder.java ================================================
package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.unit.Units; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Map; public class GenericCollectedEntryEncoder implements CollectedEntryEncoder { protected FamilyIterator mFamilyIterator; protected FamilyEntryNameEncoder mFamilyEntryNameEncoder; protected Class > > mStereotypedClass; public GenericCollectedEntryEncoder( FamilyIterator iterator, FamilyEntryNameEncoder encoder, Class > > stereotypedClass ) { this.mFamilyIterator = iterator; this.mFamilyEntryNameEncoder = encoder; this.mStereotypedClass = stereotypedClass; } @SuppressWarnings( "unchecked" ) public GenericCollectedEntryEncoder( FamilyIterator iterator, FamilyEntryNameEncoder encoder ) { this( iterator, encoder, (Class) ArrayList.class ); } public GenericCollectedEntryEncoder( FamilyIterator iterator ) { this( iterator, new TypedNamespaceFamilyEntryNameEncoder( true ) ); } @Override public Collection > encode() { Collection > collection; try{ collection = Units.newInstance( this.mStereotypedClass ); } catch ( IllegalArgumentException e ) { collection = new ArrayList<>(); } while( this.mFamilyIterator.hasNext() ) { UnitFamilyNode node = this.mFamilyIterator.next(); String k = this.mFamilyEntryNameEncoder.encode( node ); collection.add( new KeyValue<>( k, node.getEntry().getValue() )); } return collection; } @Override public Map regress( Class stereotypedClass ) { Map map; try{ map = Units.newInstance( stereotypedClass ); } catch ( IllegalArgumentException e ) { map = new LinkedHashMap<>(); } while( this.mFamilyIterator.hasNext() ) { UnitFamilyNode node = this.mFamilyIterator.next(); String k = this.mFamilyEntryNameEncoder.encode( node ); map.put( k, node.getEntry().getValue() ); } return map; } }
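A minimal end-to-end sketch of the tabulate pipeline (illustrative only; it assumes the tabulate types are generic in their value type and uses the default "::" separator and "$" type indicator; RecursiveFamilyIterator is defined further below):

    Map<String, Object> tree = new LinkedHashMap<>();
    tree.put( "name", "pinecone" );
    tree.put( "tags", new ArrayList<>( java.util.List.of( "a", "b" ) ) );

    FamilyIterator<Object> walker = new RecursiveFamilyIterator<>( tree );                    // depth-first walk
    Map<String, Object> flat = new GenericCollectedEntryEncoder<>( walker ).regress();        // single-layer map with type-tagged keys
    Map<String, Object> rebuilt = new GenericCollectedEntryDecoder<Object>().evolve( flat );  // nested structure restored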
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/GenericNamespaceFamilyEntryNameEncoder.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.util.name.Namespace; import com.pinecone.framework.util.name.UniNamespace; public class GenericNamespaceFamilyEntryNameEncoder implements FamilyEntryNameEncoder { protected String mszSeparator; protected boolean mbNameForValue; public GenericNamespaceFamilyEntryNameEncoder( String szSeparator, boolean bNameForValue ) { this.mszSeparator = szSeparator; this.mbNameForValue = bNameForValue; } public GenericNamespaceFamilyEntryNameEncoder() { this( "::", false ); } @Override public String getSeparator() { return this.mszSeparator; } @Override public boolean isNameForValue() { return this.mbNameForValue; } @Override public String encode( UnitFamilyNode node ) { return this.encode( node, this.mszSeparator, this.mbNameForValue ); } @Override public String encode( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) { if( node.getSelfKey() != null ) { StringBuilder sb = new StringBuilder( this.wrapGetCurrentNodeName( node ) ); UnitFamilyNode p = node.parent(); while ( p != null ) { Object k = this.wrapGetCurrentNodeName( p ); sb.insert(0, k + szSeparator ); p = p.parent(); } String sz = sb.toString(); if( bNameForValue ) { sz = sz + szSeparator + this.wrapGetCurrentEntryKey( node ); } return sz; } if( bNameForValue ) { return szSeparator + this.wrapGetCurrentEntryKey( node ); } return null; } @Override public Namespace encodeNS( UnitFamilyNode node ) { return this.encodeNS( node, this.mszSeparator, this.mbNameForValue ); } @Override public Namespace encodeNS( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) { if( node.getSelfKey() != null ) { Namespace ns = new UniNamespace( this.wrapGetCurrentNodeName( node ), szSeparator ); UnitFamilyNode p = node.parent(); while ( p != null ) { Object k = this.wrapGetCurrentNodeName( p ); Namespace root_p = ns; while ( root_p.parent() != null ) { root_p = root_p.parent(); } root_p.setParent( new UniNamespace( k.toString(), szSeparator ) ); p = p.parent(); } if( bNameForValue ) { ns = new UniNamespace( this.wrapGetCurrentEntryKey( node ), ns, szSeparator ); } return ns; } if( bNameForValue ) { return new UniNamespace( this.wrapGetCurrentEntryKey( node ), new UniNamespace( "", szSeparator ), szSeparator ); } return null; } protected String transferName( String szBad ) { return szBad; } protected String wrapGetCurrentNodeName( UnitFamilyNode node ) { Object k = node.getSelfKey(); if( k == null ) { k = ""; } return this.transferName( k.toString() ); } protected String wrapGetCurrentEntryKey( UnitFamilyNode node ) { Object k = node.getEntry().getKey(); if( k == null ) { k = ""; } return this.transferName( k.toString() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/RecursiveEntryIterator.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.unit.KeyValue; import java.util.Map; import java.util.Iterator; import java.util.Deque; import java.util.ArrayDeque; import java.util.Collection; import java.util.NoSuchElementException; public class RecursiveEntryIterator implements Iterator > { private Deque > mIterStack; private Deque mIndexStack; private DummyEntry mNextEntry; private boolean 
mbIncludeCollection; protected RecursiveEntryIterator ( boolean bIncludeCollection ) { this.mbIncludeCollection = bIncludeCollection; this.mIterStack = new ArrayDeque<>(); this.mIndexStack = new ArrayDeque<>(); this.mIndexStack.push( 0 ); } public RecursiveEntryIterator( Map map, boolean bIncludeCollection ) { this( bIncludeCollection ); this.mIterStack.push( map.entrySet().iterator() ); this.advance(); } public RecursiveEntryIterator( Map map ) { this( map, true ); } public RecursiveEntryIterator( Collection collection ) { this( true ); this.mIterStack.push( collection.iterator() ); this.advance(); } @SuppressWarnings( "unchecked" ) private void advance() { this.mNextEntry = null; while ( !this.mIterStack.isEmpty() ) { Iterator iterator = this.mIterStack.peek(); if ( iterator.hasNext() ) { Object next = iterator.next(); if ( next instanceof Map.Entry ) { Map.Entry entry = (Map.Entry) next; Object value = entry.getValue(); if ( value instanceof Map ) { this.mIterStack.push( ((Map) value).entrySet().iterator() ); this.mIndexStack.push(0); } else if ( value instanceof Collection && this.mbIncludeCollection ) { this.mIterStack.push( ((Collection) value).iterator() ); this.mIndexStack.push(0); } else { //this.mNextEntry = new KeyValue<>( entry.getKey(), value ); this.updateNextEntryCursor( entry.getKey(), (V)value ); this.updateIndex(); break; } } else if ( next instanceof Map ) { this.mIterStack.push( ((Map) next).entrySet().iterator() ); this.mIndexStack.push(0); } else if ( next instanceof Collection && this.mbIncludeCollection ) { this.mIterStack.push(((Collection) next).iterator()); this.mIndexStack.push(0); } else { //this.mNextEntry = new KeyValue<>( this.mIndexStack.peek(), next ); this.updateNextEntryCursor( this.mIndexStack.peek(), (V)next ); this.updateIndex(); break; } } else { this.mIterStack.pop(); this.mIndexStack.pop(); this.updateIndex(); } } } protected void updateNextEntryCursor( Object key, V value ) { if( this.mNextEntry == null ) { this.mNextEntry = new DummyEntry( key, value ); } this.mNextEntry.apply( key, value ); } protected void updateIndex() { if ( !this.mIndexStack.isEmpty() ) { int currentIndex = this.mIndexStack.pop(); this.mIndexStack.push( currentIndex + 1 ); } } @Override public boolean hasNext() { return this.mNextEntry != null; } @Override public Map.Entry next() { if ( this.mNextEntry == null ) { throw new NoSuchElementException(); } Map.Entry result = this.mNextEntry; this.advance(); return result; } class DummyEntry extends KeyValue { public DummyEntry( Object key, V value ) { super( key, value ); } public void setKey( Object key ) { this.key = key; } public void apply( Object key, V value ) { this.key = key; this.value = value; } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/RecursiveFamilyIterator.java ================================================ package com.pinecone.framework.unit.tabulate; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.StringUtils; import java.util.Map; import java.util.Iterator; import java.util.Deque; import java.util.ArrayDeque; import java.util.Collection; import java.util.NoSuchElementException; public class RecursiveFamilyIterator implements FamilyIterator { private Deque > mIterStack; private Deque mIndexStack; private Deque mParentKeyStack; private Deque mParentStack; private DummyFamilyNode mNextNode; private boolean mbIncludeCollection; protected RecursiveFamilyIterator( boolean bIncludeCollection ) { 
this.mbIncludeCollection = bIncludeCollection; this.mIterStack = new ArrayDeque<>(); this.mIndexStack = new ArrayDeque<>(); this.mParentStack = new ArrayDeque<>(); this.mParentKeyStack = new ArrayDeque<>(); this.mIndexStack.push(0); this.mParentStack.push( new DummyFamilyNode( null, null ) ); // ArrayDeque does not accept null elements; push a dummy node instead.
this.mParentKeyStack.push( "" ); } public RecursiveFamilyIterator( Map map, boolean bIncludeCollection ) { this( bIncludeCollection ); this.mIterStack.push(map.entrySet().iterator()); this.advance(); } public RecursiveFamilyIterator( Map map ) { this( map, true ); } public RecursiveFamilyIterator( Collection collection ) { this( true ); this.mIterStack.push( collection.iterator() ); this.advance(); } @SuppressWarnings( "unchecked" ) private void advance() { Object selfKey = null; Object parentKey = null; this.mNextNode = null; while ( !this.mIterStack.isEmpty() ) { Iterator iterator = this.mIterStack.peek(); DummyFamilyNode parent = this.mParentStack.peek(); parentKey = this.mParentKeyStack.peek(); if( StringUtils.isEmpty( parentKey ) ) { parentKey = null; } if( parent != null ) { if( parent.getEntry() == null ) { parent = null; selfKey = null; } else { selfKey = parent.getEntry().getKey(); } } if ( iterator.hasNext() ) { Object next = iterator.next(); if ( next instanceof Map.Entry ) { Map.Entry entry = (Map.Entry) next; Object value = entry.getValue(); if ( value instanceof Map ) { this.mIterStack.push(((Map) value).entrySet().iterator()); this.mIndexStack.push(0); Object thisKey = entry.getKey(); this.mParentKeyStack.push( thisKey ); this.mParentStack.push( new DummyFamilyNode( parent, parentKey, thisKey, (V)entry.getValue() ) ); } else if ( value instanceof Collection && this.mbIncludeCollection ) { this.mIterStack.push(((Collection) value).iterator()); this.mIndexStack.push(0); Object thisKey = entry.getKey(); this.mParentKeyStack.push( thisKey ); this.mParentStack.push( new DummyFamilyNode( parent, parentKey, thisKey, (V)entry.getValue() ) ); } else { this.updateNextNodeCursor( parent, selfKey, entry.getKey(), (V)value ); this.updateIndex(); break; } } else if ( next instanceof Map ) { Object thisKey = this.mIndexStack.getFirst(); this.mParentKeyStack.push( thisKey ); this.mParentStack.push( new DummyFamilyNode( parent, parentKey, thisKey, (V)next ) ); this.mIterStack.push(((Map) next).entrySet().iterator()); this.mIndexStack.push(0); } else if ( next instanceof Collection && this.mbIncludeCollection ) { Object thisKey = this.mIndexStack.getFirst(); this.mParentKeyStack.push( thisKey ); this.mParentStack.push( new DummyFamilyNode( parent, parentKey, thisKey, (V)next ) ); this.mIterStack.push(((Collection) next).iterator()); this.mIndexStack.push(0); } else { this.updateNextNodeCursor( parent, selfKey, this.mIndexStack.peek(), (V)next ); this.updateIndex(); break; } } else { this.mIterStack.pop(); this.mIndexStack.pop(); this.mParentStack.pop(); this.mParentKeyStack.pop(); this.updateIndex(); } } } protected void updateNextNodeCursor( UnitFamilyNode parent, Object selfKey, Object key, V value ) { if ( this.mNextNode == null ) { this.mNextNode = new DummyFamilyNode( parent, selfKey, key, value ); } else { this.mNextNode.apply( parent, selfKey, key, value ); } } protected void updateIndex() { if ( !this.mIndexStack.isEmpty() ) { int currentIndex = this.mIndexStack.pop(); this.mIndexStack.push(currentIndex + 1); } } @Override public boolean hasNext() { return this.mNextNode != null; } @Override public UnitFamilyNode next() { if ( this.mNextNode == null ) {
throw new NoSuchElementException(); } DummyFamilyNode result = this.mNextNode; this.advance(); return result; } class DummyEntry extends KeyValue { public DummyEntry( Object key, V value ) { super( key, value ); } public void setKey( Object key ) { this.key = key; } public void apply( Object key, V value ) { this.key = key; this.value = value; } } class DummyFamilyNode implements UnitFamilyNode { UnitFamilyNode parent; Object selfKey; DummyEntry entry; public DummyFamilyNode( UnitFamilyNode parent, Object selfKey ) { this.entry = null; this.parent = parent; this.selfKey = selfKey; } public DummyFamilyNode( UnitFamilyNode parent, Object selfKey, Object entryKey, V entryValue ) { this.entry = new DummyEntry( entryKey, entryValue ); this.parent = parent; this.selfKey = selfKey; } public void setKey( Object key ) { this.entry.setKey( key ); } public void apply( UnitFamilyNode parent, Object selfKey, Object key, V value ) { this.parent = parent; this.selfKey = selfKey; this.entry.setKey( key ); this.entry.setValue( value ); } @Override public UnitFamilyNode parent() { return this.parent; } @Override public Object getSelfKey() { return this.selfKey; } @Override public Map.Entry getEntry() { return this.entry; } @Override public String toString() { return this.toJSONString(); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/TypedNamespaceFamilyEntryNameEncoder.java ================================================ package com.pinecone.framework.unit.tabulate; import java.util.List; import java.util.Set; public class TypedNamespaceFamilyEntryNameEncoder extends GenericNamespaceFamilyEntryNameEncoder { protected String mszFmtTypeLabel; public TypedNamespaceFamilyEntryNameEncoder( String szSeparator, boolean bNameForValue, String szFmtTypeLabel ) { super( szSeparator, bNameForValue ); this.mszFmtTypeLabel = szFmtTypeLabel; } public TypedNamespaceFamilyEntryNameEncoder( String szSeparator, boolean bNameForValue ) { this( szSeparator, bNameForValue, "$" ); } public TypedNamespaceFamilyEntryNameEncoder( boolean bNameForValue ) { this( "::", bNameForValue, "$" ); } public TypedNamespaceFamilyEntryNameEncoder( String szFmtTypeLabel ) { this( "::", false, szFmtTypeLabel ); } public TypedNamespaceFamilyEntryNameEncoder() { this( "::", false ); } @Override protected String transferName( String szBad ) { return szBad; // TODO } protected String queryType( Object val ) { if( val instanceof List ) { return "list"; } else if( val instanceof Set) { return "set"; } return ""; } @Override protected String wrapGetCurrentNodeName( UnitFamilyNode node ) { Object k = node.getSelfKey(); if( k == null ) { k = ""; } String szType = ""; if( node.parent() != null ) { szType = this.queryType( node.parent().getEntry().getValue() ); if( !szType.isEmpty() ) { szType = this.mszFmtTypeLabel + szType; } } return this.transferName( k.toString() + szType ); } @Override protected String wrapGetCurrentEntryKey( UnitFamilyNode node ) { Object k = node.getEntry().getKey(); if( k == null ) { k = ""; } String szType = this.queryType( node.getEntry().getValue() ); if( !szType.isEmpty() ) { szType = this.mszFmtTypeLabel + szType; } return this.transferName( k.toString() + szType ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/UnitFamilyNode.java ================================================ package com.pinecone.framework.unit.tabulate; import 
com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import java.util.Map; public interface UnitFamilyNode extends Pinenut { UnitFamilyNode parent(); K getSelfKey(); Map.Entry getEntry(); default String namespacify( String szSeparator, boolean bNameForValue ) { return FamilyEntryNameEncoder.DefaultEncoder.encode( this, szSeparator, bNameForValue ); } default String namespacify( String szSeparator ) { return this.namespacify( szSeparator, false ); } default String namespacify( boolean bNameForValue ) { return this.namespacify( "::", bNameForValue ); } default String namespacify() { return this.namespacify( false ); } @Override default String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "class", this.className() ), new KeyValue<>( "key", this.getSelfKey() ), new KeyValue<>( "entry", this.getEntry() ) } ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/HeapTopper.java ================================================ package com.pinecone.framework.unit.top; import com.pinecone.framework.util.json.JSON; import java.util.AbstractCollection; import java.util.PriorityQueue; import java.util.Comparator; import java.util.Collection; import java.util.Iterator; public class HeapTopper extends AbstractCollection implements Topper { private int mnTopmost; private final PriorityQueue mHeap; private final Comparator mComparator; public HeapTopper( int nTopmost, Comparator comparator ) { this.mnTopmost = nTopmost; this.mComparator = comparator; this.mHeap = new PriorityQueue<>( nTopmost, comparator ); } public HeapTopper( int nTopmost ) { this( nTopmost, new Comparator() { @Override @SuppressWarnings( "unchecked" ) public int compare( E o1, E o2 ) { return ( (Comparable)o1 ).compareTo( o2 ); } } ); } @Override public int size() { return this.mHeap.size(); } @Override public boolean isEmpty() { return this.mHeap.isEmpty(); } @Override public void clear() { this.mHeap.clear(); } @Override public boolean add( E e ) { if ( this.mHeap.size() < this.mnTopmost ) { this.mHeap.offer(e); } else if ( this.mComparator.compare( e, this.mHeap.peek() ) > 0 ) { this.mHeap.poll(); this.mHeap.offer(e); } return true; } @Override public boolean addAll( Collection c ) { for( E e : c ) { this.add( e ); } return true; } @Override public boolean removeAll( Collection c ) { return this.mHeap.removeAll( c ); } @Override public boolean retainAll( Collection c ) { return this.mHeap.retainAll( c ); } @Override public boolean remove( Object o ) { return this.mHeap.remove( o ); } @Override public Collection topmost() { return this.mHeap; } @Override public Topper setTopmostSize( int nTopmost ) { this.mnTopmost = nTopmost; while ( this.mHeap.size() > nTopmost ) { this.mHeap.poll(); } return this; } @Override public int getTopmostSize() { return this.mnTopmost; } @Override public E nextEviction() { return this.mHeap.peek(); } @Override public boolean willAccept( E e ) { return this.mHeap.size() < this.mnTopmost || this.mComparator.compare( e, this.mHeap.peek() ) > 0; } @Override public boolean containsKey( Object key ) { if( key instanceof Number ) { int i = ((Number) key).intValue(); return this.getTopmostSize() > i; } return false; } @Override public boolean containsAll( Collection c ) { return this.mHeap.containsAll( c ); } @Override public boolean contains( Object o ) { return this.topmost().contains( o ); } @Override 
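/*
 * Usage sketch (illustrative only; assumes HeapTopper is generic in its element type):
 * retain the greatest 3 elements of a stream via the natural-ordering constructor.
 *
 *   Topper<Integer> top3 = new HeapTopper<>( 3 );
 *   top3.addAll( java.util.List.of( 5, 1, 9, 7, 3 ) );
 *   // top3.topmost() now holds { 5, 7, 9 }; nextEviction() == 5, the weakest survivor
 */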
public boolean hasOwnProperty( Object elm ) { return this.contains( elm ); } @Override public String toJSONString() { return JSON.stringify( this ); } @Override public String toString() { return this.toJSONString(); } @Override public Iterator iterator() { return this.topmost().iterator(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/LinkedMultiTreeToptron.java ================================================ package com.pinecone.framework.unit.top; import com.pinecone.framework.unit.LinkedTreeMap; import java.util.Collection; import java.util.NavigableMap; import java.util.Set; public class LinkedMultiTreeToptron extends MultiTreeToptron { protected LinkedMultiTreeToptron( int nTopmost, NavigableMap > coreMap, MultiToptronValueAdapter valueAdapter, TopmostSelector > selector ) { super( nTopmost, coreMap, valueAdapter, selector ); } public LinkedMultiTreeToptron( int nTopmost, MultiToptronValueAdapter valueAdapter, TopmostSelector > selector, boolean accessOrder ) { this( nTopmost, new LinkedTreeMap<>( accessOrder ), valueAdapter, selector ); } public LinkedMultiTreeToptron( int nTopmost, TopmostSelector > selector, boolean accessOrder ) { this( nTopmost, MultiTreeToptron.newLinkdedHashValueAdapter(), selector, accessOrder ); } public LinkedMultiTreeToptron( int nTopmost, boolean accessOrder ) { this( nTopmost, MultiTreeToptron.newLinkdedHashValueAdapter(), TopmostSelector.newGenericGreatestSelector( false ), accessOrder ); } public LinkedMultiTreeToptron( int nTopmost ) { this( nTopmost, true ); } @Override public LinkedTreeMap > getMap() { return ( LinkedTreeMap > ) this.mTopNCoreMap; } @Override public Set > > bottomEntrySet(){ return this.getMap().treeEntrySet(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/LinkedTreeToptron.java ================================================ package com.pinecone.framework.unit.top; import com.pinecone.framework.unit.LinkedTreeMap; import java.util.Map; import java.util.NavigableMap; import java.util.Set; public class LinkedTreeToptron extends TreeToptron { protected LinkedTreeToptron( int nTopmost, NavigableMap map, TopmostSelector selector ) { super( nTopmost, map, selector ); } public LinkedTreeToptron( int nTopmost, TopmostSelector selector ) { this( nTopmost, new LinkedTreeMap<>( selector ), selector ); } public LinkedTreeToptron( int nTopmost, TopmostSelector selector, boolean accessOrder ) { this( nTopmost, new LinkedTreeMap<>( selector, accessOrder ), selector ); } public LinkedTreeToptron( int nTopmost, boolean accessOrder ) { this( nTopmost, new LinkedTreeMap<>( accessOrder ), TopmostSelector.newGenericGreatestSelector( false ) ); } public LinkedTreeToptron( int nTopmost ) { this( nTopmost, true ); } @Override public LinkedTreeMap getMap() { return ( LinkedTreeMap ) this.mTopNCoreMap; } @Override public Set > bottomEntrySet(){ return this.getMap().treeEntrySet(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/MultiToptronValueAdapter.java ================================================ package com.pinecone.framework.unit.top; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Collection; public interface MultiToptronValueAdapter extends Pinenut { Collection newCollection(); } ================================================ FILE: 
Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/MultiTreeToptron.java ================================================
package com.pinecone.framework.unit.top; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.unit.MultiValueMapper; import com.pinecone.framework.unit.TreeMap; import com.pinecone.framework.unit.Units; import com.pinecone.framework.unit.multi.MultiCollectionMaptron; import java.util.Collection; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.NavigableMap; /** * Pinecone Ursus For Java, MultiTreeToptron: For dynamic top-N scenarios. * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * This tron does not wrap returned `Collection`s in read-only views. * WARNING: all methods that return a `Collection` must be treated as read-only by the caller, otherwise the element-size bookkeeping will be corrupted. * ***************************************************************************************** * @param <K> The key type, which should implement Comparable in principle. * @param <V> The value type */ public class MultiTreeToptron implements ToptronMultiMap { protected static MultiToptronValueAdapter newLinkdedHashValueAdapter() { return new MultiToptronValueAdapter() { @Override public Collection newCollection() { return new LinkedHashSet<>(); } }; } protected final NavigableMap > mTopNCoreMap; protected final MultiValueMapper mTopNMap; protected int mnTopmost; protected int mnElementSize; protected TopmostSelector > mSelector; protected MultiTreeToptron( int nTopmost, NavigableMap > coreMap, MultiToptronValueAdapter valueAdapter, TopmostSelector > selector ) { this.mTopNCoreMap = coreMap; this.mTopNMap = new MultiCollectionMaptron<>( this.mTopNCoreMap, true ){ @Override protected Collection newCollection() { return valueAdapter.newCollection(); } }; this.mnTopmost = nTopmost; this.mnElementSize = 0; this.mSelector = selector; } public MultiTreeToptron( int nTopmost, MultiToptronValueAdapter valueAdapter, TopmostSelector > selector ) { this( nTopmost, new TreeMap<>(), valueAdapter, selector ); } public MultiTreeToptron( int nTopmost, TopmostSelector > selector ) { this( nTopmost, MultiTreeToptron.newLinkdedHashValueAdapter(), selector ); } public MultiTreeToptron( int nTopmost ) { this( nTopmost, TopmostSelector.newGenericGreatestSelector( false ) ); } protected Map.Entry > getMostEntry() { return this.mSelector.getMostEntry( this.mTopNCoreMap ); } @Override public int getTopmostSize() { return this.mnTopmost; } protected void trim( int nNewTopmost ) { int det = this.mnTopmost - nNewTopmost; if( det > 0 ) { for ( int i = 0; i < det; ++i ) { Map.Entry > kv = this.getMostEntry(); this.erase( kv.getKey(), kv.getValue().iterator().next() ); } } } @Override public MultiTreeToptron setTopmostSize( int nTopmost ) { this.trim( nTopmost ); this.mnTopmost = nTopmost; return this; } @Override public K nextEvictionKey() { Map.Entry > preElimination = this.getMostEntry(); if( preElimination != null ) { return preElimination.getKey(); } return null; } // In this context, exactly one single value is about to be inserted.
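/*
 * Usage sketch (illustrative only; assumes the stripped type parameters are <K, V>):
 * keep at most 2 elements overall, greatest keys win under the default selector.
 *
 *   MultiTreeToptron<Integer, String> top2 = new MultiTreeToptron<>( 2 );
 *   top2.add( 10, "a" );
 *   top2.add( 20, "b" );
 *   top2.add( 5,  "c" );   // rejected: 5 is below the current minimum key (10)
 *   top2.add( 30, "d" );   // accepted: evicts "a" under key 10
 *   // keySet() now: { 20, 30 }
 */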
@Override public boolean willAccept( K key ) { if ( this.size() >= this.mnTopmost ) { Collection c = this.mTopNCoreMap.get( key ); if ( c == null ) { Map.Entry > estEntry = this.getMostEntry(); return this.mSelector.selects( estEntry, key ); } } return true; } @Override public Map.Entry nextEviction() { Map.Entry > preElimination = this.getMostEntry(); if( preElimination != null ) { return new KeyValue<>( preElimination.getKey(), preElimination.getValue().iterator().next() ); } return null; } @Override public int size() { return this.mTopNMap.size(); } @Override public boolean isEmpty() { return this.mTopNMap.isEmpty(); } @Override public void clear() { this.mTopNMap.clear(); this.mnElementSize = 0; } @Override public V get( Object k, V v ) { return this.mTopNMap.get( k, v ); } @Override public Collection get( Object key ) { return this.mTopNMap.get( key ); } @Override public V getFirst( K key ) { return this.mTopNMap.getFirst( key ); } @Override public V add( K key, V value ) { if ( this.mnElementSize < this.mnTopmost ) { V v = this.mTopNMap.add( key, value ); if( v != null ) { ++this.mnElementSize; } return v; } else { Collection more = this.mTopNMap.get( key ); if ( more == null || !more.contains( value ) ) { Map.Entry > estEntry = this.getMostEntry(); if( this.mSelector.selects( estEntry, key ) ) { V oldestValue = estEntry.getValue().iterator().next(); K legacyKey = estEntry.getKey(); Collection legacy = this.mTopNMap.get( legacyKey ); legacy.remove( oldestValue ); if( legacy.isEmpty() ) { this.mTopNMap.remove( legacyKey ); } --this.mnElementSize; return this.add( key, value ); } } } return null; } @Override public V set( K k, V v ) { Collection legacy = this.mTopNMap.get( k ); if( legacy != null && !legacy.isEmpty() ) { if( this.mTopNMap.set( k, v ) != null ){ this.mnElementSize -= legacy.size(); ++this.mnElementSize; return v; } } return null; } @Override public void setAll( Map m ) { for( Map.Entry kv : m.entrySet() ) { this.add( kv.getKey(), kv.getValue() ); } } @Override public Collection put( K key, Collection values ) { Collection ret = Units.spawnExtendParent( values ); for( V v : values ) { if( this.add( key, v ) != null ) { ret.add( v ); } } if( ret.isEmpty() ) { return null; } return ret; } @Override public Collection putIfAbsent( K key, Collection value ) { if( !this.containsKey( key ) ) { return this.put( key, value ); } return null; } @Override public void putAll( Map > m ) { for( Map.Entry > kv : m.entrySet() ) { Collection c = kv.getValue(); for( V v : c ) { this.add( kv.getKey(), v ); } } } @Override public V erase( Object key, V value ) { Collection legacy = this.mTopNMap.get( key ); if ( legacy != null && legacy.contains( value ) ) { legacy.remove( value ); if ( legacy.isEmpty() ) { this.mTopNMap.remove( key ); } --this.mnElementSize; return value; } return null; } @Override @SuppressWarnings( "unchecked" ) public boolean remove( Object key, Object values ) { if( values instanceof Collection ) { Collection c = (Collection)values; boolean b = true; for( Object v : c ) { b = b & this.erase( key, (V)v ) != null; } return b; } return false; } @Override public Collection remove( Object key ) { Collection legacy = this.mTopNMap.remove( key ); if( legacy != null ) { this.mnElementSize -= legacy.size(); } /* Guard: the key may be absent. */ return legacy; } @Override public boolean containsKey( Object key ) { return this.mTopNMap.containsKey( key ); } @Override public boolean containsValue( Object value ) { return false; /* Value containment lookup is unsupported; always false. */ } public V update( K oldKey, K newKey, V value ) { this.erase( oldKey, value ); /* erase() removes a single value; remove( key, values ) expects a Collection and would be a no-op here. */ return this.add( newKey, value );
} public Collection update( K oldKey, K newKey ) { Collection legacy = this.mTopNMap.get( oldKey ); if( legacy == null ) { return null; } /* Guard: the key may be absent. */ this.mTopNMap.remove( oldKey ); return this.mTopNMap.puts( newKey, legacy ); } public int elementSize() { return this.mnElementSize; } @Override public Set keySet() { return this.mTopNMap.keySet(); } @Override public Map toSingleValueMap() { return this.mTopNMap.toSingleValueMap(); } @Override public Collection > collection() { return this.mTopNMap.collection(); } @Override public Collection collectionValues() { return this.mTopNMap.collectionValues(); } @Override @SuppressWarnings( "unchecked" ) public Set > > entrySet() { return (Set > >)this.mTopNMap.entrySet(); } @Override @SuppressWarnings( "unchecked" ) public Collection > values() { return (Collection >)this.mTopNMap.values(); } public NavigableMap > getMap() { return this.mTopNCoreMap; } public Set > > topEntrySet(){ return this.getMap().descendingMap().entrySet(); } public Set > > bottomEntrySet(){ return this.getMap().entrySet(); } }
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/TopmostSelector.java ================================================
package com.pinecone.framework.unit.top; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Comparator; import java.util.Map; import java.util.NavigableMap; public interface TopmostSelector extends Pinenut, Comparator { Map.Entry getMostEntry( NavigableMap map ); // Selects the candidate if it meets the qualification.
default boolean selects ( Map.Entry most, Map.Entry candidate ) { return this.selects( most, candidate.getKey() ); } boolean selects ( Map.Entry most, K candidateKey ); @Override @SuppressWarnings("unchecked") default int compare( Object o1, Object o2 ) { return ( (Comparable)o1 ).compareTo( o2 ); } // Selects the greatest top-N elements.
static TopmostSelector newGenericGreatestSelector( boolean bInsertDirectly ) { return new TopmostSelector<>() { @Override public Map.Entry getMostEntry( NavigableMap map ) { return map.firstEntry(); } @Override @SuppressWarnings("unchecked") public boolean selects( Map.Entry most, K candidateKey ) { if( bInsertDirectly ) { return true; } return ( (Comparable)most.getKey() ).compareTo( candidateKey ) < 0; // most < candidate
} }; } // Selects the smallest top-N elements.
static TopmostSelector newGenericSmallestSelector( boolean bInsertDirectly ) { return new TopmostSelector<>() { @Override public Map.Entry getMostEntry( NavigableMap map ) { return map.lastEntry(); } @Override @SuppressWarnings("unchecked") public boolean selects( Map.Entry most, K candidateKey ) { if( bInsertDirectly ) { return true; } return ( (Comparable)most.getKey() ).compareTo( candidateKey ) > 0; // most > candidate
} }; } }
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/Topper.java ================================================
package com.pinecone.framework.unit.top; import com.pinecone.framework.system.prototype.PineUnit; import java.util.Collection; import java.util.Iterator; public interface Topper extends PineUnit, Collection { @Override int size(); @Override boolean isEmpty(); @Override void clear(); @Override boolean add( E e ); Collection topmost(); Topper setTopmostSize( int nTopmost ); int getTopmostSize(); E nextEviction(); boolean willAccept( E e ); @Override default Iterator iterator() { return this.topmost().iterator(); } }
================================================ FILE:
Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/Toptron.java ================================================ package com.pinecone.framework.unit.top; import com.pinecone.framework.system.prototype.PineUnit; import java.util.NavigableMap; public interface Toptron extends PineUnit { int size(); boolean isEmpty(); void clear(); boolean containsKey( Object key ); boolean containsValue( Object val ); Object get( Object key ); V add( K key, V value ); Toptron setTopmostSize(int nTopmost ); int getTopmostSize(); NavigableMap getMap(); V update( K oldKey, K newKey, V value ) ; Object update( K oldKey, K newKey ) ; K nextEvictionKey(); boolean willAccept( K key ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/ToptronMap.java ================================================ package com.pinecone.framework.unit.top; import java.util.Map; import java.util.NavigableMap; public interface ToptronMap extends Map, Toptron { @Override V get( Object key ); @Override default V put( K key, V value ) { return this.add( key, value ); } @Override V update( K oldKey, K newKey ) ; @Override ToptronMap setTopmostSize( int nTopmost ); @Override NavigableMap getMap(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/ToptronMultiMap.java ================================================ package com.pinecone.framework.unit.top; import com.pinecone.framework.unit.multi.MultiCollectionMap; import java.util.Collection; import java.util.Map; import java.util.NavigableMap; public interface ToptronMultiMap extends MultiCollectionMap, Toptron { Collection get( Object key ); ToptronMultiMap setTopmostSize( int nTopmost ); NavigableMap > getMap(); @Override Collection update( K oldKey, K newKey ) ; Map.Entry nextEviction(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/TreeToptron.java ================================================ package com.pinecone.framework.unit.top; import com.pinecone.framework.util.json.JSON; import java.util.Collection; import java.util.Map; import java.util.TreeMap; import java.util.Set; import java.util.NavigableMap; public class TreeToptron implements ToptronMap { protected final NavigableMap mTopNCoreMap; protected int mnTopmost; protected TopmostSelector mSelector; public TreeToptron( int nTopmost, NavigableMap map, TopmostSelector selector ) { this.mnTopmost = nTopmost; this.mTopNCoreMap = map; this.mSelector = selector; } public TreeToptron( int nTopmost, TopmostSelector selector ) { this( nTopmost, new TreeMap<>( selector ), selector ); } public TreeToptron( int nTopmost ) { this( nTopmost, TopmostSelector.newGenericSmallestSelector( false ) ); } protected Map.Entry getMostEntry() { return this.mSelector.getMostEntry( this.mTopNCoreMap ); } protected void trim( int nNewTopmost ) { int det = this.mnTopmost - nNewTopmost; if( det > 0 ) { for ( int i = 0; i < det; ++i ) { this.remove( this.getMostEntry().getKey() ); } } } @Override public TreeToptron setTopmostSize( int nTopmost ) { this.trim( nTopmost ); this.mnTopmost = nTopmost; return this; } @Override public int getTopmostSize() { return this.mnTopmost; } @Override public int size() { return this.mTopNCoreMap.size(); } @Override public boolean isEmpty() { return this.mTopNCoreMap.isEmpty(); } @Override public void clear() { this.mTopNCoreMap.clear(); } @Override public boolean containsKey( Object key ) { 
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/AbstractTrieMap.java
================================================
package com.pinecone.framework.unit.trie;

public abstract class AbstractTrieMap<K, V> implements TrieMap<K, V> {
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/ArchTrieNode.java
================================================
package com.pinecone.framework.unit.trie;

public abstract class ArchTrieNode<V> implements TrieNode<V> {
    protected String mszKey;
    protected TrieNode<V> mParent;
    protected TrieMap<?, V> mTrieMap;

    @SuppressWarnings( "unchecked" )
    public ArchTrieNode( String szKey, TrieNode<V> parent, TrieMap<?, ?> map ) {
        this.mParent = parent;
        this.mTrieMap = (TrieMap<?, V>) map;
        this.mszKey = szKey;
    }

    @Override public boolean isLeaf() { return false; }
    @Override public String getNodeName() { return this.mszKey; }

    @Override
    public String getNamespace() {
        TrieNode<V> p = this.mParent;
        StringBuilder sb = new StringBuilder();
        String separator = this.getTrieMap().getSeparator();
        if( p == null || p.parent() == null ) {
            return null;
        }
        while ( true ) {
            if ( sb.length() > 0 ) {
                sb.insert( 0, separator );
            }
            sb.insert( 0, p.getNodeName() );
            p = p.parent();
            if( p == null || p.parent() == null ) {
                break;
            }
        }
        return sb.toString();
    }

    @Override
    public String getFullName() {
        String ns = this.getNamespace();
        if( ns != null ) {
            return ns + this.getTrieMap().getSeparator() + this.getNodeName();
        }
        return this.getNodeName();
    }

    @Override public TrieNode<V> parent() { return this.mParent; }
    @Override public TrieMap<?, V> getTrieMap() { return this.mTrieMap; }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/DirectoryNode.java
================================================
package com.pinecone.framework.unit.trie;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

public interface DirectoryNode<V> extends TrieNode<V> {
    Map<String, TrieNode<V>> children();
    Map<String, TrieNode<V>> segmentMap();

    default DirectoryNode<V> getDirectory( String szSegName ) {
        TrieNode<V> n = this.get( szSegName );
        if( n != null ) {
            return n.evinceDirectory();
        }
        return null;
    }

    default ValueNode<V> getValue( String szSegName ) {
        TrieNode<V> n = this.get( szSegName );
        if( n != null ) {
            return n.evinceValue();
        }
        return null;
    }

    default ReparseNode<V> getReparse( String szSegName ) {
        TrieNode<V> n = this.get( szSegName );
        if( n != null ) {
            return n.evinceReparse();
        }
        return null;
    }

    TrieNode<V> get( String szSegName );
    void put( String szSegName, TrieNode<V> node );
    void putIfAbsent( String szSegName, TrieNode<V> node );
    TrieNode<V> remove( String szSegName );

    boolean isEmpty();
    int size();
    int childrenLeafSize();
    void purge();

    Set<Map.Entry<String, TrieNode<V>>> entrySet();

    default Collection<TrieNode<V>> values() { return this.children().values(); }
    default Set<String> keySet() { return this.children().keySet(); }
    default Collection<TrieNode<V>> listItems() { return this.values(); }

    List<ValueNode<V>> listValueNodes();
    List<V> listValues();
    List<DirectoryNode<V>> listDirectories();

    @Override default DirectoryNode<V> evinceDirectory() { return this; }
    @Override default String getTypeName() { return DirectoryNode.class.getSimpleName(); }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/GenericDirectoryNode.java
================================================
package com.pinecone.framework.unit.trie;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.pinecone.framework.unit.KeyValue;
import com.pinecone.framework.util.json.JSONEncoder;

public class GenericDirectoryNode<V> extends ArchTrieNode<V> implements DirectoryNode<V> {
    protected Map<String, TrieNode<V>> mChildren;

    /** Root constructor. */
    public GenericDirectoryNode( Map<String, TrieNode<V>> children, TrieMap<?, ?> map ) {
        this( null, children, null, map );
    }

    public GenericDirectoryNode( String szKey, Map<String, TrieNode<V>> children, TrieNode<V> parent, TrieMap<?, ?> map ) {
        super( szKey, parent, map );
        this.mChildren = children;
    }

    @Override
    public int childrenLeafSize() {
        int leafCount = 0;
        for ( TrieNode<V> child : this.mChildren.values() ) {
            DirectoryNode<V> directoryNode = child.evinceDirectory();
            if ( directoryNode != null ) {
                leafCount += directoryNode.childrenLeafSize();
            }
            else if ( child.isLeaf() ) {
                ++leafCount;
            }
        }
        return leafCount;
    }

    @Override public Map<String, TrieNode<V>> children() { return this.mChildren; }
    @Override public Map<String, TrieNode<V>> segmentMap() { return this.mChildren; }
    @Override public TrieNode<V> get( String szSegName ) { return this.mChildren.get( szSegName ); }
    @Override public void put( String szSegName, TrieNode<V> node ) { this.mChildren.put( szSegName, node ); }
    @Override public void putIfAbsent( String szSegName, TrieNode<V> node ) { this.mChildren.putIfAbsent( szSegName, node ); }

    protected void notifyMapChildrenEliminated( int nFatalities ) {
        //( (UniTrieMaptron) this.mTrieMap ).notifyChildrenEliminated( nFatalities );
    }

    @Override
    public TrieNode<V> remove( String szSegName ) {
        // Cascading leaf statistics, kept for reference:
        // int nFatalities = 1;
        // DirectoryNode<V> childDir = this.getDirectory( szSegName );
        // if( childDir != null ) { nFatalities = childDir.childrenLeafSize(); }
        // TrieNode<V> legacy = this.mChildren.remove( szSegName );
        // this.notifyMapChildrenEliminated( nFatalities ); // Cascading leafs.
        // return legacy;
        return this.mChildren.remove( szSegName );
    }

    @Override public boolean isEmpty() { return this.mChildren.isEmpty(); }
    @Override public int size() { return this.mChildren.size(); }

    @Override
    public void purge() {
        //int nFatalities = this.childrenLeafSize();
        this.mChildren.clear();
        //this.notifyMapChildrenEliminated( nFatalities );
    }

    @Override public Set<Map.Entry<String, TrieNode<V>>> entrySet() { return this.mChildren.entrySet(); }

    @Override
    public List<ValueNode<V>> listValueNodes() {
        List<ValueNode<V>> list = new ArrayList<>();
        for( TrieNode<V> node : this.values() ) {
            ValueNode<V> vn = node.evinceValue();
            if( vn != null ) {
                list.add( vn );
            }
        }
        return list;
    }

    @Override
    public List<V> listValues() {
        List<V> list = new ArrayList<>();
        for( TrieNode<V> node : this.values() ) {
            ValueNode<V> vn = node.evinceValue();
            if( vn != null ) {
                list.add( vn.getValue() );
            }
        }
        return list;
    }

    @Override
    public List<DirectoryNode<V>> listDirectories() {
        List<DirectoryNode<V>> list = new ArrayList<>();
        for( TrieNode<V> node : this.values() ) {
            DirectoryNode<V> dir = node.evinceDirectory();
            if( dir != null ) {
                list.add( dir );
            }
        }
        return list;
    }

    @Override
    public String toJSONString() {
        return JSONEncoder.stringifyMapFormat( new KeyValue[]{
            new KeyValue<>( "FullName",     this.getFullName() ),
            new KeyValue<>( "Type",         DirectoryNode.class.getSimpleName() ),
            new KeyValue<>( "ChildrenSize", this.size() )
        } );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/GenericReparseNode.java
================================================
package com.pinecone.framework.unit.trie;

import com.pinecone.framework.unit.KeyValue;
import com.pinecone.framework.util.json.JSONEncoder;

public class GenericReparseNode<V> extends ArchTrieNode<V> implements ReparseNode<V> {
    protected String mszReparsePointer;

    public GenericReparseNode( String szKey, TrieNode<V> parent, String szReparsePointer, TrieMap<?, ?> trieMap ) {
        super( szKey, parent, trieMap );
        this.mszReparsePointer = szReparsePointer;
    }

    @Override public String getReparsePointer() { return this.mszReparsePointer; }
    @Override public void setReparsePointer( String path ) { this.mszReparsePointer = path; }

    @Override
    public TrieNode<V> reparse() {
        // Follows chained reparse points until a concrete node (or nothing) is revealed.
        String szReparsePointer = this.mszReparsePointer;
        while ( true ) {
            TrieNode<V> revealed = this.getTrieMap().queryNode( szReparsePointer );
            if( revealed != null ) {
                ReparseNode<V> reparsed = revealed.evinceReparse();
                if( reparsed != null ) {
                    szReparsePointer = reparsed.getReparsePointer();
                    continue;
                }
                return revealed;
            }
            else {
                return null;
            }
        }
    }

    @Override public boolean isLeaf() { return true; }

    @Override
    public String toString() {
        TrieNode<V> revealed = this.reparse();
        if( revealed != null ) {
            return revealed.toString();
        }
        return null;
    }

    @Override
    public String toJSONString() {
        return JSONEncoder.stringifyMapFormat( new KeyValue[]{
            new KeyValue<>( "FullName",     this.getFullName() ),
            new KeyValue<>( "Type",         ReparseNode.class.getSimpleName() ),
            new KeyValue<>( "ReparsePoint", this.getReparsePointer() )
        } );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/GenericValueNode.java
================================================
package com.pinecone.framework.unit.trie;

import com.pinecone.framework.util.json.JSON;

public class GenericValueNode<V> extends ArchTrieNode<V> implements ValueNode<V> {
    protected V value;

    public GenericValueNode( String szKey, V value, TrieNode<V> parent, TrieMap<?, ?> map ) {
        super( szKey, parent, map );
        this.value = value;
    }

    @Override public V getValue() { return this.value; }
    @Override public void setValue( V value ) { this.value = value; }
    @Override public boolean isLeaf() { return true; }
    @Override public String toString() { return this.value.toString(); }
    @Override public String toJSONString() { return JSON.stringify( this.value ); }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/IllegalOperationException.java
================================================
package com.pinecone.framework.unit.trie;

import com.pinecone.framework.system.PineRuntimeException;

public class IllegalOperationException extends PineRuntimeException {
    public IllegalOperationException() {
        super();
    }

    public IllegalOperationException( String message ) {
        super( message );
    }

    public IllegalOperationException( String message, Throwable cause ) {
        super( message, cause );
    }

    public IllegalOperationException( Throwable cause ) {
        super( cause );
    }

    protected IllegalOperationException( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {
        super( message, cause, enableSuppression, writableStackTrace );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/ReparseNode.java
================================================
package com.pinecone.framework.unit.trie;

public interface ReparseNode<V> extends TrieNode<V> {
    String getReparsePointer();
    void setReparsePointer( String path );
    TrieNode<V> reparse();

    @Override default ReparseNode<V> evinceReparse() { return this; }
    @Override default String getTypeName() { return ReparseNode.class.getSimpleName(); }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/SeparatedSegmentor.java
================================================
package com.pinecone.framework.unit.trie;

import java.util.regex.Pattern;

public class SeparatedSegmentor implements TrieSegmentor {
    protected String separator;

    public SeparatedSegmentor( String szSeparator ) {
        this.separator = szSeparator;
    }

    public SeparatedSegmentor() {
        this( "/" );
    }

    @Override
    public String[] segments( String szPathKey ) {
        // Quote the separator so regex metacharacters such as "." split literally.
        return szPathKey.split( Pattern.quote( this.separator ) );
    }

    @Override
    public String getSeparator() {
        return this.separator;
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/TrieMap.java
================================================
package com.pinecone.framework.unit.trie;

import com.pinecone.framework.system.prototype.PineUnit;

import java.util.Map;

public interface TrieMap<K, V> extends Map<K, V>, PineUnit {
    @Override
    V put( K key, V value );

    default Object putEntity( K key, Object value ) {
        return this.putEntity( key, value, false );
    }

    Object putEntity( K key, Object value, boolean isAbsent );

    @Override V get( Object key );
    @Override boolean containsKey( Object key );
    @Override V remove( Object key );
    @Override int size();
    @Override boolean isEmpty();

    TrieNode<V> queryNode( String path );
    String getSeparator();
    DirectoryNode<V> root();
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/TrieNode.java
================================================
package com.pinecone.framework.unit.trie;

import com.pinecone.framework.system.prototype.Pinenut;

public interface TrieNode<V> extends Pinenut {
    boolean isLeaf();
    TrieNode<V> parent();
    TrieMap<?, V> getTrieMap();

    String getNodeName();
    String getFullName();
    String getNamespace();

    default DirectoryNode<V> evinceDirectory() { return null; }
    default ValueNode<V> evinceValue() { return null; }
    default ReparseNode<V> evinceReparse() { return null; }

    String getTypeName();
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/TrieSegmentor.java
================================================
package com.pinecone.framework.unit.trie;

import com.pinecone.framework.system.prototype.Pinenut;

public interface TrieSegmentor extends Pinenut {
    TrieSegmentor PathSegmentor    = new SeparatedSegmentor();
    TrieSegmentor ObjectSegmentor  = new SeparatedSegmentor( "." );
    TrieSegmentor DefaultSegmentor = TrieSegmentor.PathSegmentor;

    String[] segments( String szPathKey );
    String getSeparator();
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/UniTrieMaptron.java
================================================
package com.pinecone.framework.unit.trie;

import java.util.AbstractCollection;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.Supplier;

import com.pinecone.framework.util.json.JSON;

/**
 * Pinecone Ursus For Java UniTrieMaptron
 * SharedList Author: Ken, DragonKing
 * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.
 * **********************************************************
 * Thanks for Ken's contribution.
 * **********************************************************
 */
public class UniTrieMaptron<K, V> extends AbstractTrieMap<K, V> implements TrieMap<K, V>, Cloneable {
    protected transient DirectoryNode<V> mRoot;
    protected final transient Supplier<Map<String, TrieNode<V>>> mMapSupplier;
    //protected transient int mnSize;
    protected transient TrieSegmentor mSegmentor;

    protected transient Set<Map.Entry<K, V>> mEntrySet;
    protected transient Set<K> mKeySet;
    protected transient Collection<V> mValues;

    public UniTrieMaptron( Supplier<Map<String, TrieNode<V>>> mapSupplier, TrieSegmentor segmentor ) {
        if ( mapSupplier == null ) {
            throw new IllegalArgumentException( "Map supplier cannot be null." );
        }
        this.mMapSupplier = mapSupplier;
        this.mRoot = new GenericDirectoryNode<>( this.mMapSupplier.get(), this );
        //this.mnSize = 0;
        this.mSegmentor = segmentor;
    }

    public UniTrieMaptron( Supplier<Map<String, TrieNode<V>>> mapSupplier ) {
        this( mapSupplier, TrieSegmentor.DefaultSegmentor );
    }

    public UniTrieMaptron( TrieSegmentor segmentor ) {
        this( TreeMap::new, segmentor );
    }

    public UniTrieMaptron() {
        this( (Supplier<Map<String, TrieNode<V>>>) TreeMap::new );
    }

    @SuppressWarnings( "unchecked" )
    protected V convertValue( Object value ) {
        return (V) value;
    }

    protected String getStringKey( Object key ) {
        if ( key instanceof String ) {
            return (String) key;
        }
        return key.toString();
    }

    @Override public boolean hasOwnProperty( Object elm ) { return this.containsKey( elm ); }

    @Override
    public V put( K key, V value ) {
        return this.putEntity0( key, value, false );
    }

    @Override
    public V putIfAbsent( K key, V value ) {
        return this.putEntity0( key, value, true );
    }

    public V makeSymbolic( K key, K target ) {
        ReparseNode<V> p = new GenericReparseNode<>( null, null, this.getStringKey( target ), this );
        return this.putEntity0( key, p, false );
    }

    protected V putEntity0( K key, Object value, boolean isAbsent ) {
        Object ret = this.putEntity( key, value, isAbsent );
        if ( ret instanceof TrieNode ) {
            return null;
        }
        return this.convertValue( ret );
    }

    @Override
    public Object putEntity( K key, Object value, boolean isAbsent ) {
        if ( key == null ) {
            throw new IllegalArgumentException( "Key cannot be null." );
        }
        String[] segments = this.mSegmentor.segments( this.getStringKey( key ) );
        TrieNode<V> node = this.mRoot;
        DirectoryNode<V> dir = this.mRoot;
        TrieNode<V> parent = this.mRoot;
        String szLeafKey = null;
        for ( int i = 0; i < segments.length; ++i ) {
            String segment = segments[ i ];
            if ( i < segments.length - 1 ) {
                node = dir.get( segment );
                if( node == null ) {
                    DirectoryNode<V> neo = new GenericDirectoryNode<>( segment, this.mMapSupplier.get(), parent, this );
                    dir.put( segment, neo );
                    node = neo;
                    dir = neo;
                }
                else {
                    dir = node.evinceDirectory();
                    if( dir == null ) {
                        throw new IllegalArgumentException( "Path `" + key + "` given is not a full-directory insertion path." );
                    }
                }
            }
            else { // Leaf node.
                szLeafKey = segment;
                node = dir.get( segment );
                if( node == null ) {
                    TrieNode<V> neo;
                    if ( value instanceof ReparseNode ) {
                        ReparseNode<?> dummy = (ReparseNode<?>) value;
                        neo = new GenericReparseNode<>( segment, dir, dummy.getReparsePointer(), this );
                    }
                    else {
                        neo = new GenericValueNode<>( segment, this.convertValue( value ), parent, this );
                    }
                    dir.put( segment, neo );
                    //++this.mnSize;
                    return neo; // Insertion.
                }
            }
            parent = node;
        }
        if ( isAbsent ) {
            return null;
        }
        // Modification.
        ValueNode<V> vn = node.evinceValue();
        if( vn != null ) {
            V legacyValue = vn.getValue();
            vn.setValue( this.convertValue( value ) );
            return legacyValue;
        }
        ReparseNode<V> rn = node.evinceReparse();
        if( rn != null ) {
            TrieNode<V> revealed = rn.reparse();
            if( revealed != null ) {
                vn = revealed.evinceValue();
                if( vn != null ) {
                    V legacyValue = vn.getValue();
                    vn.setValue( this.convertValue( value ) );
                    return legacyValue;
                }
            }
        }
        DirectoryNode<V> dn = node.evinceDirectory();
        if( dn != null ) {
            TrieNode<V> pp = dn.parent();
            if( pp == null ) {
                pp = this.mRoot;
            }
            DirectoryNode<V> pd = pp.evinceDirectory();
            pd.remove( szLeafKey );
            pd.put( szLeafKey, new GenericValueNode<>( dn.getNodeName(), this.convertValue( value ), pp, this ) );
        }
        return null;
    }

    @Override
    public V get( Object key ) {
        String szKey = this.getStringKey( key );
        TrieNode<V> node = this.queryNode( szKey );
        if ( node == null ) {
            return null;
        }
        ValueNode<V> vn = node.evinceValue();
        if ( vn != null ) {
            return vn.getValue();
        }
        ReparseNode<V> rp = node.evinceReparse();
        if ( rp != null ) {
            TrieNode<V> revealed = rp.reparse();
            if( revealed != null ) {
                vn = revealed.evinceValue();
                if( vn != null ) {
                    return vn.getValue();
                }
            }
        }
        return null;
    }

    @Override
    public boolean containsKey( Object key ) {
        String szKey = this.getStringKey( key );
        return this.queryNode( szKey ) != null;
    }

    @Override
    public boolean containsValue( Object value ) {
        return this.dfsContainsValue( this.mRoot, value );
    }

    private boolean dfsContainsValue( TrieNode<V> node, Object value ) {
        if ( node == null ) {
            return false;
        }
        DirectoryNode<V> directory = node.evinceDirectory();
        if ( directory != null ) {
            for ( TrieNode<V> childNode : directory.children().values() ) {
                if ( this.dfsContainsValue( childNode, value ) ) {
                    return true;
                }
            }
        }
        else {
            ValueNode<V> vn = node.evinceValue();
            if( vn != null ) {
                return vn.getValue().equals( value );
            }
            ReparseNode<V> rp = node.evinceReparse();
            if ( rp != null ) {
                TrieNode<V> revealed = rp.reparse();
                if( revealed != null ) {
                    vn = revealed.evinceValue();
                    if( vn != null ) {
                        return vn.getValue().equals( value );
                    }
                }
            }
        }
        return false;
    }

    @Override
    public V remove( Object key ) {
        String szKey = this.getStringKey( key );
        //return this.remove( this.mRoot, this.mSegmentor.segments( szKey ), 0 );
        return this.remove( this.mRoot, this.mSegmentor.segments( szKey ) );
    }

    protected V remove( TrieNode<V> startNode, String[] segments ) {
        if ( startNode == null || segments.length == 0 ) {
            return null;
        }
        TrieNode<V> node = startNode;
        DirectoryNode<V> directory;
        int depth = 0;
        while ( depth < segments.length ) {
            directory = node.evinceDirectory();
            if ( directory == null ) {
                return null;
            }
            String segment = segments[ depth ];
            TrieNode<V> childNode = directory.get( segment );
            if ( depth == segments.length - 1 ) {
                if ( childNode == null ) {
                    return null; // Illegal path.
                }
                directory.remove( segment ); // <= Fatalities statistics therein.
                ValueNode<V> valueNode = childNode.evinceValue();
                if ( valueNode != null ) {
                    return valueNode.getValue();
                }
                return null;
            }
            node = childNode;
            ++depth;
        }
        return null;
    }

    /*protected V remove( TrieNode<V> node, String[] segments, int depth ) {
        if ( node == null || depth >= segments.length ) {
            return null;
        }
        String segment = segments[ depth ];
        DirectoryNode<V> directory = node.evinceDirectory();
        if ( directory == null ) {
            return null;
        }
        TrieNode<V> childNode = directory.get( segment );
        if ( depth == segments.length - 1 ) {
            if ( childNode == null ) {
                return null; // Illegal path.
            }
            directory.remove( segment ); // <= Fatalities statistics therein.
            ValueNode<V> valueNode = childNode.evinceValue();
            if ( valueNode != null ) {
                return valueNode.getValue();
            }
            return null;
        }
        return this.remove( childNode, segments, depth + 1 );
    }*/

    @Override
    public void putAll( Map<? extends K, ? extends V> m ) {
        for ( Entry<? extends K, ? extends V> entry : m.entrySet() ) {
            this.put( entry.getKey(), entry.getValue() );
        }
    }

    @Override
    public void clear() {
        this.mRoot.segmentMap().clear();
        //this.mnSize = 0;
    }

    // protected void notifyChildrenEliminated( int nFatalities ) {
    //     this.mnSize -= nFatalities;
    // }

    @Override
    public DirectoryNode<V> root() {
        return this.mRoot;
    }

    @Override
    public int size() {
        return this.mRoot.childrenLeafSize();
        //return this.mnSize;
    }

    @Override
    public boolean isEmpty() {
        return this.mRoot.isEmpty();
    }

    @Override
    public TrieNode<V> queryNode( String path ) {
        String[] segments = this.mSegmentor.segments( path );
        DirectoryNode<V> dir = this.mRoot;
        TrieNode<V> node = this.mRoot;
        int is = 0;
        if ( segments.length > 1 && segments[ 0 ].isEmpty() ) { // "/xxx/xxx" => Skip first `/` => "xxx/xxx"
            is = 1;
        }
        for ( int i = is; i < segments.length; ++i ) {
            String segment = segments[ i ];
            if ( i < segments.length - 1 ) {
                node = dir.get( segment );
                if ( node == null ) {
                    return null;
                }
                dir = node.evinceDirectory();
                if( dir == null ) {
                    return null; // Illegal path.
                }
            }
            else {
                return dir.get( segment );
            }
        }
        return null;
    }

    @Override
    public Set<K> keySet() {
        Set<K> ks = this.mKeySet;
        return ( ks != null ) ? ks : ( this.mKeySet = new KeySet() );
    }

    @Override
    public Collection<V> values() {
        Collection<V> vs = this.mValues;
        if ( vs == null ) {
            vs = new Values();
            this.mValues = vs;
        }
        return vs;
    }

    @Override
    public Set<Entry<K, V>> entrySet() {
        Set<Entry<K, V>> es = this.mEntrySet;
        return ( es != null ) ? es : ( this.mEntrySet = new EntrySet() );
    }

    class EntrySet extends AbstractSet<Entry<K, V>> {
        @Override public Iterator<Entry<K, V>> iterator() { return new EntryIterator(); }
        @Override public int size() { return UniTrieMaptron.this.size(); }

        @Override
        public boolean contains( Object o ) {
            if ( !( o instanceof Map.Entry ) ) {
                return false;
            }
            Entry<?, ?> entry = (Entry<?, ?>) o;
            Object value = UniTrieMaptron.this.get( entry.getKey() );
            return Objects.equals( value, entry.getValue() );
        }

        @Override
        @SuppressWarnings( "unchecked" )
        public boolean remove( Object o ) {
            if ( !( o instanceof Map.Entry ) ) {
                return false;
            }
            Entry<?, ?> entry = (Entry<?, ?>) o;
            K key = (K) entry.getKey();
            V currentValue = UniTrieMaptron.this.get( key );
            if ( Objects.equals( currentValue, entry.getValue() ) ) {
                UniTrieMaptron.this.remove( key );
                return true;
            }
            return false;
        }

        @Override public void clear() { UniTrieMaptron.this.clear(); }
    }

    class EntryIterator implements Iterator<Entry<K, V>> {
        private final Map<String, TrieNode<V>> dummyTerminationMap = Map.of();
        private final Deque<Iterator<Entry<String, TrieNode<V>>>> stack;
        private final Deque<StringBuilder> pathStack;
        private Entry<K, V> nextEntry;
        private StringBuilder currentPath;

        public EntryIterator() {
            this.stack = new ArrayDeque<>();
            this.pathStack = new ArrayDeque<>();
            this.stack.push( UniTrieMaptron.this.mRoot.children().entrySet().iterator() );
            this.currentPath = new StringBuilder();
            this.advance();
        }

        @SuppressWarnings( "unchecked" )
        private void advance() {
            this.nextEntry = null;
            while ( !this.stack.isEmpty() ) {
                Iterator<Entry<String, TrieNode<V>>> iterator = this.stack.peek();
                if ( !iterator.hasNext() ) {
                    this.stack.pop();
                    if ( !this.pathStack.isEmpty() ) {
                        this.currentPath = this.pathStack.pop();
                    }
                    continue;
                }
                Entry<String, TrieNode<V>> entry = iterator.next();
                TrieNode<V> node = entry.getValue();
                String segment = entry.getKey();
                this.pathStack.push( new StringBuilder( this.currentPath ) );
                if ( this.currentPath.length() > 0 ) {
                    this.currentPath.append( UniTrieMaptron.this.mSegmentor.getSeparator() );
                }
                this.currentPath.append( segment );
                // while ( node.value instanceof TrieReparseNode ) {
                //     TrieReparseNode reparseNode = (TrieReparseNode) node.value;
                //     node = UniTrieMaptron.this.getNode( reparseNode.getPath() );
                //     if ( node == null ) { break; }
                // }
                if ( node == null ) {
                    continue;
                }
                DirectoryNode<V> dir = node.evinceDirectory();
                if( dir != null ) {
                    this.stack.push( dir.children().entrySet().iterator() );
                }
                else {
                    ValueNode<V> vn = node.evinceValue();
                    if( vn != null ) {
                        this.nextEntry = new AbstractMap.SimpleEntry<>( (K) this.currentPath.toString(), vn.getValue() );
                        this.stack.push( this.dummyTerminationMap.entrySet().iterator() );
                    }
                    ReparseNode<V> rn = node.evinceReparse();
                    if( rn != null ) {
                        this.nextEntry = new AbstractMap.SimpleEntry( this.currentPath.toString(), rn );
                        this.stack.push( this.dummyTerminationMap.entrySet().iterator() );
                    }
                    break;
                }
            }
        }

        @Override
        public boolean hasNext() {
            return this.nextEntry != null;
        }

        @Override
        public Entry<K, V> next() {
            if ( !this.hasNext() ) {
                throw new NoSuchElementException();
            }
            Entry<K, V> entry = this.nextEntry;
            this.advance();
            return entry;
        }
    }

    class KeySet extends AbstractSet<K> {
        @Override public Iterator<K> iterator() { return new KeyIterator(); }
        @Override public int size() { return UniTrieMaptron.this.size(); }
        @Override public boolean contains( Object o ) { return UniTrieMaptron.this.containsKey( o ); }
        @Override public boolean remove( Object o ) { return UniTrieMaptron.this.remove( o ) != null; }
        @Override public void clear() { UniTrieMaptron.this.clear(); }
    }

    class KeyIterator implements Iterator<K> {
        private final Iterator<Entry<K, V>> entryIterator;

        public KeyIterator() {
            this.entryIterator = UniTrieMaptron.this.entrySet().iterator();
        }

        @Override public boolean hasNext() { return this.entryIterator.hasNext(); }
        @Override public K next() { return this.entryIterator.next().getKey(); }
    }

    class Values extends AbstractCollection<V> {
        @Override
        public Iterator<V> iterator() {
            return new Iterator<V>() {
                private final Iterator<Entry<K, V>> entryIterator = UniTrieMaptron.this.entrySet().iterator();

                @Override public boolean hasNext() { return this.entryIterator.hasNext(); }
                @Override public V next() { return this.entryIterator.next().getValue(); }
            };
        }

        @Override public int size() { return UniTrieMaptron.this.size(); }
        @Override public boolean contains( Object o ) { return UniTrieMaptron.this.containsValue( o ); }
    }

    @Override
    public TrieMap<K, V> clone() {
        try {
            @SuppressWarnings( "unchecked" )
            UniTrieMaptron<K, V> clonedMap = (UniTrieMaptron<K, V>) super.clone();
            clonedMap.mRoot = this.cloneDirectoryNode( this.mRoot, clonedMap, null );
            clonedMap.mEntrySet = null;
            clonedMap.mKeySet = null;
            clonedMap.mValues = null;
            //clonedMap.mnSize = this.mnSize;
            return clonedMap;
        }
        catch ( CloneNotSupportedException e ) {
            throw new AssertionError( "Clone not supported", e );
        }
    }

    protected DirectoryNode<V> cloneDirectoryNode( DirectoryNode<V> original, TrieMap<?, ?> pm, TrieNode<V> parent ) {
        if ( original == null ) {
            return null;
        }
        Map<String, TrieNode<V>> clonedChildren = this.mMapSupplier.get();
        DirectoryNode<V> neo = new GenericDirectoryNode<>( original.getNodeName(), clonedChildren, parent, pm );
        for ( Map.Entry<String, TrieNode<V>> entry : original.children().entrySet() ) {
            TrieNode<V> clonedChild = this.cloneTrieNode( entry.getValue(), pm, neo );
            clonedChildren.put( entry.getKey(), clonedChild );
        }
        return neo;
    }

    protected TrieNode<V> cloneTrieNode( TrieNode<V> original, TrieMap<?, ?> pm, TrieNode<V> parent ) {
        if ( original == null ) {
            return null;
        }
        DirectoryNode<V> directoryNode = original.evinceDirectory();
        if ( directoryNode != null ) {
            return this.cloneDirectoryNode( directoryNode, pm, parent );
        }
        ValueNode<V> valueNode = original.evinceValue();
        if ( valueNode != null ) {
            return new GenericValueNode<>( original.getNodeName(), valueNode.getValue(), parent, pm );
        }
        ReparseNode<V> rp = original.evinceReparse();
        if ( rp != null ) {
            return new GenericReparseNode<>( original.getNodeName(), parent, rp.getReparsePointer(), pm );
        }
        return null;
    }

    @Override public String toJSONString() { return JSON.stringify( this ); }
    @Override public String toString() { return this.toJSONString(); }
    @Override public String getSeparator() { return this.mSegmentor.getSeparator(); }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/ValueNode.java
================================================
package com.pinecone.framework.unit.trie;

public interface ValueNode<V> extends TrieNode<V> {
    V getValue();
    void setValue( V value );

    @Override default ValueNode<V> evinceValue() { return this; }
    @Override default String getTypeName() { return ValueNode.class.getSimpleName(); }
}
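UniTrieMaptron stores hierarchical keys as a trie of directory, value and reparse nodes: put() materializes intermediate directories on demand, queryNode() exposes the raw node, and makeSymbolic() plants a reparse leaf that get() resolves transparently. A minimal sketch, assuming the default "/" segmentor:

    UniTrieMaptron<String, Integer> conf = new UniTrieMaptron<>();
    conf.put( "etc/hydra/port", 8080 );    // Creates "etc" and "etc/hydra" directories on demand.
    conf.put( "etc/hydra/mode", 2 );
    conf.get( "etc/hydra/port" );          // => 8080
    conf.queryNode( "etc/hydra" )
        .evinceDirectory().listValues();   // => [2, 8080], in TreeMap segment order.
    conf.makeSymbolic( "alias/port", "etc/hydra/port" );
    conf.get( "alias/port" );              // => 8080, resolved through the reparse chain.
    conf.size();                           // => 3 leaves, the symlink leaf included.

Note that size() walks the tree on every call, since the cached mnSize counter is commented out; the trade-off is that cascading removals need no bookkeeping.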
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Assert.java
================================================
package com.pinecone.framework.util;

import java.util.Collection;
import java.util.Map;

import com.pinecone.framework.system.AssertionRuntimeException;

public abstract class Assert {
    public Assert() {
    }

    public static void isTrue( boolean expression, String message ) {
        if ( !expression ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void isTrue( boolean expression ) {
        Assert.isTrue( expression, "[Assertion failed] - this expression must be true" );
    }

    public static void isNull( Object object, String message ) {
        if ( object != null ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void isNull( Object object ) {
        Assert.isNull( object, "[Assertion failed] - the object argument must be null" );
    }

    public static void notNull( Object object, String message ) {
        if ( object == null ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void notNull( Object object ) {
        Assert.notNull( object, "[Assertion failed] - this argument is required; it must not be null" );
    }

    public static void hasLength( String text, String message ) {
        if ( !StringUtils.hasLength( text ) ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void hasLength( String text ) {
        Assert.hasLength( text, "[Assertion failed] - this String argument must have length; it must not be null or empty" );
    }

    public static void hasText( String text, String message ) {
        if ( !StringUtils.hasText( text ) ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void hasText( String text ) {
        Assert.hasText( text, "[Assertion failed] - this String argument must have text; it must not be null, empty, or blank" );
    }

    public static void doesNotContain( String textToSearch, String substring, String message ) {
        if ( StringUtils.hasLength( textToSearch ) && StringUtils.hasLength( substring ) && textToSearch.contains( substring ) ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void doesNotContain( String textToSearch, String substring ) {
        Assert.doesNotContain( textToSearch, substring, "[Assertion failed] - this String argument must not contain the substring [" + substring + "]" );
    }

    public static void notEmpty( Object[] array, String message ) {
        if ( ObjectUtils.isEmpty( array ) ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void notEmpty( Object[] array ) {
        Assert.notEmpty( array, "[Assertion failed] - this array must not be empty: it must contain at least 1 element" );
    }

    public static void noNullElements( Object[] array, String message ) {
        if ( array != null ) {
            for ( Object element : array ) {
                if ( element == null ) {
                    throw new AssertionRuntimeException( message );
                }
            }
        }
    }

    public static void noNullElements( Object[] array ) {
        Assert.noNullElements( array, "[Assertion failed] - this array must not contain any null elements" );
    }

    public static void notEmpty( Collection<?> collection, String message ) {
        if ( CollectionUtils.isEmpty( collection ) ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void notEmpty( Collection<?> collection ) {
        Assert.notEmpty( collection, "[Assertion failed] - this collection must not be empty: it must contain at least 1 element" );
    }

    public static void notEmpty( Map<?, ?> map, String message ) {
        if ( CollectionUtils.isEmpty( map ) ) {
            throw new AssertionRuntimeException( message );
        }
    }

    public static void notEmpty( Map<?, ?> map ) {
        Assert.notEmpty( map, "[Assertion failed] - this map must not be empty; it must contain at least one entry" );
    }

    public static void isInstanceOf( Class<?> clazz, Object obj ) {
        Assert.isInstanceOf( clazz, obj, "" );
    }

    public static void isInstanceOf( Class<?> type, Object obj, String message ) {
        Assert.notNull( type, "Type to check against must not be null" );
        if ( !type.isInstance( obj ) ) {
            throw new AssertionRuntimeException( ( StringUtils.hasLength( message ) ? message + " " : "" )
                    + "Object of class [" + ( obj != null ? obj.getClass().getName() : "null" ) + "] must be an instance of " + type );
        }
    }

    public static void isAssignable( Class<?> superType, Class<?> subType ) {
        Assert.isAssignable( superType, subType, "" );
    }

    public static void isAssignable( Class<?> superType, Class<?> subType, String message ) {
        Assert.notNull( superType, "Type to check against must not be null" );
        if ( subType == null || !superType.isAssignableFrom( subType ) ) {
            throw new AssertionRuntimeException( message + subType + " is not assignable to " + superType );
        }
    }

    public static void state( boolean expression, String message ) {
        if ( !expression ) {
            throw new IllegalStateException( message );
        }
    }

    public static void state( boolean expression ) {
        Assert.state( expression, "[Assertion failed] - this state invariant must be true" );
    }

    public static void provokeIrrationally( Throwable bad ) {
        throw new AssertionRuntimeException( bad );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Bits.java
================================================
package com.pinecone.framework.util;

public final class Bits {
    /**
     * From Pinecone CPP,
     * Pinecone/Framework/Util/Bits/BitsProcessor.h
     * Pinecone/Framework/Util/Bits/BitsProcessor.cpp
     */

    // Reverse all bits (smallest unit: bit).
    private static final byte[] BitReverseTable256 = new byte[ 256 ];

    static {
        for ( int i = 0; i < 256; ++i ) {
            Bits.BitReverseTable256[ i ] = (byte) ( ((i & 0x01) << 7) | ((i & 0x02) << 5) | ((i & 0x04) << 3) | ((i & 0x08) << 1)
                                                  | ((i & 0x10) >> 1) | ((i & 0x20) >> 3) | ((i & 0x40) >> 5) | ((i & 0x80) >> 7) );
        }
    }

    public static byte reverse8Bits( byte nNum ) {
        return Bits.BitReverseTable256[ nNum & 0xFF ];
    }

    public static short reverse16Bits( short nNum ) {
        byte p0 = (byte) (nNum & 0xFF);
        byte p1 = (byte) ((nNum >> 8) & 0xFF);
        byte q1 = Bits.BitReverseTable256[ p0 & 0xFF ];
        byte q0 = Bits.BitReverseTable256[ p1 & 0xFF ];
        int nRes = (q0 & 0xFF) | ((q1 & 0xFF) << 8);
        return (short) nRes;
    }

    public static int reverse32Bits( int nNum ) {
        byte p0 = (byte) (nNum & 0xFF);
        byte p1 = (byte) ((nNum >> 8) & 0xFF);
        byte p2 = (byte) ((nNum >> 16) & 0xFF);
        byte p3 = (byte) ((nNum >> 24) & 0xFF);
        byte q3 = Bits.BitReverseTable256[ p0 & 0xFF ];
        byte q2 = Bits.BitReverseTable256[ p1 & 0xFF ];
        byte q1 = Bits.BitReverseTable256[ p2 & 0xFF ];
        byte q0 = Bits.BitReverseTable256[ p3 & 0xFF ];
        return (q0 & 0xFF) | ((q1 & 0xFF) << 8) | ((q2 & 0xFF) << 16) | ((q3 & 0xFF) << 24);
    }

    public static long reverse64Bits( long nNum ) {
        int lower = (int) (nNum & 0xFFFFFFFFL);
        int upper = (int) ((nNum >> 32) & 0xFFFFFFFFL);
        int reversedLower = Bits.reverse32Bits( upper );
        int reversedUpper = Bits.reverse32Bits( lower );
        return ((long) reversedLower & 0xFFFFFFFFL) | (((long) reversedUpper & 0xFFFFFFFFL) << 32);
    }
}
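As a worked check of the table-driven reversal: reversing swaps bit i with bit (width - 1 - i), and the wider variants reverse each half and then swap the halves.

    byte  b = Bits.reverse8Bits( (byte) 0b0000_0011 );  // => (byte) 0b1100_0000 == (byte) 0xC0
    short s = Bits.reverse16Bits( (short) 0x0001 );     // => (short) 0x8000: bit 0 lands on bit 15.
    int   i = Bits.reverse32Bits( 0x00000001 );         // => 0x80000000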
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Bytes.java
================================================
package com.pinecone.framework.util;

public final class Bytes {
    public static final byte[] Empty = new byte[ 0 ];

    // LE / Encode
    public static byte[] int16ToBytesLE( short value ) {
        return new byte[]{ (byte) value, (byte) (value >> 8) };
    }

    public static byte[] int32ToBytesLE( int value ) {
        return new byte[]{ (byte) value, (byte) (value >> 8), (byte) (value >> 16), (byte) (value >> 24) };
    }

    public static byte[] int64ToBytesLE( long value ) {
        return new byte[]{
            (byte) value,         (byte) (value >> 8),  (byte) (value >> 16), (byte) (value >> 24),
            (byte) (value >> 32), (byte) (value >> 40), (byte) (value >> 48), (byte) (value >> 56)
        };
    }

    public static byte[] float32ToBytesLE( float value ) {
        return Bytes.int32ToBytesLE( Float.floatToIntBits( value ) );
    }

    public static byte[] float64ToBytesLE( double value ) {
        return Bytes.int64ToBytesLE( Double.doubleToLongBits( value ) );
    }

    // BE / Encode
    public static byte[] int16ToBytesBE( short value ) {
        return new byte[]{ (byte) (value >> 8), (byte) value };
    }

    public static byte[] int32ToBytesBE( int value ) {
        return new byte[]{ (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value };
    }

    public static byte[] int64ToBytesBE( long value ) {
        return new byte[]{
            (byte) (value >> 56), (byte) (value >> 48), (byte) (value >> 40), (byte) (value >> 32),
            (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8),  (byte) value
        };
    }

    public static byte[] float32ToBytesBE( float value ) {
        return Bytes.int32ToBytesBE( Float.floatToIntBits( value ) );
    }

    public static byte[] float64ToBytesBE( double value ) {
        return Bytes.int64ToBytesBE( Double.doubleToLongBits( value ) );
    }

    // LE / Decode
    public static short bytesToInt16LE( byte[] bytes ) {
        return (short) ((bytes[1] << 8) | (bytes[0] & 0xFF));
    }

    public static int bytesToInt32LE( byte[] bytes ) {
        return (bytes[3] << 24) | ((bytes[2] & 0xFF) << 16) | ((bytes[1] & 0xFF) << 8) | (bytes[0] & 0xFF);
    }

    public static long bytesToInt64LE( byte[] bytes ) {
        return ((long) bytes[7] << 56) | ((long) (bytes[6] & 0xFF) << 48) | ((long) (bytes[5] & 0xFF) << 40) | ((long) (bytes[4] & 0xFF) << 32)
             | ((long) (bytes[3] & 0xFF) << 24) | ((bytes[2] & 0xFF) << 16) | ((bytes[1] & 0xFF) << 8) | (bytes[0] & 0xFF);
    }

    public static float bytesToFloat32LE( byte[] bytes ) {
        return Float.intBitsToFloat( Bytes.bytesToInt32LE( bytes ) );
    }

    public static double bytesToFloat64LE( byte[] bytes ) {
        return Double.longBitsToDouble( Bytes.bytesToInt64LE( bytes ) );
    }

    // BE / Decode
    public static short bytesToInt16BE( byte[] bytes ) {
        return (short) ((bytes[0] << 8) | (bytes[1] & 0xFF));
    }

    public static int bytesToInt32BE( byte[] bytes ) {
        return (bytes[0] << 24) | ((bytes[1] & 0xFF) << 16) | ((bytes[2] & 0xFF) << 8) | (bytes[3] & 0xFF);
    }

    public static long bytesToInt64BE( byte[] bytes ) {
        return ((long) bytes[0] << 56) | ((long) (bytes[1] & 0xFF) << 48) | ((long) (bytes[2] & 0xFF) << 40) | ((long) (bytes[3] & 0xFF) << 32)
             | ((long) (bytes[4] & 0xFF) << 24) | ((bytes[5] & 0xFF) << 16) | ((bytes[6] & 0xFF) << 8) | (bytes[7] & 0xFF);
    }

    public static float bytesToFloat32BE( byte[] bytes ) {
        return Float.intBitsToFloat( Bytes.bytesToInt32BE( bytes ) );
    }

    public static double bytesToFloat64BE( byte[] bytes ) {
        return Double.longBitsToDouble( Bytes.bytesToInt64BE( bytes ) );
    }

    /** Even-parity indicator: returns 1 if the byte has an even number of set bits, 0 otherwise. */
    public static int calculateParity( byte b ) {
        int count = 0;
        for ( int i = 0; i < 8; i++ ) {
            if ( (b & (1 << i)) != 0 ) {
                count++;
            }
        }
        return ( count % 2 ) == 0 ? 1 : 0;
    }
}
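For instance, the same 32-bit value serializes to mirrored byte orders, and each decoder is the exact inverse of its encoder:

    byte[] le = Bytes.int32ToBytesLE( 0x11223344 );  // => { 0x44, 0x33, 0x22, 0x11 }
    byte[] be = Bytes.int32ToBytesBE( 0x11223344 );  // => { 0x11, 0x22, 0x33, 0x44 }
    int n = Bytes.bytesToInt32LE( le );              // => 0x11223344 round-trips.
    double d = Bytes.bytesToFloat64BE( Bytes.float64ToBytesBE( 3.14 ) ); // => 3.14, via Double bit patterns.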
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/CharactersUtils.java
================================================
package com.pinecone.framework.util;

public abstract class CharactersUtils {
    public static Character[] toObjects( char[] that ) {
        Character[] characters = new Character[ that.length ];
        for ( int i = 0; i < that.length; i++ ) {
            characters[ i ] = that[ i ];
        }
        return characters;
    }

    public static Character[] toObjects( String that ) { // No intermediate array copy.
        Character[] characters = new Character[ that.length() ];
        for ( int i = 0; i < that.length(); i++ ) {
            characters[ i ] = that.charAt( i );
        }
        return characters;
    }

    public static char[] toChars( Character[] that ) {
        char[] characters = new char[ that.length ];
        for ( int i = 0; i < that.length; i++ ) {
            characters[ i ] = that[ i ];
        }
        return characters;
    }

    public static char[] toChars( Object[] that ) {
        char[] characters = new char[ that.length ];
        for ( int i = 0; i < that.length; i++ ) {
            characters[ i ] = (char) that[ i ];
        }
        return characters;
    }

    public static boolean regionMatches( char c1, char c2 ) {
        c1 = Character.toUpperCase( c1 );
        c2 = Character.toUpperCase( c2 );
        if ( c1 == c2 ) {
            return true;
        }
        // Unfortunately, conversion to uppercase does not work properly for the Georgian
        // alphabet, which has strange rules about case conversion. So we need to make
        // one last check before exiting.
        return Character.toLowerCase( c1 ) == Character.toLowerCase( c2 );
    }

    public static int compareTo( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo, boolean bNoCase ) {
        // Java has no raw pointers, so clamp the ranges defensively instead.
        nThisTo = Math.min( nThisTo, hThis.length );
        nThatTo = Math.min( nThatTo, that.length );
        nThisFrom = Math.max( nThisFrom, 0 );
        nThatFrom = Math.max( nThatFrom, 0 );
        int len1 = nThisTo - nThisFrom;
        int len2 = nThatTo - nThatFrom;
        int lim = Math.min( len1, len2 );
        int k = 0;
        while ( k < lim ) {
            char c1 = hThis[ k + nThisFrom ];
            char c2 = that [ k + nThatFrom ];
            if( bNoCase ) {
                if( !CharactersUtils.regionMatches( c1, c2 ) ) {
                    return c1 - c2;
                }
            }
            else if ( c1 != c2 ) {
                return c1 - c2;
            }
            k++;
        }
        return len1 - len2;
    }

    public static int compareTo( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo ) {
        return CharactersUtils.compareTo( hThis, nThisFrom, nThisTo, that, nThatFrom, nThatTo, false );
    }

    public static boolean equals( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo, boolean bNoCase ) {
        nThisTo = Math.min( nThisTo, hThis.length );
        nThatTo = Math.min( nThatTo, that.length );
        nThisFrom = Math.max( nThisFrom, 0 );
        nThatFrom = Math.max( nThatFrom, 0 );
        int len1 = nThisTo - nThisFrom;
        int len2 = nThatTo - nThatFrom;
        if ( len1 == len2 ) {
            int i = 0;
            while ( len1-- != 0 ) {
                char c1 = hThis[ i + nThisFrom ];
                char c2 = that [ i + nThatFrom ];
                if( bNoCase ) {
                    if( !CharactersUtils.regionMatches( c1, c2 ) ) {
                        return false;
                    }
                }
                else if ( c1 != c2 ) {
                    return false;
                }
                i++;
            }
            return true;
        }
        return false;
    }

    public static boolean equals( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo ) {
        return CharactersUtils.equals( hThis, nThisFrom, nThisTo, that, nThatFrom, nThatTo, false );
    }

    public static char[] toLower( char[] arrThis ) {
        for ( int i = 0; i < arrThis.length; i++ ) {
            arrThis[ i ] = Character.toLowerCase( arrThis[ i ] );
        }
        return arrThis;
    }

    public static char[] toUpper( char[] arrThis ) {
        for ( int i = 0; i < arrThis.length; i++ ) {
            arrThis[ i ] = Character.toUpperCase( arrThis[ i ] );
        }
        return arrThis;
    }
}
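For example, the region comparators clamp out-of-range bounds instead of throwing, and the no-case variant applies the double upper-then-lower conversion above:

    char[] a = "HelloWorld".toCharArray();
    char[] b = "hello".toCharArray();
    CharactersUtils.equals( a, 0, 5, b, 0, 5, true );     // => true: "Hello" matches "hello" ignoring case.
    CharactersUtils.compareTo( a, 0, 5, b, 0, 5, false ); // => -32: 'H' (0x48) sorts before 'h' (0x68).
    CharactersUtils.equals( a, 0, 99, b, 0, 99, true );   // => false: bounds clamp to lengths 10 vs 5.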
================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/ClassUtils.java
================================================
package com.pinecone.framework.util;

import java.beans.Introspector;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

public abstract class ClassUtils {
    public static final String ARRAY_SUFFIX = "[]";
    private static final String INTERNAL_ARRAY_PREFIX = "[";
    private static final String NON_PRIMITIVE_ARRAY_PREFIX = "[L";
    private static final char PACKAGE_SEPARATOR = '.';
    private static final char PATH_SEPARATOR = '/';
    private static final char INNER_CLASS_SEPARATOR = '$';
    public static final String CGLIB_CLASS_SEPARATOR = "$$";
    public static final String CLASS_FILE_SUFFIX = ".class";

    private static final Map<Class<?>, Class<?>> primitiveWrapperTypeMap = new HashMap<>( 8 );
    private static final Map<Class<?>, Class<?>> primitiveTypeToWrapperMap = new HashMap<>( 8 );
    private static final Map<String, Class<?>> primitiveTypeNameMap = new HashMap<>( 32 );
    private static final Map<String, Class<?>> commonClassCache = new HashMap<>( 32 );

    public ClassUtils() {
    }

    private static void registerCommonClasses( Class<?>... commonClasses ) {
        for ( Class<?> clazz : commonClasses ) {
            commonClassCache.put( clazz.getName(), clazz );
        }
    }

    public static ClassLoader getDefaultClassLoader() {
        ClassLoader cl = null;
        try {
            cl = Thread.currentThread().getContextClassLoader();
        }
        catch ( Throwable ignored ) {
        }
        if ( cl == null ) {
            cl = ClassUtils.class.getClassLoader();
            if ( cl == null ) {
                try {
                    cl = ClassLoader.getSystemClassLoader();
                }
                catch ( Throwable ignored ) {
                }
            }
        }
        return cl;
    }

    public static ClassLoader overrideThreadContextClassLoader( ClassLoader classLoaderToUse ) {
        Thread currentThread = Thread.currentThread();
        ClassLoader threadContextClassLoader = currentThread.getContextClassLoader();
        if ( classLoaderToUse != null && !classLoaderToUse.equals( threadContextClassLoader ) ) {
            currentThread.setContextClassLoader( classLoaderToUse );
            return threadContextClassLoader;
        }
        return null;
    }

    public static Class<?> forName( String name, ClassLoader classLoader ) throws ClassNotFoundException, LinkageError {
        Assert.notNull( name, "Name must not be null" );
        Class<?> clazz = resolvePrimitiveClassName( name );
        if ( clazz == null ) {
            clazz = commonClassCache.get( name );
        }
        if ( clazz != null ) {
            return clazz;
        }
        // "java.lang.String[]" style arrays.
        if ( name.endsWith( ARRAY_SUFFIX ) ) {
            String elementName = name.substring( 0, name.length() - ARRAY_SUFFIX.length() );
            Class<?> elementClass = forName( elementName, classLoader );
            return Array.newInstance( elementClass, 0 ).getClass();
        }
        // "[Ljava.lang.String;" style arrays.
        if ( name.startsWith( NON_PRIMITIVE_ARRAY_PREFIX ) && name.endsWith( ";" ) ) {
            String elementName = name.substring( NON_PRIMITIVE_ARRAY_PREFIX.length(), name.length() - 1 );
            Class<?> elementClass = forName( elementName, classLoader );
            return Array.newInstance( elementClass, 0 ).getClass();
        }
        // "[[I" or "[[Ljava.lang.String;" style arrays.
        if ( name.startsWith( INTERNAL_ARRAY_PREFIX ) ) {
            String elementName = name.substring( INTERNAL_ARRAY_PREFIX.length() );
            Class<?> elementClass = forName( elementName, classLoader );
            return Array.newInstance( elementClass, 0 ).getClass();
        }
        ClassLoader clToUse = classLoader != null ? classLoader : getDefaultClassLoader();
        try {
            return clToUse != null ? clToUse.loadClass( name ) : Class.forName( name );
        }
        catch ( ClassNotFoundException ex ) {
            int lastDotIndex = name.lastIndexOf( PACKAGE_SEPARATOR );
            if ( lastDotIndex != -1 ) {
                String innerClassName = name.substring( 0, lastDotIndex ) + INNER_CLASS_SEPARATOR + name.substring( lastDotIndex + 1 );
                try {
                    return clToUse != null ? clToUse.loadClass( innerClassName ) : Class.forName( innerClassName );
                }
                catch ( ClassNotFoundException ignored ) {
                }
            }
            throw ex;
        }
    }

    public static Class<?> resolveClassName( String className, ClassLoader classLoader ) throws IllegalArgumentException {
        try {
            return forName( className, classLoader );
        }
        catch ( ClassNotFoundException ex ) {
            throw new IllegalArgumentException( "Cannot find class [" + className + "]", ex );
        }
        catch ( LinkageError err ) {
            throw new IllegalArgumentException( "Error loading class [" + className + "]: problem with class file or dependent class.", err );
        }
    }

    public static Class<?> resolvePrimitiveClassName( String name ) {
        Class<?> result = null;
        if ( name != null && name.length() <= 8 ) {
            result = primitiveTypeNameMap.get( name );
        }
        return result;
    }

    public static boolean isPresent( String className, ClassLoader classLoader ) {
        try {
            forName( className, classLoader );
            return true;
        }
        catch ( Throwable ex ) {
            return false;
        }
    }

    public static Class<?> getUserClass( Object instance ) {
        Assert.notNull( instance, "Instance must not be null" );
        return getUserClass( instance.getClass() );
    }

    public static Class<?> getUserClass( Class<?> clazz ) {
        if ( clazz != null && clazz.getName().contains( CGLIB_CLASS_SEPARATOR ) ) {
            Class<?> superClass = clazz.getSuperclass();
            if ( superClass != null && !Object.class.equals( superClass ) ) {
                return superClass;
            }
        }
        return clazz;
    }

    public static boolean isCacheSafe( Class<?> clazz, ClassLoader classLoader ) {
        Assert.notNull( clazz, "Class must not be null" );
        try {
            ClassLoader target = clazz.getClassLoader();
            if ( target == null ) {
                return true;
            }
            ClassLoader cur = classLoader;
            if ( cur == target ) {
                return true;
            }
            do {
                if ( cur == null ) {
                    return false;
                }
                cur = cur.getParent();
            } while ( cur != target );
            return true;
        }
        catch ( SecurityException ex ) {
            return true;
        }
    }

    public static String getShortName( String className ) {
        Assert.hasLength( className, "Class name must not be empty" );
        int lastDotIndex = className.lastIndexOf( PACKAGE_SEPARATOR );
        int nameEndIndex = className.indexOf( CGLIB_CLASS_SEPARATOR );
        if ( nameEndIndex == -1 ) {
            nameEndIndex = className.length();
        }
        String shortName = className.substring( lastDotIndex + 1, nameEndIndex );
        shortName = shortName.replace( INNER_CLASS_SEPARATOR, PACKAGE_SEPARATOR );
        return shortName;
    }

    public static String getShortName( Class<?> clazz ) {
        return getShortName( getQualifiedName( clazz ) );
    }

    public static String getShortNameAsProperty( Class<?> clazz ) {
        String shortName = getShortName( clazz );
        int dotIndex = shortName.lastIndexOf( PACKAGE_SEPARATOR );
        shortName = dotIndex != -1 ? shortName.substring( dotIndex + 1 ) : shortName;
        return Introspector.decapitalize( shortName );
    }

    public static String getClassFileName( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        String className = clazz.getName();
        int lastDotIndex = className.lastIndexOf( PACKAGE_SEPARATOR );
        return className.substring( lastDotIndex + 1 ) + CLASS_FILE_SUFFIX;
    }

    public static String getPackageName( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        return getPackageName( clazz.getName() );
    }

    public static String getPackageName( String fqClassName ) {
        Assert.notNull( fqClassName, "Class name must not be null" );
        int lastDotIndex = fqClassName.lastIndexOf( PACKAGE_SEPARATOR );
        return lastDotIndex != -1 ? fqClassName.substring( 0, lastDotIndex ) : "";
    }

    public static String getQualifiedName( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        return clazz.isArray() ? getQualifiedNameForArray( clazz ) : clazz.getName();
    }

    private static String getQualifiedNameForArray( Class<?> clazz ) {
        StringBuilder result = new StringBuilder();
        while ( clazz.isArray() ) {
            clazz = clazz.getComponentType();
            result.append( ARRAY_SUFFIX );
        }
        result.insert( 0, clazz.getName() );
        return result.toString();
    }

    public static String getQualifiedMethodName( Method method ) {
        Assert.notNull( method, "Method must not be null" );
        return method.getDeclaringClass().getName() + "." + method.getName();
    }

    public static String getDescriptiveType( Object value ) {
        if ( value == null ) {
            return null;
        }
        Class<?> clazz = value.getClass();
        if ( Proxy.isProxyClass( clazz ) ) {
            StringBuilder result = new StringBuilder( clazz.getName() );
            result.append( " implementing " );
            Class<?>[] ifcs = clazz.getInterfaces();
            for ( int i = 0; i < ifcs.length; ++i ) {
                result.append( ifcs[ i ].getName() );
                if ( i < ifcs.length - 1 ) {
                    result.append( ',' );
                }
            }
            return result.toString();
        }
        return clazz.isArray() ? getQualifiedNameForArray( clazz ) : clazz.getName();
    }

    public static boolean matchesTypeName( Class<?> clazz, String typeName ) {
        return typeName != null && ( typeName.equals( clazz.getName() )
                || typeName.equals( clazz.getSimpleName() )
                || ( clazz.isArray() && typeName.equals( getQualifiedNameForArray( clazz ) ) ) );
    }

    public static boolean hasConstructor( Class<?> clazz, Class<?>... paramTypes ) {
        return getConstructorIfAvailable( clazz, paramTypes ) != null;
    }

    public static <T> Constructor<T> getConstructorIfAvailable( Class<T> clazz, Class<?>... paramTypes ) {
        Assert.notNull( clazz, "Class must not be null" );
        try {
            return clazz.getConstructor( paramTypes );
        }
        catch ( NoSuchMethodException ex ) {
            return null;
        }
    }

    public static boolean hasMethod( Class<?> clazz, String methodName, Class<?>... paramTypes ) {
        return getMethodIfAvailable( clazz, methodName, paramTypes ) != null;
    }

    public static Method getMethod( Class<?> clazz, String methodName, Class<?>... paramTypes ) {
        Assert.notNull( clazz, "Class must not be null" );
        Assert.notNull( methodName, "Method name must not be null" );
        if ( paramTypes != null ) {
            try {
                return clazz.getMethod( methodName, paramTypes );
            }
            catch ( NoSuchMethodException e ) {
                throw new IllegalStateException( "Expected method not found: " + e );
            }
        }
        Set<Method> candidates = new HashSet<>( 1 );
        for ( Method method : clazz.getMethods() ) {
            if ( methodName.equals( method.getName() ) ) {
                candidates.add( method );
            }
        }
        if ( candidates.size() == 1 ) {
            return candidates.iterator().next();
        }
        if ( candidates.isEmpty() ) {
            throw new IllegalStateException( "Expected method not found: " + clazz + "." + methodName );
        }
        throw new IllegalStateException( "No unique method found: " + clazz + "." + methodName );
    }

    public static Method getFirstMethodByName( Class<?> clazz, String methodName ) {
        for ( Method method : clazz.getMethods() ) {
            if ( method.getName().equals( methodName ) ) {
                return method;
            }
        }
        return null;
    }

    public static Method getMethodIfAvailable( Class<?> clazz, String methodName, Class<?>... paramTypes ) {
        Assert.notNull( clazz, "Class must not be null" );
        Assert.notNull( methodName, "Method name must not be null" );
        if ( paramTypes != null ) {
            try {
                return clazz.getMethod( methodName, paramTypes );
            }
            catch ( NoSuchMethodException ex ) {
                return null;
            }
        }
        Set<Method> candidates = new HashSet<>( 1 );
        for ( Method method : clazz.getMethods() ) {
            if ( methodName.equals( method.getName() ) ) {
                candidates.add( method );
            }
        }
        return candidates.size() == 1 ? candidates.iterator().next() : null;
    }

    public static int getMethodCountForName( Class<?> clazz, String methodName ) {
        Assert.notNull( clazz, "Class must not be null" );
        Assert.notNull( methodName, "Method name must not be null" );
        int count = 0;
        for ( Method method : clazz.getDeclaredMethods() ) {
            if ( methodName.equals( method.getName() ) ) {
                ++count;
            }
        }
        for ( Class<?> ifc : clazz.getInterfaces() ) {
            count += getMethodCountForName( ifc, methodName );
        }
        if ( clazz.getSuperclass() != null ) {
            count += getMethodCountForName( clazz.getSuperclass(), methodName );
        }
        return count;
    }

    public static boolean hasAtLeastOneMethodWithName( Class<?> clazz, String methodName ) {
        Assert.notNull( clazz, "Class must not be null" );
        Assert.notNull( methodName, "Method name must not be null" );
        for ( Method method : clazz.getDeclaredMethods() ) {
            if ( method.getName().equals( methodName ) ) {
                return true;
            }
        }
        for ( Class<?> ifc : clazz.getInterfaces() ) {
            if ( hasAtLeastOneMethodWithName( ifc, methodName ) ) {
                return true;
            }
        }
        return clazz.getSuperclass() != null && hasAtLeastOneMethodWithName( clazz.getSuperclass(), methodName );
    }

    public static Method getMostSpecificMethod( Method method, Class<?> targetClass ) {
        if ( method != null && isOverridable( method, targetClass )
                && targetClass != null && !targetClass.equals( method.getDeclaringClass() ) ) {
            try {
                if ( Modifier.isPublic( method.getModifiers() ) ) {
                    try {
                        return targetClass.getMethod( method.getName(), method.getParameterTypes() );
                    }
                    catch ( NoSuchMethodException ex ) {
                        return method;
                    }
                }
                Method specificMethod = ReflectionUtils.findMethod( targetClass, method.getName(), method.getParameterTypes() );
                return specificMethod != null ? specificMethod : method;
            }
            catch ( SecurityException ignored ) {
            }
        }
        return method;
    }

    public static boolean isUserLevelMethod( Method method ) {
        Assert.notNull( method, "Method must not be null" );
        return method.isBridge() || ( !method.isSynthetic() && !isGroovyObjectMethod( method ) );
    }

    private static boolean isGroovyObjectMethod( Method method ) {
        return method.getDeclaringClass().getName().equals( "groovy.lang.GroovyObject" );
    }

    private static boolean isOverridable( Method method, Class<?> targetClass ) {
        if ( Modifier.isPrivate( method.getModifiers() ) ) {
            return false;
        }
        if ( Modifier.isPublic( method.getModifiers() ) || Modifier.isProtected( method.getModifiers() ) ) {
            return true;
        }
        return getPackageName( method.getDeclaringClass() ).equals( getPackageName( targetClass ) );
    }

    public static Method getStaticMethod( Class<?> clazz, String methodName, Class<?>... args ) {
        Assert.notNull( clazz, "Class must not be null" );
        Assert.notNull( methodName, "Method name must not be null" );
        try {
            Method method = clazz.getMethod( methodName, args );
            return Modifier.isStatic( method.getModifiers() ) ? method : null;
        }
        catch ( NoSuchMethodException ex ) {
            return null;
        }
    }

    public static boolean isPrimitiveWrapper( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        return primitiveWrapperTypeMap.containsKey( clazz );
    }

    public static boolean isPrimitiveOrWrapper( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        return clazz.isPrimitive() || isPrimitiveWrapper( clazz );
    }

    public static boolean isPrimitiveArray( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        return clazz.isArray() && clazz.getComponentType().isPrimitive();
    }

    public static boolean isPrimitiveWrapperArray( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        return clazz.isArray() && isPrimitiveWrapper( clazz.getComponentType() );
    }

    public static Class<?> resolvePrimitiveIfNecessary( Class<?> clazz ) {
        Assert.notNull( clazz, "Class must not be null" );
        return ( clazz.isPrimitive() && clazz != Void.TYPE ) ? primitiveTypeToWrapperMap.get( clazz ) : clazz;
    }

    public static boolean isAssignable( Class<?> lhsType, Class<?> rhsType ) {
        Assert.notNull( lhsType, "Left-hand side type must not be null" );
        Assert.notNull( rhsType, "Right-hand side type must not be null" );
        if ( lhsType.isAssignableFrom( rhsType ) ) {
            return true;
        }
        if ( lhsType.isPrimitive() ) {
            Class<?> resolvedPrimitive = primitiveWrapperTypeMap.get( rhsType );
            return lhsType.equals( resolvedPrimitive );
        }
        Class<?> resolvedWrapper = primitiveTypeToWrapperMap.get( rhsType );
        return resolvedWrapper != null && lhsType.isAssignableFrom( resolvedWrapper );
    }

    public static boolean isAssignableValue( Class<?> type, Object value ) {
        Assert.notNull( type, "Type must not be null" );
        return value != null ? isAssignable( type, value.getClass() ) : !type.isPrimitive();
    }

    public static String convertResourcePathToClassName( String resourcePath ) {
        Assert.notNull( resourcePath, "Resource path must not be null" );
        return resourcePath.replace( PATH_SEPARATOR, PACKAGE_SEPARATOR );
    }

    public static String convertClassNameToResourcePath( String className ) {
        Assert.notNull( className, "Class name must not be null" );
        return className.replace( PACKAGE_SEPARATOR, PATH_SEPARATOR );
    }

    public static String addResourcePathToPackagePath( Class<?> clazz, String resourceName ) {
        Assert.notNull( resourceName, "Resource name must not be null" );
        if ( !resourceName.startsWith( "/" ) ) {
            return classPackageAsResourcePath( clazz ) + "/" + resourceName;
        }
        return classPackageAsResourcePath( clazz ) + resourceName;
    }

    public static String classPackageAsResourcePath( Class<?> clazz ) {
        if ( clazz == null ) {
            return "";
        }
        String className = clazz.getName();
        int packageEndIndex = className.lastIndexOf( PACKAGE_SEPARATOR );
        if ( packageEndIndex == -1 ) {
            return "";
        }
        String packageName = className.substring( 0, packageEndIndex );
        return packageName.replace( PACKAGE_SEPARATOR, PATH_SEPARATOR );
    }

    public static String classNamesToString( Class<?>... classes ) {
        return classNamesToString( Arrays.asList( classes ) );
    }

    public static String classNamesToString( Collection<Class<?>> classes ) {
        if ( CollectionUtils.isEmpty( classes ) ) {
            return "[]";
        }
        StringBuilder sb = new StringBuilder( "[" );
        Iterator<Class<?>> it = classes.iterator();
        while ( it.hasNext() ) {
            Class<?> clazz = it.next();
            sb.append( clazz.getName() );
            if ( it.hasNext() ) {
                sb.append( ", " );
            }
        }
        sb.append( "]" );
        return sb.toString();
    }

    public static Class<?>[] toClassArray( Collection<Class<?>> collection ) {
        return collection == null ? null : collection.toArray( new Class<?>[ collection.size() ] );
    }

    public static Class<?>[] getAllInterfaces( Object instance ) {
        Assert.notNull( instance, "Instance must not be null" );
        return getAllInterfacesForClass( instance.getClass() );
    }

    public static Class<?>[] getAllInterfacesForClass( Class<?> clazz ) {
        return getAllInterfacesForClass( clazz, null );
    }

    public static Class<?>[] getAllInterfacesForClass( Class<?> clazz, ClassLoader classLoader ) {
        Set<Class<?>> ifcs = getAllInterfacesForClassAsSet( clazz, classLoader );
        return ifcs.toArray( new Class<?>[ ifcs.size() ] );
    }

    public static Set<Class<?>> getAllInterfacesAsSet( Object instance ) {
        Assert.notNull( instance, "Instance must not be null" );
        return getAllInterfacesForClassAsSet( instance.getClass() );
    }

    public static Set<Class<?>> getAllInterfacesForClassAsSet( Class<?> clazz ) {
        return getAllInterfacesForClassAsSet( clazz, null );
    }

    public static Set<Class<?>> getAllInterfacesForClassAsSet( Class<?> clazz, ClassLoader classLoader ) {
        Assert.notNull( clazz, "Class must not be null" );
        if ( clazz.isInterface() && isVisible( clazz, classLoader ) ) {
            return Collections.singleton( clazz );
        }
        Set<Class<?>> interfaces = new LinkedHashSet<>();
        for ( Class<?> current = clazz; current != null; current = current.getSuperclass() ) {
            for ( Class<?> ifc : current.getInterfaces() ) {
                interfaces.addAll( getAllInterfacesForClassAsSet( ifc, classLoader ) );
            }
        }
        return interfaces;
    }

    public static Class<?> createCompositeInterface( Class<?>[] interfaces, ClassLoader classLoader ) {
        Assert.notEmpty( interfaces, "Interfaces must not be empty" );
        Assert.notNull( classLoader, "ClassLoader must not be null" );
        return Proxy.getProxyClass( classLoader, interfaces );
    }

    public static Class<?> determineCommonAncestor( Class<?> clazz1, Class<?> clazz2 ) {
        if ( clazz1 == null ) {
            return clazz2;
        }
        if ( clazz2 == null ) {
            return clazz1;
        }
        if ( clazz1.isAssignableFrom( clazz2 ) ) {
            return clazz1;
        }
        if ( clazz2.isAssignableFrom( clazz1 ) ) {
            return clazz2;
        }
        Class<?> ancestor = clazz1;
        do {
            ancestor = ancestor.getSuperclass();
            if ( ancestor == null || Object.class.equals( ancestor ) ) {
                return null;
            }
        } while ( !ancestor.isAssignableFrom( clazz2 ) );
        return ancestor;
    }

    public static boolean isVisible( Class<?> clazz, ClassLoader classLoader ) {
        if ( classLoader == null ) {
            return true;
        }
        try {
            Class<?> actualClass = classLoader.loadClass( clazz.getName() );
            return clazz == actualClass;
        }
        catch ( ClassNotFoundException ex ) {
            return false;
        }
    }

    public static boolean isCglibProxy( Object object ) {
        return isCglibProxyClass( object.getClass() );
    }

    public static boolean isCglibProxyClass( Class<?> clazz ) {
        return clazz != null && isCglibProxyClassName( clazz.getName() );
    }

    public static boolean isCglibProxyClassName( String className ) {
        return className != null && className.contains( CGLIB_CLASS_SEPARATOR );
    }

    static {
        primitiveWrapperTypeMap.put( Boolean.class, Boolean.TYPE );
        primitiveWrapperTypeMap.put( Byte.class, Byte.TYPE );
        primitiveWrapperTypeMap.put( Character.class,
Character.TYPE); primitiveWrapperTypeMap.put(Double.class, Double.TYPE); primitiveWrapperTypeMap.put(Float.class, Float.TYPE); primitiveWrapperTypeMap.put(Integer.class, Integer.TYPE); primitiveWrapperTypeMap.put(Long.class, Long.TYPE); primitiveWrapperTypeMap.put(Short.class, Short.TYPE); Iterator var0 = primitiveWrapperTypeMap.entrySet().iterator(); while(var0.hasNext()) { Entry, Class> entry = (Entry)var0.next(); primitiveTypeToWrapperMap.put(entry.getValue(), entry.getKey()); registerCommonClasses((Class)entry.getKey()); } Set> primitiveTypes = new HashSet<>(32); primitiveTypes.addAll(primitiveWrapperTypeMap.values()); primitiveTypes.addAll(Arrays.asList(boolean[].class, byte[].class, char[].class, double[].class, float[].class, int[].class, long[].class, short[].class)); primitiveTypes.add(Void.TYPE); Iterator var4 = primitiveTypes.iterator(); while(var4.hasNext()) { Class primitiveType = (Class)var4.next(); primitiveTypeNameMap.put(primitiveType.getName(), primitiveType); } registerCommonClasses(Boolean[].class, Byte[].class, Character[].class, Double[].class, Float[].class, Integer[].class, Long[].class, Short[].class); registerCommonClasses(Number.class, Number[].class, String.class, String[].class, Object.class, Object[].class, Class.class, Class[].class); registerCommonClasses(Throwable.class, Exception.class, RuntimeException.class, Error.class, StackTraceElement.class, StackTraceElement[].class); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/CollectionUtils.java ================================================ package com.pinecone.framework.util; import com.pinecone.framework.system.Unsafe; import com.pinecone.framework.unit.AbstractMultiValueMap; import com.pinecone.framework.unit.MultiValueMap; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.Map.Entry; @SuppressWarnings( "unchecked" ) public abstract class CollectionUtils { public CollectionUtils() { } public static boolean isEmpty( Collection collection ) { return collection == null || collection.isEmpty(); } public static boolean isNoneEmpty( Collection collection ) { return !CollectionUtils.isEmpty( collection ); } public static boolean isEmpty( Map map ) { return map == null || map.isEmpty(); } public static boolean isNoneEmpty( Map map ) { return !CollectionUtils.isEmpty( map ); } public static List arrayToList(Object source) { return Arrays.asList(ObjectUtils.toObjectArray(source)); } public static void mergeArrayIntoCollection(Object array, Collection collection) { if (collection == null) { throw new IllegalArgumentException("Collection must not be null"); } else { Object[] arr = ObjectUtils.toObjectArray(array); Object[] var3 = arr; int var4 = arr.length; for(int var5 = 0; var5 < var4; ++var5) { Object elem = var3[var5]; collection.add((E) elem); } } } public static void mergePropertiesIntoMap(Properties props, Map map) { if (map == null) { throw new IllegalArgumentException("Map must not be null"); } else { String key; Object value; if (props != null) { for(Enumeration en = props.propertyNames(); en.hasMoreElements(); map.put(key, value)) { key = (String)en.nextElement(); value = props.getProperty(key); if (value == null) { 
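/* Properties.getProperty(key) returns null for values that are not Strings, so fall back to the raw Hashtable get(key) to preserve non-String values: */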
value = props.get(key); } } } } } public static boolean contains(Iterator iterator, Object element) { if (iterator != null) { while(iterator.hasNext()) { Object candidate = iterator.next(); if (ObjectUtils.nullSafeEquals(candidate, element)) { return true; } } } return false; } public static boolean contains(Enumeration enumeration, Object element) { if (enumeration != null) { while(enumeration.hasMoreElements()) { Object candidate = enumeration.nextElement(); if (ObjectUtils.nullSafeEquals(candidate, element)) { return true; } } } return false; } public static boolean containsInstance(Collection collection, Object element) { if (collection != null) { Iterator var2 = collection.iterator(); while(var2.hasNext()) { Object candidate = var2.next(); if (candidate == element) { return true; } } } return false; } public static boolean containsAny(Collection source, Collection candidates) { if (!isEmpty(source) && !isEmpty(candidates)) { Iterator var2 = candidates.iterator(); Object candidate; do { if (!var2.hasNext()) { return false; } candidate = var2.next(); } while(!source.contains(candidate)); return true; } else { return false; } } public static E findFirstMatch(Collection source, Collection candidates) { if (!isEmpty(source) && !isEmpty(candidates)) { Iterator var2 = candidates.iterator(); Object candidate; do { if (!var2.hasNext()) { return null; } candidate = var2.next(); } while(!source.contains(candidate)); return (E) candidate; } else { return null; } } public static T findValueOfType(Collection collection, Class type) { if (isEmpty(collection)) { return null; } else { T value = null; Iterator var3 = collection.iterator(); while(true) { Object element; do { if (!var3.hasNext()) { return value; } element = var3.next(); } while(type != null && !type.isInstance(element)); if (value != null) { return null; } value = (T) element; } } } public static Object findValueOfType(Collection collection, Class[] types) { if (!isEmpty(collection) && !ObjectUtils.isEmpty(types)) { Class[] var2 = types; int var3 = types.length; for(int var4 = 0; var4 < var3; ++var4) { Class type = var2[var4]; Object value = findValueOfType(collection, type); if (value != null) { return value; } } return null; } else { return null; } } public static boolean hasUniqueObject(Collection collection) { if (isEmpty(collection)) { return false; } else { boolean hasCandidate = false; Object candidate = null; Iterator var3 = collection.iterator(); while(var3.hasNext()) { Object elem = var3.next(); if (!hasCandidate) { hasCandidate = true; candidate = elem; } else if (candidate != elem) { return false; } } return true; } } public static Class findCommonElementType(Collection collection) { if (isEmpty(collection)) { return null; } else { Class candidate = null; Iterator var2 = collection.iterator(); while(var2.hasNext()) { Object val = var2.next(); if (val != null) { if (candidate == null) { candidate = val.getClass(); } else if (candidate != val.getClass()) { return null; } } } return candidate; } } public static A[] toArray(Enumeration enumeration, A[] array) { ArrayList elements = new ArrayList(); while(enumeration.hasMoreElements()) { elements.add(enumeration.nextElement()); } return (A[]) elements.toArray(array); } public static Iterator toIterator(Enumeration enumeration) { return new CollectionUtils.EnumerationIterator(enumeration); } public static MultiValueMap toMultiValueMap(Map> map) { return new CollectionUtils.MultiValueMapAdapter(map); } public static MultiValueMap unmodifiableMultiValueMap(MultiValueMap map) { 
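/* Defensive wrapping: each value list is wrapped via Collections.unmodifiableList, then the whole map via Collections.unmodifiableMap, before adapting back to a MultiValueMap: */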
Assert.notNull(map, "'map' must not be null"); Map> result = new LinkedHashMap(map.size()); Iterator var2 = map.entrySet().iterator(); while(var2.hasNext()) { Entry> entry = (Entry)var2.next(); List values = Collections.unmodifiableList((List)entry.getValue()); result.put(entry.getKey(), values); } Map> unmodifiableMap = Collections.unmodifiableMap(result); return toMultiValueMap(unmodifiableMap); } private static class MultiValueMapAdapter extends AbstractMultiValueMap implements MultiValueMap, Serializable { private final Map> map; public MultiValueMapAdapter(Map> map) { Assert.notNull(map, "'map' must not be null"); this.map = map; } public V add( K key, V value ) { List values = (List)this.map.get(key); if (values == null) { values = new LinkedList(); this.map.put(key, values); } ( (List) values ).add( value ); return value; } public V getFirst(K key) { List values = (List)this.map.get(key); return values != null ? values.get(0) : null; } public V set( K key, V value ) { List values = new LinkedList(); values.add(value); this.map.put( key, values ); return value; } public void setAll(Map values) { Iterator var2 = values.entrySet().iterator(); while(var2.hasNext()) { Entry entry = (Entry)var2.next(); this.set(entry.getKey(), entry.getValue()); } } public Map toSingleValueMap() { LinkedHashMap singleValueMap = new LinkedHashMap(this.map.size()); Iterator var2 = this.map.entrySet().iterator(); while(var2.hasNext()) { Entry> entry = (Entry)var2.next(); singleValueMap.put(entry.getKey(), (V) ((List)entry.getValue()).get(0)); } return singleValueMap; } public int size() { return this.map.size(); } public boolean isEmpty() { return this.map.isEmpty(); } public boolean containsKey(Object key) { return this.map.containsKey(key); } public boolean containsValue(Object value) { return this.map.containsValue(value); } public List get(Object key) { return (List)this.map.get(key); } public List put(K key, List value) { return (List)this.map.put(key, value); } public List remove(Object key) { return (List)this.map.remove(key); } public void putAll(Map> m) { this.map.putAll(m); } public void clear() { this.map.clear(); } public Set keySet() { return this.map.keySet(); } public Collection> values() { return this.map.values(); } public Set>> entrySet() { return this.map.entrySet(); } public boolean equals(Object other) { return this == other ? 
true : this.map.equals(other); } public int hashCode() { return this.map.hashCode(); } public String toString() { return this.map.toString(); } } private static class EnumerationIterator implements Iterator { private Enumeration enumeration; public EnumerationIterator(Enumeration enumeration) { this.enumeration = enumeration; } public boolean hasNext() { return this.enumeration.hasMoreElements(); } public E next() { return this.enumeration.nextElement(); } public void remove() throws UnsupportedOperationException { throw new UnsupportedOperationException("Not supported"); } } @Unsafe public static List genericConvert( List list ) { return (List) list; } @Unsafe public static Collection genericConvert( Collection collection ) { return (Collection) collection; } @Unsafe public static Map genericConvert( Map map ) { return (Map) map; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/CursorParser.java ================================================ package com.pinecone.framework.util; import com.pinecone.framework.system.ParseException; import com.pinecone.framework.system.prototype.Pinenut; public interface CursorParser extends Pinenut { void back() throws ParseException; char next() throws ParseException; String next( int n ) throws ParseException; Object nextValue() throws ParseException ; Object nextValue( Object indexKey, Object parent, Object[] args ) throws ParseException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Debug.java ================================================ package com.pinecone.framework.util; import java.util.concurrent.atomic.AtomicInteger; import com.pinecone.framework.system.InstantKillError; import com.pinecone.framework.util.io.Tracer; import com.pinecone.framework.util.io.Tracerson; import com.pinecone.framework.util.json.JSON; public class Debug { private final static Tracer console = new Tracerson(); public static Tracer console() { return Debug.console; } public static Tracer probe(){ System.err.println("\n\rFuck is here !\n\r"); return Debug.console; } public static Tracer fmt( int nIndentFactor, Object Anything, Object...objects ){ Debug.console.getOut().print( JSON.stringify( Anything, nIndentFactor ) ); for ( Object row : objects ) { Debug.console.getOut().print( JSON.stringify( row, nIndentFactor ) ); } return Debug.console; } public static Tracer fmp( int nIndentFactor, Object Anything, Object...objects ){ Debug.console.getOut().print( JSON.stringify( JSON.parse( JSON.stringify( Anything ) ), nIndentFactor ) ); for ( Object row : objects ) { Debug.console.getOut().print( JSON.stringify( JSON.parse( JSON.stringify( row ) ), nIndentFactor ) ); } return Debug.console; } public static Tracer trace( Object Anything, Object...objects ){ return Debug.console.log( Anything, objects ); } public synchronized static Tracer traceSyn( Object Anything, Object...objects ){ return Debug.console.log( Anything, objects ); } public static Tracer info ( Object Anything, Object...objects ){ return Debug.console.info( Anything, objects ); } public synchronized static Tracer infoSyn( Object Anything, Object...objects ){ return Debug.console.info( Anything, objects ); } public static Tracer warn ( Object Anything, Object...objects ){ return Debug.console.warn( Anything, objects ); } public synchronized static Tracer warnSyn( Object Anything, Object...objects ){ return Debug.console.warn( Anything, objects ); } public static Tracer 
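/* The color helpers below pass ANSI SGR codes through Tracer.colorf: 31 red, 32 green, 34 blue, 35 purple, 37 white. */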
colorf( int colorCode, Object Anything, Object...objects ){ return Debug.console.colorf( colorCode, Anything, objects ); } public static Tracer purplef( Object Anything, Object...objects ){ return Debug.console.colorf( 35, Anything, objects ); } public synchronized static Tracer purplefs( Object Anything, Object...objects ){ return Debug.purplef( Anything, objects ); } public static Tracer redf( Object Anything, Object...objects ){ return Debug.console.colorf( 31, Anything, objects ); } public synchronized static Tracer redfs( Object Anything, Object...objects ){ return Debug.redf( Anything, objects ); } public static Tracer greenf( Object Anything, Object...objects ){ return Debug.console.colorf( 32, Anything, objects ); } public synchronized static Tracer greenfs( Object Anything, Object...objects ){ return Debug.greenf( Anything, objects ); } public static Tracer bluef( Object Anything, Object...objects ){ return Debug.console.colorf( 34, Anything, objects ); } public synchronized static Tracer bluefs( Object Anything, Object...objects ){ return Debug.bluef( Anything, objects ); } public static Tracer whitef( Object Anything, Object...objects ){ return Debug.console.colorf( 37, Anything, objects ); } public synchronized static Tracer whitefs( Object Anything, Object...objects ){ return Debug.whitef( Anything, objects ); } public static Tracer hhf(){ Debug.console.getOut().println(); return Debug.console; } public static Tracer echo( Object data, Object...objects ) { return Debug.console.echo( data, objects ); } public static Tracer cerr( Object data, Object...objects ) { return Debug.console.cerr( data, objects ); } public synchronized static Tracer cerrSyn( Object data, Object...objects ) { return Debug.console.cerr( data, objects ); } public static void sleep( long millis ) { try { Thread.sleep( millis ); } catch ( InterruptedException e ) { Debug.cerr( e.getMessage() ); } } public static void stop() { throw new InstantKillError( "Invoked at [ Debug::stop() ]" ); } public static void exit() { System.exit( -666 ); } private static AtomicInteger InvokeCounts = new AtomicInteger(); public static long invokeCounts() { return Debug.InvokeCounts.getAndIncrement(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/GeneralStrings.java ================================================ package com.pinecone.framework.util; public abstract class GeneralStrings { public static boolean transferCharParse ( char c, CursorParser cursor, StringBuilder sb ) { switch ( c ) { case '"': case '\'': case '/': case '\\': { sb.append(c); return true; } case 'b': { sb.append('\b'); return true; } case 'f': { sb.append('\f'); return true; } case 'n': sb.append('\n'); return true; case 'r': { sb.append('\r'); return true; } case 't': { sb.append('\t'); return true; } case 'x': { /* Notice: '\xFF'-style escapes are not part of the JSON standard, but they are accepted here. */ sb.append( (char) Integer.parseInt(cursor.next( 2), 16) ); return true; } case 'u': { sb.append((char) Integer.parseInt(cursor.next( 4), 16)); return true; } default: { /* Notice: As in Pinecone CPP, an illegal escape is kept verbatim; make this strict if desired (the JSON standard does NOT allow it). */ sb.append( '\\' ); //throw this.syntaxError( "Error parsing JSON string with illegal escape."
); } } return false; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/OSIdentifier.java ================================================ package com.pinecone.framework.util; public class OSIdentifier { private static String OS = System.getProperty("os.name").toLowerCase(); private static OSIdentifier OSIInstance = new OSIdentifier(); private Platform platform; private OSIdentifier(){} public static boolean isLinux(){ return OS.contains("linux"); } public static boolean isMacOS(){ return OS.contains("mac") &&OS.indexOf("os")>0&& !OS.contains("x"); } public static boolean isMacOSX(){ return OS.contains("mac") &&OS.indexOf("os")>0&&OS.indexOf("x")>0; } public static boolean isWindows(){ return OS.contains("windows"); } public static boolean isOS2(){ return OS.contains("os/2"); } public static boolean isSolaris(){ return OS.contains("solaris"); } public static boolean isSunOS(){ return OS.contains("sunos"); } public static boolean isMPEiX(){ return OS.contains("mpe/ix"); } public static boolean isHPUX(){ return OS.contains("hp-ux"); } public static boolean isAix(){ return OS.contains("aix"); } public static boolean isOS390(){ return OS.contains("os/390"); } public static boolean isFreeBSD(){ return OS.contains("freebsd"); } public static boolean isIrix(){ return OS.contains("irix"); } public static boolean isDigitalUnix(){ return OS.contains("digital") &&OS.indexOf("unix")>0; } public static boolean isNetWare(){ return OS.contains("netware"); } public static boolean isOSF1(){ return OS.contains("osf1"); } public static boolean isOpenVMS(){ return OS.contains("openvms"); } public static Platform getOSname(){ if(isAix()){ OSIdentifier.OSIInstance.platform = Platform.AIX; }else if (isDigitalUnix()) { OSIdentifier.OSIInstance.platform = Platform.Digital_Unix; }else if (isFreeBSD()) { OSIdentifier.OSIInstance.platform = Platform.FreeBSD; }else if (isHPUX()) { OSIdentifier.OSIInstance.platform = Platform.HP_UX; }else if (isIrix()) { OSIdentifier.OSIInstance.platform = Platform.Irix; }else if (isLinux()) { OSIdentifier.OSIInstance.platform = Platform.Linux; }else if (isMacOS()) { OSIdentifier.OSIInstance.platform = Platform.Mac_OS; }else if (isMacOSX()) { OSIdentifier.OSIInstance.platform = Platform.Mac_OS_X; }else if (isMPEiX()) { OSIdentifier.OSIInstance.platform = Platform.MPEiX; }else if (isNetWare()) { OSIdentifier.OSIInstance.platform = Platform.NetWare_411; }else if (isOpenVMS()) { OSIdentifier.OSIInstance.platform = Platform.OpenVMS; }else if (isOS2()) { OSIdentifier.OSIInstance.platform = Platform.OS2; }else if (isOS390()) { OSIdentifier.OSIInstance.platform = Platform.OS390; }else if (isOSF1()) { OSIdentifier.OSIInstance.platform = Platform.OSF1; }else if (isSolaris()) { OSIdentifier.OSIInstance.platform = Platform.Solaris; }else if (isSunOS()) { OSIdentifier.OSIInstance.platform = Platform.SunOS; }else if (isWindows()) { OSIdentifier.OSIInstance.platform = Platform.Windows; }else{ OSIdentifier.OSIInstance.platform = Platform.Others; } return OSIdentifier.OSIInstance.platform; } public enum Platform { Any("any"), Linux("Linux"), Mac_OS("Mac OS"), Mac_OS_X("Mac OS X"), Windows("Windows"), OS2("OS/2"), Solaris("Solaris"), SunOS("SunOS"), MPEiX("MPE/iX"), HP_UX("HP-UX"), AIX("AIX"), OS390("OS/390"), FreeBSD("FreeBSD"), Irix("Irix"), Digital_Unix("Digital Unix"), NetWare_411("NetWare"), OSF1("OSF1"), OpenVMS("OpenVMS"), Others("Others"); private String description; private Platform(String desc) { this.description = desc; } 
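/* Usage sketch (illustrative): Platform p = OSIdentifier.getOSname(); the boolean probes (isLinux(), isWindows(), ...) may also be queried directly. */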
public String toString() { return this.description; } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/ObjectUtils.java ================================================ package com.pinecone.framework.util; import java.lang.reflect.Array; import java.util.Arrays; public abstract class ObjectUtils { private static final int INITIAL_HASH = 7; private static final int MULTIPLIER = 31; private static final String EMPTY_STRING = ""; private static final String NULL_STRING = "null"; private static final String ARRAY_START = "{"; private static final String ARRAY_END = "}"; private static final String EMPTY_ARRAY = "{}"; private static final String ARRAY_ELEMENT_SEPARATOR = ", "; public ObjectUtils() { } public static boolean isCheckedException(Throwable ex) { return !(ex instanceof RuntimeException) && !(ex instanceof Error); } public static boolean isCompatibleWithThrowsClause(Throwable ex, Class... declaredExceptions) { if (!isCheckedException(ex)) { return true; } else { if (declaredExceptions != null) { Class[] var2 = declaredExceptions; int var3 = declaredExceptions.length; for(int var4 = 0; var4 < var3; ++var4) { Class declaredException = var2[var4]; if (declaredException.isInstance(ex)) { return true; } } } return false; } } public static boolean isArray(Object obj) { return obj != null && obj.getClass().isArray(); } public static boolean isEmpty(Object[] array) { return array == null || array.length == 0; } public static boolean containsElement(Object[] array, Object element) { if (array == null) { return false; } else { Object[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { Object arrayEle = var2[var4]; if (nullSafeEquals(arrayEle, element)) { return true; } } return false; } } public static boolean containsConstant(Enum[] enumValues, String constant) { return containsConstant(enumValues, constant, false); } public static boolean containsConstant(Enum[] enumValues, String constant, boolean caseSensitive) { Enum[] var3 = enumValues; int var4 = enumValues.length; int var5 = 0; while(true) { if (var5 >= var4) { return false; } Enum candidate = var3[var5]; if (caseSensitive) { if (candidate.toString().equals(constant)) { break; } } else if (candidate.toString().equalsIgnoreCase(constant)) { break; } ++var5; } return true; } public static > E caseInsensitiveValueOf(E[] enumValues, String constant) { Enum[] var2 = enumValues; int var3 = enumValues.length; for(int var4 = 0; var4 < var3; ++var4) { E candidate = (E) var2[var4]; if (candidate.toString().equalsIgnoreCase(constant)) { return candidate; } } throw new IllegalArgumentException(String.format("constant [%s] does not exist in enum type %s", constant, enumValues.getClass().getComponentType().getName())); } public static A[] addObjectToArray(A[] array, O obj) { Class compType = Object.class; if (array != null) { compType = array.getClass().getComponentType(); } else if (obj != null) { compType = obj.getClass(); } int newArrLength = array != null ? 
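/* the new array is one element longer than the source, or length 1 when the source is null: */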
array.length + 1 : 1; A[] newArr = (A[])((Object[])Array.newInstance(compType, newArrLength)); if (array != null) { System.arraycopy(array, 0, newArr, 0, array.length); } newArr[newArr.length - 1] = obj; return newArr; } public static Object[] toObjectArray(Object source) { if (source instanceof Object[]) { return (Object[])((Object[])source); } else if (source == null) { return new Object[0]; } else if (!source.getClass().isArray()) { throw new IllegalArgumentException("Source is not an array: " + source); } else { int length = Array.getLength(source); if (length == 0) { return new Object[0]; } else { Class wrapperType = Array.get(source, 0).getClass(); Object[] newArray = (Object[])((Object[])Array.newInstance(wrapperType, length)); for(int i = 0; i < length; ++i) { newArray[i] = Array.get(source, i); } return newArray; } } } public static boolean nullSafeEquals(Object o1, Object o2) { if (o1 == o2) { return true; } else if (o1 != null && o2 != null) { if (o1.equals(o2)) { return true; } else { if (o1.getClass().isArray() && o2.getClass().isArray()) { if (o1 instanceof Object[] && o2 instanceof Object[]) { return Arrays.equals((Object[])((Object[])o1), (Object[])((Object[])o2)); } if (o1 instanceof boolean[] && o2 instanceof boolean[]) { return Arrays.equals((boolean[])((boolean[])o1), (boolean[])((boolean[])o2)); } if (o1 instanceof byte[] && o2 instanceof byte[]) { return Arrays.equals((byte[])((byte[])o1), (byte[])((byte[])o2)); } if (o1 instanceof char[] && o2 instanceof char[]) { return Arrays.equals((char[])((char[])o1), (char[])((char[])o2)); } if (o1 instanceof double[] && o2 instanceof double[]) { return Arrays.equals((double[])((double[])o1), (double[])((double[])o2)); } if (o1 instanceof float[] && o2 instanceof float[]) { return Arrays.equals((float[])((float[])o1), (float[])((float[])o2)); } if (o1 instanceof int[] && o2 instanceof int[]) { return Arrays.equals((int[])((int[])o1), (int[])((int[])o2)); } if (o1 instanceof long[] && o2 instanceof long[]) { return Arrays.equals((long[])((long[])o1), (long[])((long[])o2)); } if (o1 instanceof short[] && o2 instanceof short[]) { return Arrays.equals((short[])((short[])o1), (short[])((short[])o2)); } } return false; } } else { return false; } } public static int nullSafeHashCode(Object obj) { if (obj == null) { return 0; } else { if (obj.getClass().isArray()) { if (obj instanceof Object[]) { return nullSafeHashCode((Object[])((Object[])obj)); } if (obj instanceof boolean[]) { return nullSafeHashCode((boolean[])((boolean[])obj)); } if (obj instanceof byte[]) { return nullSafeHashCode((byte[])((byte[])obj)); } if (obj instanceof char[]) { return nullSafeHashCode((char[])((char[])obj)); } if (obj instanceof double[]) { return nullSafeHashCode((double[])((double[])obj)); } if (obj instanceof float[]) { return nullSafeHashCode((float[])((float[])obj)); } if (obj instanceof int[]) { return nullSafeHashCode((int[])((int[])obj)); } if (obj instanceof long[]) { return nullSafeHashCode((long[])((long[])obj)); } if (obj instanceof short[]) { return nullSafeHashCode((short[])((short[])obj)); } } return obj.hashCode(); } } public static int nullSafeHashCode(Object[] array) { if (array == null) { return 0; } else { int hash = 7; Object[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { Object element = var2[var4]; hash = 31 * hash + nullSafeHashCode(element); } return hash; } } public static int nullSafeHashCode(boolean[] array) { if (array == null) { return 0; } else { int hash = 7; boolean[] var2 = array; int 
var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { boolean element = var2[var4]; hash = 31 * hash + hashCode(element); } return hash; } } public static int nullSafeHashCode(byte[] array) { if (array == null) { return 0; } else { int hash = 7; byte[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { byte element = var2[var4]; hash = 31 * hash + element; } return hash; } } public static int nullSafeHashCode(char[] array) { if (array == null) { return 0; } else { int hash = 7; char[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { char element = var2[var4]; hash = 31 * hash + element; } return hash; } } public static int nullSafeHashCode(double[] array) { if (array == null) { return 0; } else { int hash = 7; double[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { double element = var2[var4]; hash = 31 * hash + hashCode(element); } return hash; } } public static int nullSafeHashCode(float[] array) { if (array == null) { return 0; } else { int hash = 7; float[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { float element = var2[var4]; hash = 31 * hash + hashCode(element); } return hash; } } public static int nullSafeHashCode(int[] array) { if (array == null) { return 0; } else { int hash = 7; int[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { int element = var2[var4]; hash = 31 * hash + element; } return hash; } } public static int nullSafeHashCode(long[] array) { if (array == null) { return 0; } else { int hash = 7; long[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { long element = var2[var4]; hash = 31 * hash + hashCode(element); } return hash; } } public static int nullSafeHashCode(short[] array) { if (array == null) { return 0; } else { int hash = 7; short[] var2 = array; int var3 = array.length; for(int var4 = 0; var4 < var3; ++var4) { short element = var2[var4]; hash = 31 * hash + element; } return hash; } } public static int hashCode(boolean bool) { return bool ? 1231 : 1237; } public static int hashCode(double dbl) { return hashCode(Double.doubleToLongBits(dbl)); } public static int hashCode(float flt) { return Float.floatToIntBits(flt); } public static int hashCode(long lng) { return (int)(lng ^ lng >>> 32); } public static String identityToString(Object obj) { return obj == null ? "" : obj.getClass().getName() + "@" + getIdentityHexString(obj); } public static String getIdentityHexString(Object obj) { return Integer.toHexString(System.identityHashCode(obj)); } public static String getDisplayString(Object obj) { return obj == null ? "" : nullSafeToString(obj); } public static String nullSafeClassName(Object obj) { return obj != null ? 
obj.getClass().getName() : "null"; } public static String nullSafeToString(Object obj) { if (obj == null) { return "null"; } else if (obj instanceof String) { return (String)obj; } else if (obj instanceof Object[]) { return nullSafeToString((Object[])((Object[])obj)); } else if (obj instanceof boolean[]) { return nullSafeToString((boolean[])((boolean[])obj)); } else if (obj instanceof byte[]) { return nullSafeToString((byte[])((byte[])obj)); } else if (obj instanceof char[]) { return nullSafeToString((char[])((char[])obj)); } else if (obj instanceof double[]) { return nullSafeToString((double[])((double[])obj)); } else if (obj instanceof float[]) { return nullSafeToString((float[])((float[])obj)); } else if (obj instanceof int[]) { return nullSafeToString((int[])((int[])obj)); } else if (obj instanceof long[]) { return nullSafeToString((long[])((long[])obj)); } else if (obj instanceof short[]) { return nullSafeToString((short[])((short[])obj)); } else { String str = obj.toString(); return str != null ? str : ""; } } public static String nullSafeToString(Object[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append(String.valueOf(array[i])); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(boolean[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append(array[i]); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(byte[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append(array[i]); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(char[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append("'").append(array[i]).append("'"); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(double[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append(array[i]); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(float[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append(array[i]); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(int[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { 
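/* every nullSafeToString array overload renders as "{e1, e2, ...}", with ", " between elements */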
sb.append(", "); } sb.append(array[i]); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(long[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append(array[i]); } sb.append("}"); return sb.toString(); } } } public static String nullSafeToString(short[] array) { if (array == null) { return "null"; } else { int length = array.length; if (length == 0) { return "{}"; } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < length; ++i) { if (i == 0) { sb.append("{"); } else { sb.append(", "); } sb.append(array[i]); } sb.append("}"); return sb.toString(); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/PatternMatchUtils.java ================================================ package com.pinecone.framework.util; import com.pinecone.framework.system.Nullable; public abstract class PatternMatchUtils { public PatternMatchUtils() { } public static boolean simpleMatch(@Nullable String pattern, @Nullable String str) { if (pattern != null && str != null) { int firstIndex = pattern.indexOf(42); if (firstIndex == -1) { return pattern.equals(str); } else if (firstIndex == 0) { if (pattern.length() == 1) { return true; } else { int nextIndex = pattern.indexOf(42, 1); if (nextIndex == -1) { return str.endsWith(pattern.substring(1)); } else { String part = pattern.substring(1, nextIndex); if (part.isEmpty()) { return simpleMatch(pattern.substring(nextIndex), str); } else { for( int partIndex = str.indexOf(part); partIndex != -1; partIndex = str.indexOf(part, partIndex + 1) ) { if (simpleMatch(pattern.substring(nextIndex), str.substring(partIndex + part.length()))) { return true; } } return false; } } } } else { return str.length() >= firstIndex && pattern.substring(0, firstIndex).equals(str.substring(0, firstIndex)) && simpleMatch(pattern.substring(firstIndex), str.substring(firstIndex)); } } else { return false; } } public static boolean simpleMatch(@Nullable String[] patterns, String str) { if ( patterns != null ) { for( int i = 0; i < patterns.length; ++i ) { String pattern = patterns[i]; if ( simpleMatch(pattern, str) ) { return true; } } } return false; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Randomium.java ================================================ package com.pinecone.framework.util; import com.pinecone.Pinecone; import com.pinecone.framework.system.prototype.Pinenut; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.Random; public class Randomium extends Random implements Pinenut { private static final String S_ALP_NUM_STRING_DICT = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; public Randomium(){ super(); } public Randomium( long seed ){ super( seed ); } public int nextInteger ( int from, int to ) { if ( from > to ) { throw new IllegalArgumentException( "'from' cannot be greater than 'to'!" ); } return this.nextInt(to - from + 1) + from; } public long nextLong ( long from, long to ) { if ( from > to ) { throw new IllegalArgumentException( "'from' cannot be greater than 'to'!" 
); } return from + Math.floorMod( this.nextLong(), to - from + 1 ); /* floorMod maps the raw long into [0, range), so the result stays within [from, to] even when nextLong() is negative */ } public short nextShort( short from, short to ) { if (from > to) { throw new IllegalArgumentException( "'from' cannot be greater than 'to'!" ); } return (short) ( this.nextInt(to - from + 1) + from ); } public byte nextByte( byte from, byte to ) { if ( from > to ) { throw new IllegalArgumentException( "'from' cannot be greater than 'to'!" ); } return (byte) ( this.nextInt(to - from + 1) + from ); } public char nextCharacter( char from, char to ) { if ( from > to ) { throw new IllegalArgumentException( "'from' char cannot be greater than 'to' char!" ); } return (char) ( this.nextInt(to - from + 1) + from ); } public float nextFloat32( float from, float to ) { if ( from > to ) { throw new IllegalArgumentException("'from' cannot be greater than 'to'!"); } return from + this.nextFloat() * (to - from); } public double nextFloat64( double from, double to ) { if ( from > to ) { throw new IllegalArgumentException("'from' cannot be greater than 'to'!"); } return from + this.nextDouble() * (to - from); } public BigDecimal nextBigDecimal( BigDecimal from, BigDecimal to, int scale ) { if ( from.compareTo(to) > 0 ) { throw new IllegalArgumentException("'from' cannot be greater than 'to'!"); } BigDecimal randomBigDecimal = from.add(new BigDecimal(this.nextDouble()).multiply(to.subtract(from))); return randomBigDecimal.setScale( scale, RoundingMode.HALF_UP ); } public BigInteger nextBigInteger( BigInteger from, BigInteger to ) { if ( from.compareTo(to) > 0 ) { throw new IllegalArgumentException( "'from' cannot be greater than 'to'!" ); } BigInteger range = to.subtract( from ).add( BigInteger.ONE ); /* calculate the range */ BigInteger randomNumber; do { randomNumber = new BigInteger(range.bitLength(), this); } while (randomNumber.compareTo(range) >= 0); return randomNumber.add( from ); } public String nextString( char from, char to, int scale ) { if( from > to ){ throw new IllegalArgumentException("'from' char can't be beyond 'to' char!"); } if( scale > Pinecone.COMMON_ACCURACY_LIMIT ){ throw new ArithmeticException("Randomium scale is too big; the limit is '" + Pinecone.COMMON_ACCURACY_LIMIT + "'!"); } String randomDict = Randomium.S_ALP_NUM_STRING_DICT ; int fromIndex = randomDict.indexOf(from), toIndex = randomDict.indexOf(to); StringBuilder sb = new StringBuilder(); for( int i=0; i < scale; ++i ){ sb.append(randomDict.charAt( this.nextInt(toIndex - fromIndex + 1) + fromIndex) ) ; } return sb.toString(); } public String nextString( int scale ){ return nextString('0','z',scale); } public String nextString(){ return nextString(10); } public String nextMBString( char from, char to, int scale ) { if ( from > to ) { throw new IllegalArgumentException( "'from' char can't be beyond 'to' char!" ); } if ( scale < 0 ) { throw new IllegalArgumentException( "Scale cannot be negative!"
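/* nextMBString draws raw char code points uniformly in [from, to], so it can emit multi-byte (non-ASCII) characters */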
); } StringBuilder sb = new StringBuilder(); for ( int i = 0; i < scale; ++i ) { sb.append((char) (this.nextInt(to - from + 1) + from)); } return sb.toString(); } public String nextMBString( int scale ){ return nextMBString('0','z',scale); } public String nextMBString(){ return nextMBString(10); } public double nextGaussian( double mean, double stddev ) { return mean + stddev * this.nextGaussian(); } public int nextPoisson( double lambda ) { double L = Math.exp(-lambda); int k = 0; double p = 1.0; do { k++; p *= this.nextDouble(); } while (p > L); return k - 1; } public double nextBias( double from, double to, double bias ) { double randomValue = this.nextDouble(); double biasedValue = Math.pow( randomValue, bias ); return from + (to - from) * biasedValue; } public static Randomium newInstance() { return new Randomium(); } public static Randomium newInstance( long seed ) { return new Randomium( seed ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/ReflectionUtils.java ================================================ package com.pinecone.framework.util; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.unit.ConcurrentReferenceHashMap; import java.beans.IntrospectionException; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.UndeclaredThrowableException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; public final class ReflectionUtils { private static final String CGLIB_RENAMED_METHOD_PREFIX = "CGLIB$"; private static final Map, Method[]> declaredMethodsCache = new ConcurrentReferenceHashMap(256); public static ReflectionUtils.FieldFilter COPYABLE_FIELDS = new ReflectionUtils.FieldFilter() { public boolean matches(Field field) { return !Modifier.isStatic(field.getModifiers()) && !Modifier.isFinal(field.getModifiers()); } }; public static ReflectionUtils.MethodFilter NON_BRIDGED_METHODS = new ReflectionUtils.MethodFilter() { public boolean matches(Method method) { return !method.isBridge(); } }; public static ReflectionUtils.MethodFilter USER_DECLARED_METHODS = new ReflectionUtils.MethodFilter() { public boolean matches(Method method) { return !method.isBridge() && method.getDeclaringClass() != Object.class; } }; public ReflectionUtils() { } public static Field findField(Class clazz, String name) { return findField(clazz, name, (Class)null); } public static Field findField(Class clazz, String name, Class type) { Assert.notNull(clazz, "Class must not be null"); Assert.isTrue(name != null || type != null, "Either name or type of the field must be specified"); for(Class searchType = clazz; !Object.class.equals(searchType) && searchType != null; searchType = searchType.getSuperclass()) { Field[] fields = searchType.getDeclaredFields(); Field[] t = fields; int len = fields.length; for(int i = 0; i < len; ++i) { Field field = t[i]; if ((name == null || name.equals(field.getName())) && (type == null || type.equals(field.getType()))) { return field; } } } return null; } public static void setField(Field field, Object target, Object value) { try { field.set(target, value); } catch (IllegalAccessException e) { handleReflectionException(e); throw new IllegalStateException("Unexpected reflection exception - " + 
e.getClass().getName() + ": " + e.getMessage()); } } public static Object getField(Field field, Object target) { try { return field.get(target); } catch (IllegalAccessException e) { handleReflectionException(e); throw new IllegalStateException("Unexpected reflection exception - " + e.getClass().getName() + ": " + e.getMessage()); } } public static Method findMethod(Class clazz, String name) { return findMethod(clazz, name, new Class[0]); /* delegate with an explicit empty signature; a bare findMethod(clazz, name) call would resolve to this same overload and recurse forever */ } public static Method findMethod(Class clazz, String name, Class... paramTypes) { Assert.notNull(clazz, "Class must not be null"); Assert.notNull(name, "Method name must not be null"); for(Class searchType = clazz; searchType != null; searchType = searchType.getSuperclass()) { Method[] methods = searchType.isInterface() ? searchType.getMethods() : getDeclaredMethods(searchType); Method[] t = methods; int len = methods.length; for( int i = 0; i < len; ++i ) { Method method = t[i]; if (name.equals(method.getName()) && (paramTypes == null || Arrays.equals(paramTypes, method.getParameterTypes()))) { return method; } } } return null; } public static Object invokeMethod(Method method, Object target) { return invokeMethod(method, target, new Object[0]); /* explicit empty argument list, for the same reason as findMethod above */ } public static Object invokeMethod(Method method, Object target, Object... args) { try { return method.invoke(target, args); } catch (Exception e) { handleReflectionException(e); throw new IllegalStateException("Should never get here"); } } public static Object invokeJdbcMethod(Method method, Object target) throws SQLException { return invokeJdbcMethod(method, target, new Object[0]); } public static Object invokeJdbcMethod(Method method, Object target, Object... args) throws SQLException { try { return method.invoke(target, args); } catch (IllegalAccessException e) { handleReflectionException(e); } catch (InvocationTargetException e1) { if (e1.getTargetException() instanceof SQLException) { throw (SQLException)e1.getTargetException(); } handleInvocationTargetException(e1); } throw new IllegalStateException("Should never get here"); } public static void handleReflectionException(Exception ex) { if (ex instanceof NoSuchMethodException) { throw new IllegalStateException("Method not found: " + ex.getMessage()); } else if (ex instanceof IllegalAccessException) { throw new IllegalStateException("Could not access method: " + ex.getMessage()); } else { if (ex instanceof InvocationTargetException) { handleInvocationTargetException((InvocationTargetException)ex); } if (ex instanceof RuntimeException) { throw (RuntimeException)ex; } else { throw new UndeclaredThrowableException(ex); } } } public static void handleInvocationTargetException(InvocationTargetException ex) { rethrowRuntimeException(ex.getTargetException()); } public static void rethrowRuntimeException(Throwable ex) { if (ex instanceof RuntimeException) { throw (RuntimeException)ex; } else if (ex instanceof Error) { throw (Error)ex; } else { throw new UndeclaredThrowableException(ex); } } public static void rethrowException(Throwable ex) throws Exception { if (ex instanceof Exception) { throw (Exception)ex; } else if (ex instanceof Error) { throw (Error)ex; } else { throw new UndeclaredThrowableException(ex); } } public static boolean declaresException(Method method, Class exceptionType) { Assert.notNull(method, "Method must not be null"); Class[] declaredExceptions = method.getExceptionTypes(); Class[] t = declaredExceptions; int length = declaredExceptions.length; for( int i = 0; i < length; ++i ) { Class declaredException = t[i]; if (declaredException.isAssignableFrom(exceptionType)) { return true; } }
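/* no declared exception type is assignable from the given exception type: */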
return false; } public static boolean isPublicStaticFinal(Field field) { int modifiers = field.getModifiers(); return Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers); } public static boolean isEqualsMethod(Method method) { if (method != null && method.getName().equals("equals")) { Class[] paramTypes = method.getParameterTypes(); return paramTypes.length == 1 && paramTypes[0] == Object.class; } else { return false; } } public static boolean isHashCodeMethod(Method method) { return method != null && method.getName().equals("hashCode") && method.getParameterTypes().length == 0; } public static boolean isToStringMethod(Method method) { return method != null && method.getName().equals("toString") && method.getParameterTypes().length == 0; } public static boolean isObjectMethod(Method method) { if (method == null) { return false; } else { try { Object.class.getDeclaredMethod(method.getName(), method.getParameterTypes()); return true; } catch ( Exception e ) { return false; } } } public static boolean isCglibRenamedMethod(Method renamedMethod) { String name = renamedMethod.getName(); if (!name.startsWith("CGLIB$")) { return false; } else { int i; for(i = name.length() - 1; i >= 0 && Character.isDigit(name.charAt(i)); --i) { } return i > "CGLIB$".length() && i < name.length() - 1 && name.charAt(i) == '$'; } } public static void makeAccessible(Field field) { if ((!Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass().getModifiers()) || Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) { field.setAccessible(true); } } public static void makeAccessible(Method method) { if ((!Modifier.isPublic(method.getModifiers()) || !Modifier.isPublic(method.getDeclaringClass().getModifiers())) && !method.isAccessible()) { method.setAccessible(true); } } public static void makeAccessible(Constructor ctor) { if ((!Modifier.isPublic(ctor.getModifiers()) || !Modifier.isPublic(ctor.getDeclaringClass().getModifiers())) && !ctor.isAccessible()) { ctor.setAccessible(true); } } public static Constructor accessibleConstructor(Class clazz, Class... 
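/* Usage sketch (illustrative; SomeType is hypothetical): a non-public constructor can be obtained via ReflectionUtils.accessibleConstructor(SomeType.class).newInstance(); */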
parameterTypes) throws NoSuchMethodException { Constructor ctor = clazz.getDeclaredConstructor(parameterTypes); ReflectionUtils.makeAccessible(ctor); return ctor; } public static void doWithMethods(Class clazz, ReflectionUtils.MethodCallback mc) throws IllegalArgumentException { doWithMethods(clazz, mc, (ReflectionUtils.MethodFilter)null); } public static void doWithMethods(Class clazz, ReflectionUtils.MethodCallback mc, ReflectionUtils.MethodFilter mf) throws IllegalArgumentException { Method[] methods = getDeclaredMethods(clazz); Method[] t = methods; int length = methods.length; int i; for( i = 0; i < length; ++i ) { Method method = t[i]; if (mf == null || mf.matches(method)) { try { mc.doWith(method); } catch (IllegalAccessException e) { throw new IllegalStateException("Shouldn't be illegal to access method '" + method.getName() + "': " + e); } } } if (clazz.getSuperclass() != null) { doWithMethods(clazz.getSuperclass(), mc, mf); } else if (clazz.isInterface()) { Class[] interfaces = clazz.getInterfaces(); length = interfaces.length; for( i = 0; i < length; ++i ) { Class superIfc = interfaces[i]; doWithMethods(superIfc, mc, mf); } } } public static Method[] getAllDeclaredMethods(Class leafClass) throws IllegalArgumentException { final List methods = new ArrayList(32); doWithMethods(leafClass, new ReflectionUtils.MethodCallback() { public void doWith(Method method) { methods.add(method); } }); return (Method[])methods.toArray(new Method[methods.size()]); } public static Method[] getUniqueDeclaredMethods(Class leafClass) throws IllegalArgumentException { final List methods = new ArrayList(32); doWithMethods(leafClass, new ReflectionUtils.MethodCallback() { public void doWith(Method method) { boolean knownSignature = false; Method methodBeingOverriddenWithCovariantReturnType = null; Iterator iter = methods.iterator(); while(iter.hasNext()) { Method existingMethod = (Method)iter.next(); if (method.getName().equals(existingMethod.getName()) && Arrays.equals(method.getParameterTypes(), existingMethod.getParameterTypes())) { if (existingMethod.getReturnType() != method.getReturnType() && existingMethod.getReturnType().isAssignableFrom(method.getReturnType())) { methodBeingOverriddenWithCovariantReturnType = existingMethod; break; } knownSignature = true; break; } } if (methodBeingOverriddenWithCovariantReturnType != null) { methods.remove(methodBeingOverriddenWithCovariantReturnType); } if (!knownSignature && !ReflectionUtils.isCglibRenamedMethod(method)) { methods.add(method); } } }); return (Method[])methods.toArray(new Method[methods.size()]); } private static Method[] getDeclaredMethods(Class clazz) { Method[] result = (Method[])declaredMethodsCache.get(clazz); if (result == null) { result = clazz.getDeclaredMethods(); declaredMethodsCache.put(clazz, result); } return result; } public static void doWithFields(Class clazz, ReflectionUtils.FieldCallback fc) throws IllegalArgumentException { doWithFields(clazz, fc, (ReflectionUtils.FieldFilter)null); } public static void doWithFields(Class clazz, ReflectionUtils.FieldCallback fc, ReflectionUtils.FieldFilter ff) throws IllegalArgumentException { Class targetClass = clazz; do { Field[] fields = targetClass.getDeclaredFields(); Field[] t = fields; int len = fields.length; for(int i = 0; i < len; ++i) { Field field = t[i]; if (ff == null || ff.matches(field)) { try { fc.doWith(field); } catch (IllegalAccessException e) { throw new IllegalStateException("Shouldn't be illegal to access field '" + field.getName() + "': " + e); } } } targetClass = 
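/* ascend to the superclass; the do-while below stops before scanning Object itself: */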
targetClass.getSuperclass(); } while(targetClass != null && targetClass != Object.class); } public static void shallowCopyFieldState(final Object src, final Object dest) throws IllegalArgumentException { if (src == null) { throw new IllegalArgumentException("Source for field copy cannot be null"); } else if (dest == null) { throw new IllegalArgumentException("Destination for field copy cannot be null"); } else if (!src.getClass().isAssignableFrom(dest.getClass())) { throw new IllegalArgumentException("Destination class [" + dest.getClass().getName() + "] must be same or subclass as source class [" + src.getClass().getName() + "]"); } else { doWithFields(src.getClass(), new ReflectionUtils.FieldCallback() { public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException { ReflectionUtils.makeAccessible(field); Object srcValue = field.get(src); field.set(dest, srcValue); } }, COPYABLE_FIELDS); } } public interface FieldFilter { boolean matches( Field field ); } public interface FieldCallback { void doWith( Field field ) throws IllegalArgumentException, IllegalAccessException; } public interface MethodFilter { boolean matches( Method method ); } public interface MethodCallback { void doWith( Method method ) throws IllegalArgumentException, IllegalAccessException; } /** * Version: New add in Pinecone Java Ver 20240628 */ public static Object tryAccessibleInvoke( Method method, Object obj, Object... args ) throws IllegalArgumentException, InvocationTargetException { try{ method.setAccessible( true ); return method.invoke( obj, args ); } catch ( IllegalAccessException eae ) { throw new ProxyProvokeHandleException( eae ); } } /** * Version: New add in Pinecone Java Ver 20241006 */ public static Object beanGet( Object bean, String propertyKey ) { try{ java.beans.PropertyDescriptor propertyDescriptor = new java.beans.PropertyDescriptor( propertyKey, bean.getClass() ); Method readMethod = propertyDescriptor.getReadMethod(); if ( readMethod != null ) { try{ readMethod.setAccessible( true ); return readMethod.invoke( bean ); } catch ( InvocationTargetException | IllegalArgumentException | IllegalAccessException e ) { return null; } } } catch ( IntrospectionException e ) { return null; } return null; } public static void beanSet( Object bean, String propertyKey, Object val ) throws IllegalArgumentException { try{ java.beans.PropertyDescriptor propertyDescriptor = new java.beans.PropertyDescriptor( propertyKey, bean.getClass() ); Method writeMethod = propertyDescriptor.getWriteMethod(); if ( writeMethod != null ) { try{ writeMethod.setAccessible( true ); writeMethod.invoke( bean, val ); } catch ( InvocationTargetException | IllegalArgumentException | IllegalAccessException e ) { throw new IllegalArgumentException( e ); } } } catch ( IntrospectionException e ) { throw new IllegalArgumentException( e ); } } /** * e.g. java.util.List * e.g. 
java.util.Map */ public static String[] extractGenericClassNames( String input ) { int startIndex = input.indexOf('<'); int endIndex = input.lastIndexOf('>'); if ( startIndex != -1 && endIndex != -1 && endIndex > startIndex ) { String types = input.substring( startIndex + 1, endIndex ).trim(); if( types.indexOf( ", " ) > 0 ) { return types.split( ", " ); } return types.split( "," ); } return null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/StringTraits.java ================================================ package com.pinecone.framework.util; import java.util.Arrays; public class StringTraits { // String like `CHO2OHC` public static boolean isChiralString ( String szThat, boolean bNoCase ) { int nMid = szThat.length() / 2; for ( int i = 0; i < nMid; i++ ) { char c1 = szThat.charAt(i); char c2 = szThat.charAt( szThat.length() - i - 1 ); if( bNoCase ){ if( !CharactersUtils.regionMatches( c1, c2 ) ){ return false; } } else { if ( c1 != c2 ) { return false; } } } return true; } public static boolean isChiralString ( String szThat ) { return StringTraits.isChiralString( szThat, true ); } // String like `CHO2CHO` public static boolean isHomoString ( String szThat, boolean bNoCase ) { int nBias = szThat.length() % 2 != 0 ? 1 : 0; int nMid = szThat.length() / 2; for ( int i = 0; i < nMid; i++ ) { char c1 = szThat.charAt(i); char c2 = szThat.charAt( nMid + i + nBias ); if( bNoCase ){ if( !CharactersUtils.regionMatches( c1, c2 ) ){ return false; } } else { if ( c1 != c2 ) { return false; } } } return true; } public static boolean isHomoString ( String szThat ) { return StringTraits.isHomoString( szThat, true ); } // String like `CHO2HCO` or `CHO2COH` etc. public static boolean isHeterString ( String szThat, boolean bNoCase ) { int nBias = szThat.length() % 2 != 0 ? 
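// Expected results for the three trait checks, matching the comments above:
//     StringTraits.isChiralString( "CHO2OHC" ) -> true   (reads identically when mirrored)
//     StringTraits.isHomoString( "CHO2CHO" )   -> true   (second half repeats the first)
//     StringTraits.isHeterString( "CHO2HCO" )  -> true   (halves are permutations of each other)
// The middle character of odd-length strings is skipped by all three checks.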
1 : 0; int nMid = szThat.length() / 2; char[] chars = szThat.toCharArray(); Arrays.sort( chars, 0, nMid ); Arrays.sort( chars, nMid + nBias, szThat.length() ); return CharactersUtils.equals( chars, 0, nMid, chars, nMid + nBias, chars.length, bNoCase ); } public static boolean isHeterString ( String szThat ) { return StringTraits.isHeterString( szThat, true ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/StringUtils.java ================================================ package com.pinecone.framework.util; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import java.util.TimeZone; import java.util.TreeSet; public final class StringUtils { public static final String FOLDER_SEPARATOR = "/"; public static final String WINDOWS_FOLDER_SEPARATOR = "\\"; public static final String TOP_PATH = ".."; public static final String CURRENT_PATH = "."; public static final char EXTENSION_SEPARATOR = '.'; public StringUtils() { } public static boolean isEmpty( Object str ) { return str == null || "".equals(str); } public static boolean isEmpty( String str ) { return str == null || str.isEmpty(); } public static boolean isNoneEmpty( String str ) { return !StringUtils.isEmpty( str ); } public static boolean hasLength(CharSequence str) { return str != null && str.length() > 0; } public static boolean hasLength(String str) { return hasLength((CharSequence)str); } public static boolean hasText(CharSequence str) { if (!hasLength(str)) { return false; } else { int strLen = str.length(); for(int i = 0; i < strLen; ++i) { if (!Character.isWhitespace(str.charAt(i))) { return true; } } return false; } } public static boolean hasText(String str) { return hasText((CharSequence)str); } public static boolean containsWhitespace(CharSequence str) { if (!hasLength(str)) { return false; } else { int strLen = str.length(); for(int i = 0; i < strLen; ++i) { if (Character.isWhitespace(str.charAt(i))) { return true; } } return false; } } public static boolean containsWhitespace(String str) { return containsWhitespace((CharSequence)str); } public static String trimWhitespace(String str) { if (!hasLength(str)) { return str; } else { StringBuilder sb = new StringBuilder(str); while(sb.length() > 0 && Character.isWhitespace(sb.charAt(0))) { sb.deleteCharAt(0); } while(sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) { sb.deleteCharAt(sb.length() - 1); } return sb.toString(); } } public static String trimAllWhitespace(String str) { if (!hasLength(str)) { return str; } else { int len = str.length(); StringBuilder sb = new StringBuilder(str.length()); for(int i = 0; i < len; ++i) { char c = str.charAt(i); if (!Character.isWhitespace(c)) { sb.append(c); } } return sb.toString(); } } public static String trimLeadingWhitespace(String str) { if (!hasLength(str)) { return str; } else { StringBuilder sb = new StringBuilder(str); while(sb.length() > 0 && Character.isWhitespace(sb.charAt(0))) { sb.deleteCharAt(0); } return sb.toString(); } } public static String trimTrailingWhitespace(String str) { if (!hasLength(str)) { return str; } else { StringBuilder sb = new StringBuilder(str); 
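// Behavioural contrast worth remembering (illustrative values):
//     StringUtils.hasLength( " " ) -> true    (non-empty, even if all whitespace)
//     StringUtils.hasText( " " )   -> false   (demands at least one non-whitespace char)
//     StringUtils.trimAllWhitespace( "a b\tc" ) -> "abc"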
while(sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) { sb.deleteCharAt(sb.length() - 1); } return sb.toString(); } } public static String trimLeadingCharacter(String str, char leadingCharacter) { if (!hasLength(str)) { return str; } else { StringBuilder sb = new StringBuilder(str); while(sb.length() > 0 && sb.charAt(0) == leadingCharacter) { sb.deleteCharAt(0); } return sb.toString(); } } public static String trimTrailingCharacter(String str, char trailingCharacter) { if (!hasLength(str)) { return str; } else { StringBuilder sb = new StringBuilder(str); while(sb.length() > 0 && sb.charAt(sb.length() - 1) == trailingCharacter) { sb.deleteCharAt(sb.length() - 1); } return sb.toString(); } } public static boolean startsWithIgnoreCase(String str, String prefix) { if (str != null && prefix != null) { if (str.startsWith(prefix)) { return true; } else if (str.length() < prefix.length()) { return false; } else { String lcStr = str.substring(0, prefix.length()).toLowerCase(); String lcPrefix = prefix.toLowerCase(); return lcStr.equals(lcPrefix); } } else { return false; } } public static boolean endsWithIgnoreCase(String str, String suffix) { if (str != null && suffix != null) { if (str.endsWith(suffix)) { return true; } else if (str.length() < suffix.length()) { return false; } else { String lcStr = str.substring(str.length() - suffix.length()).toLowerCase(); String lcSuffix = suffix.toLowerCase(); return lcStr.equals(lcSuffix); } } else { return false; } } public static boolean substringMatch(CharSequence str, int index, CharSequence substring) { for(int j = 0; j < substring.length(); ++j) { int i = index + j; if (i >= str.length() || str.charAt(i) != substring.charAt(j)) { return false; } } return true; } public static int countOccurrencesOf( String str, String sub ) { if ( str != null && sub != null && str.length() != 0 && sub.length() != 0 ) { int count = 0; int idx; for( int pos = 0; (idx = str.indexOf(sub, pos)) != -1; pos = idx + sub.length() ) { ++count; } return count; } else { return 0; } } public static int countOccurrencesOf( String str, String sub, int threshold ) { if ( str != null && sub != null && str.length() != 0 && sub.length() != 0 ) { int count = 0; int idx; for( int pos = 0; (idx = str.indexOf(sub, pos)) != -1; pos = idx + sub.length() ) { ++count; if( count >= threshold ) { break; } } return count; } else { return 0; } } public static String replace(String inString, String oldPattern, String newPattern) { if (hasLength(inString) && hasLength(oldPattern) && newPattern != null) { StringBuilder sb = new StringBuilder(); int pos = 0; int index = inString.indexOf(oldPattern); for(int patLen = oldPattern.length(); index >= 0; index = inString.indexOf(oldPattern, pos)) { sb.append(inString.substring(pos, index)); sb.append(newPattern); pos = index + patLen; } sb.append(inString.substring(pos)); return sb.toString(); } else { return inString; } } public static String delete(String inString, String pattern) { return replace(inString, pattern, ""); } public static String deleteAny(String inString, String charsToDelete) { if (hasLength(inString) && hasLength(charsToDelete)) { StringBuilder sb = new StringBuilder(); for(int i = 0; i < inString.length(); ++i) { char c = inString.charAt(i); if (charsToDelete.indexOf(c) == -1) { sb.append(c); } } return sb.toString(); } else { return inString; } } public static String quote(String str) { return str != null ? "'" + str + "'" : null; } public static Object quoteIfString(Object obj) { return obj instanceof String ? 
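// Illustrative results for the search/replace helpers in this region:
//     StringUtils.countOccurrencesOf( "aaaa", "aa" )  -> 2        (non-overlapping scan)
//     StringUtils.replace( "a-b-c", "-", "+" )        -> "a+b+c"
//     StringUtils.deleteAny( "config.json", ".json" ) -> "cfig"   (removes characters, not the substring)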
quote((String)obj) : obj; } public static String unqualify(String qualifiedName) { return unqualify(qualifiedName, '.'); } public static String unqualify(String qualifiedName, char separator) { return qualifiedName.substring(qualifiedName.lastIndexOf(separator) + 1); } public static String capitalize(String str) { return StringUtils.changeFirstCharacterCase(str, true); } public static String uncapitalize(String str) { return StringUtils.changeFirstCharacterCase(str, false); } private static String changeFirstCharacterCase( String str, boolean bCapitalize ) { if (str != null && str.length() != 0) { char[] cs = str.toCharArray(); if (bCapitalize) { cs[0] = Character.toUpperCase( cs[0] ); } else { cs[0] = Character.toLowerCase( cs[0] ); } return String.valueOf( cs ); } else { return str; } } public static String getFilename(String path) { if (path == null) { return null; } else { int separatorIndex = path.lastIndexOf("/"); return separatorIndex != -1 ? path.substring(separatorIndex + 1) : path; } } public static String getFilenameExtension(String path) { if (path == null) { return null; } else { int extIndex = path.lastIndexOf(46); if (extIndex == -1) { return null; } else { int folderIndex = path.lastIndexOf("/"); return folderIndex > extIndex ? null : path.substring(extIndex + 1); } } } public static String stripFilenameExtension(String path) { if (path == null) { return null; } else { int extIndex = path.lastIndexOf(46); if (extIndex == -1) { return path; } else { int folderIndex = path.lastIndexOf("/"); return folderIndex > extIndex ? path : path.substring(0, extIndex); } } } public static String applyRelativePath(String path, String relativePath) { int separatorIndex = path.lastIndexOf("/"); if (separatorIndex != -1) { String newPath = path.substring(0, separatorIndex); if (!relativePath.startsWith("/")) { newPath = newPath + "/"; } return newPath + relativePath; } else { return relativePath; } } public static String cleanPath(String path) { if (path == null) { return null; } else { String pathToUse = replace(path, "\\", "/"); int prefixIndex = pathToUse.indexOf(":"); String prefix = ""; if (prefixIndex != -1) { prefix = pathToUse.substring(0, prefixIndex + 1); if (prefix.contains("/")) { prefix = ""; } else { pathToUse = pathToUse.substring(prefixIndex + 1); } } if (pathToUse.startsWith("/")) { prefix = prefix + "/"; pathToUse = pathToUse.substring(1); } String[] pathArray = delimitedListToStringArray(pathToUse, "/"); List pathElements = new LinkedList(); int tops = 0; int i; for(i = pathArray.length - 1; i >= 0; --i) { String element = pathArray[i]; if (!".".equals(element)) { if ("..".equals(element)) { ++tops; } else if (tops > 0) { --tops; } else { pathElements.add(0, element); } } } for(i = 0; i < tops; ++i) { pathElements.add(0, ".."); } return prefix + collectionToDelimitedString(pathElements, "/"); } } public static boolean pathEquals(String path1, String path2) { return cleanPath(path1).equals(cleanPath(path2)); } public static Locale parseLocaleString(String localeString) { String[] parts = tokenizeToStringArray(localeString, "_ ", false, false); String language = parts.length > 0 ? parts[0] : ""; String country = parts.length > 1 ? 
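// cleanPath normalizes separators and resolves "." / ".." segments, e.g.:
//     StringUtils.cleanPath( "main\\app/../conf/./db.json" ) -> "main/conf/db.json"
//     StringUtils.pathEquals( "a/b/../c", "a/c" )            -> true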
parts[1] : ""; validateLocalePart(language); validateLocalePart(country); String variant = ""; if (parts.length > 2) { int endIndexOfCountryCode = localeString.indexOf(country, language.length()) + country.length(); variant = trimLeadingWhitespace(localeString.substring(endIndexOfCountryCode)); if (variant.startsWith("_")) { variant = trimLeadingCharacter(variant, '_'); } } return language.length() > 0 ? new Locale(language, country, variant) : null; } private static void validateLocalePart(String localePart) { for(int i = 0; i < localePart.length(); ++i) { char ch = localePart.charAt(i); if (ch != '_' && ch != ' ' && !Character.isLetterOrDigit(ch)) { throw new IllegalArgumentException("Locale part \"" + localePart + "\" contains invalid characters"); } } } public static String toLanguageTag(Locale locale) { return locale.getLanguage() + (hasText(locale.getCountry()) ? "-" + locale.getCountry() : ""); } public static TimeZone parseTimeZoneString(String timeZoneString) { TimeZone timeZone = TimeZone.getTimeZone(timeZoneString); if ("GMT".equals(timeZone.getID()) && !timeZoneString.startsWith("GMT")) { throw new IllegalArgumentException("Invalid time zone specification '" + timeZoneString + "'"); } else { return timeZone; } } public static String[] addStringToArray(String[] array, String str) { if (ObjectUtils.isEmpty(array)) { return new String[]{str}; } else { String[] newArr = new String[array.length + 1]; System.arraycopy(array, 0, newArr, 0, array.length); newArr[array.length] = str; return newArr; } } public static String[] concatenateStringArrays(String[] array1, String[] array2) { if (ObjectUtils.isEmpty(array1)) { return array2; } else if (ObjectUtils.isEmpty(array2)) { return array1; } else { String[] newArr = new String[array1.length + array2.length]; System.arraycopy(array1, 0, newArr, 0, array1.length); System.arraycopy(array2, 0, newArr, array1.length, array2.length); return newArr; } } public static String[] mergeStringArrays(String[] array1, String[] array2) { if (ObjectUtils.isEmpty(array1)) { return array2; } else if (ObjectUtils.isEmpty(array2)) { return array1; } else { List result = new ArrayList(); result.addAll(Arrays.asList(array1)); String[] arr = array2; int len = array2.length; for( int i = 0; i < len; ++i ) { String str = arr[i]; if ( !result.contains(str) ) { result.add(str); } } return toStringArray((Collection)result); } } public static String[] sortStringArray(String[] array) { if (ObjectUtils.isEmpty(array)) { return new String[0]; } else { Arrays.sort(array); return array; } } public static String[] toStringArray(Collection collection) { return collection == null ? null : (String[])collection.toArray(new String[collection.size()]); } public static String[] toStringArray(Enumeration enumeration) { if (enumeration == null) { return null; } else { List list = Collections.list(enumeration); return (String[])list.toArray(new String[list.size()]); } } public static String[] trimArrayElements(String[] array) { if (ObjectUtils.isEmpty(array)) { return new String[0]; } else { String[] result = new String[array.length]; for(int i = 0; i < array.length; ++i) { String element = array[i]; result[i] = element != null ? 
element.trim() : null; } return result; } } public static String[] removeDuplicateStrings( String[] array ) { if ( ObjectUtils.isEmpty( array ) ) { return array; } else { Set set = new TreeSet(); String[] arr = array; int len = array.length; for( int i = 0; i < len; ++i ) { String element = arr[ i ]; set.add(element); } return toStringArray( (Collection)set ); } } public static String[] split(String toSplit, String delimiter) { if (hasLength(toSplit) && hasLength(delimiter)) { int offset = toSplit.indexOf(delimiter); if (offset < 0) { return null; } else { String beforeDelimiter = toSplit.substring(0, offset); String afterDelimiter = toSplit.substring(offset + delimiter.length()); return new String[]{beforeDelimiter, afterDelimiter}; } } else { return null; } } public static Properties splitArrayElementsIntoProperties(String[] array, String delimiter) { return splitArrayElementsIntoProperties(array, delimiter, (String)null); } public static Properties splitArrayElementsIntoProperties(String[] array, String delimiter, String charsToDelete) { if ( ObjectUtils.isEmpty(array) ) { return null; } else { Properties result = new Properties(); String[] arr = array; int len = array.length; for( int i = 0; i < len; ++i ) { String element = arr[i]; if ( charsToDelete != null ) { element = deleteAny(element, charsToDelete); } String[] splittedElement = split(element, delimiter); if ( splittedElement != null ) { result.setProperty(splittedElement[0].trim(), splittedElement[1].trim()); } } return result; } } public static String[] tokenizeToStringArray(String str, String delimiters) { return tokenizeToStringArray(str, delimiters, true, true); } public static String[] tokenizeToStringArray(String str, String delimiters, boolean trimTokens, boolean ignoreEmptyTokens) { if (str == null) { return null; } else { StringTokenizer st = new StringTokenizer(str, delimiters); ArrayList tokens = new ArrayList(); while(true) { String token; do { if (!st.hasMoreTokens()) { return toStringArray((Collection)tokens); } token = st.nextToken(); if (trimTokens) { token = token.trim(); } } while(ignoreEmptyTokens && token.length() <= 0); tokens.add(token); } } } public static String[] delimitedListToStringArray(String str, String delimiter) { return delimitedListToStringArray(str, delimiter, (String)null); } public static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) { if (str == null) { return new String[0]; } else if (delimiter == null) { return new String[]{str}; } else { List result = new ArrayList(); int pos; if ("".equals(delimiter)) { for(pos = 0; pos < str.length(); ++pos) { result.add(deleteAny(str.substring(pos, pos + 1), charsToDelete)); } } else { int delPos; for(pos = 0; (delPos = str.indexOf(delimiter, pos)) != -1; pos = delPos + delimiter.length()) { result.add(deleteAny(str.substring(pos, delPos), charsToDelete)); } if (str.length() > 0 && pos <= str.length()) { result.add(deleteAny(str.substring(pos), charsToDelete)); } } return toStringArray((Collection)result); } } public static String[] commaDelimitedListToStringArray(String str) { return delimitedListToStringArray(str, ","); } public static Set commaDelimitedListToSet(String str) { Set set = new TreeSet(); String[] tokens = commaDelimitedListToStringArray(str); String[] ts = tokens; int len = tokens.length; for( int i = 0; i < len; ++i ) { String token = ts[i]; set.add(token); } return set; } public static String collectionToDelimitedString(Collection coll, String delim, String prefix, String suffix) { if 
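// The two splitters above interpret their delimiter argument differently:
//     tokenizeToStringArray( "a,b;c", ",;" )      -> ["a", "b", "c"]   (every char is a delimiter)
//     delimitedListToStringArray( "a,b;c", ",;" ) -> ["a,b;c"]         (the whole string must match)
//     delimitedListToStringArray( "a,,b", "," )   -> ["a", "", "b"]    (empty tokens are preserved)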
(CollectionUtils.isEmpty(coll)) { return ""; } else { StringBuilder sb = new StringBuilder(); Iterator it = coll.iterator(); while(it.hasNext()) { sb.append(prefix).append(it.next()).append(suffix); if (it.hasNext()) { sb.append(delim); } } return sb.toString(); } } public static String collectionToDelimitedString(Collection coll, String delim) { return collectionToDelimitedString(coll, delim, "", ""); } public static String collectionToCommaDelimitedString(Collection coll) { return collectionToDelimitedString(coll, ","); } public static String arrayToDelimitedString(Object[] arr, String delim) { if (ObjectUtils.isEmpty(arr)) { return ""; } else if (arr.length == 1) { return ObjectUtils.nullSafeToString(arr[0]); } else { StringBuilder sb = new StringBuilder(); for(int i = 0; i < arr.length; ++i) { if (i > 0) { sb.append(delim); } sb.append(arr[i]); } return sb.toString(); } } public static String arrayToCommaDelimitedString(Object[] arr) { return arrayToDelimitedString(arr, ","); } /** * prototype Pinecone C/C++ Ver 3.1 * Version: New add in Pinecone Java Ver 3.0 */ public static String hypertext2Text( String hypertext , boolean replaceBlankSpace , boolean replaceNewLine){ String szRegex = "<.*?>"; if (replaceBlankSpace){ szRegex += "| "; } if (replaceNewLine){ szRegex += "|\t" + "|\r" + "|\n"; } hypertext = hypertext.replaceAll(szRegex,""); //hypertext = StringEscapeUtils.unescapeHtml( hypertext ); return hypertext; } public static String nullThenEmpty ( String str ){ return str == null ? "" : str; } public static Writer addSlashes( String szBadString, Writer writer, boolean bJsonQuoteMode ) throws IOException { if ( szBadString != null && szBadString.length() != 0 ) { char c = 0; int len = szBadString.length(); if( bJsonQuoteMode ){ writer.write(34); } for( int i = 0; i < len; ++i ) { char b = c; c = szBadString.charAt(i); switch(c) { case '\b': { writer.write("\\b"); continue; } case '\t': { writer.write("\\t"); continue; } case '\n': { writer.write("\\n"); continue; } case '\f': { writer.write("\\f"); continue; } case '\r': { writer.write("\\r"); continue; } case '\'':{ if( bJsonQuoteMode ){ writer.write(c); continue; } } case '"': case '\\': { writer.write(92); writer.write(c); continue; } case '/': { if (b == '<') { writer.write(92); } writer.write(c); continue; } } if (c >= ' ' && (c < 128 || c >= 160) && (c < 8192 || c >= 8448)) { writer.write(c); } else { writer.write("\\u"); String szHexString = Integer.toHexString(c); writer.write("0000", 0, 4 - szHexString.length() ); writer.write(szHexString); } } if( bJsonQuoteMode ){ writer.write(34); } return writer; } else { if( bJsonQuoteMode ){ writer.write("\"\""); } return writer; } } public static String addSlashes( String szBadString, boolean bJsonQuoteMode ) { StringWriter sw = new StringWriter(); synchronized( sw.getBuffer()) { String s; try { s = StringUtils.addSlashes( szBadString, sw, bJsonQuoteMode ).toString(); } catch ( IOException e ) { return ""; } return s; } } public static String addSlashes( String szBadString ) { return StringUtils.addSlashes( szBadString, false ); } public static String jsonQuote( String szBadString ) { return StringUtils.addSlashes( szBadString, true ); } public static String sequencify ( String[] sequences, String szDelimiter, String szPrefix ){ StringBuilder sb = new StringBuilder(); int i = 0; for( String sequence : sequences ){ sb.append( szPrefix ).append( sequence ); if( ++i != sequences.length ){ sb.append( szDelimiter ); } } return sb.toString(); } public static String sequencify ( String[] 
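// addSlashes escapes control and quote characters; jsonQuote additionally wraps the
// result in double quotes, yielding a valid JSON string literal, e.g. for a value
// containing a newline:
//     StringUtils.jsonQuote( "line1" + '\n' + "line2" ) -> "\"line1\\nline2\""   (in source-literal notation)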
sequences, String szDelimiter ){ return StringUtils.sequencify( sequences, szDelimiter, "" ); } public static String[] trimEmptyElement( String[] strings ) { String[] buf = new String[ strings.length ]; int j = 0; for ( int i = 0; i < strings.length; i++ ) { String each = strings[ i ]; if( !StringUtils.isEmpty( each ) ) { buf[ j++ ] = each; } } return Arrays.copyOf( buf, j ); } /** * Version: New add in Pinecone Java Ver 20240624 */ public static boolean containsBoth( String target, String moreChars ) { boolean[] found = new boolean[ moreChars.length() ]; Arrays.fill( found, false ); for ( int i = 0; i < target.length(); ++i ) { char c = target.charAt( i ); int index = moreChars.indexOf( c ); if ( index != -1 ) { found[ index ] = true; } } for ( boolean isFound : found ) { if ( !isFound ) { return false; } } return true; } public static boolean containsBoth( String target, char[] moreChars ) { boolean[] found = new boolean[ moreChars.length ]; Arrays.fill( found, false ); for ( int i = 0; i < target.length(); i++ ) { char c = target.charAt(i); for ( int j = 0; j < moreChars.length; ++j ) { if( found[j] ) { continue; } if ( c == moreChars[j] ) { found[j] = true; break; } } } for ( boolean isFound : found ) { if ( !isFound ) { return false; } } return true; } public static boolean containsOnce( String target, String moreChars ) { for ( int i = 0; i < moreChars.length(); ++i ) { char c = moreChars.charAt( i ); if ( target.indexOf(c) != -1 ) { return true; } } return false; } public static boolean containsOnce( String target, char[] moreChars ) { for ( int i = 0; i < target.length(); i++ ) { char c = target.charAt(i); for ( int j = 0; j < moreChars.length; ++j ) { if ( c == moreChars[j] ) { return true; } } } return false; } public static int countOccurrencesOf( String target, char specifiedChar, int threshold ) { int count = 0; for ( int i = 0; i < target.length(); ++i ) { if ( target.charAt(i) == specifiedChar ) { count++; if ( threshold > 0 && count >= threshold ) { return count; } } } return count; } public static int countOccurrencesOf( String target, char specifiedChar ) { return StringUtils.countOccurrencesOf( target, specifiedChar, 0 ); } /** * Version: New add in Pinecone Java Ver 20241003 */ public static boolean isBlank( String str ) { int strLen; if ( str != null && (strLen = str.length()) != 0 ) { for( int i = 0; i < strLen; ++i ) { if ( !Character.isWhitespace(str.charAt(i)) ) { return false; } } } return true; } public static boolean isNotBlank( String str ) { return !StringUtils.isBlank( str ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/SuperConvert.java ================================================ package com.pinecone.framework.util; public class SuperConvert { public static double[] object2Float64Array(Object[] objects){ double[] arrayBuf = new double[objects.length]; for(int i=0;i T resolve( @Nullable Supplier supplier ) { return supplier != null ? 
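// containsBoth demands every listed character; containsOnce is satisfied by any one:
//     StringUtils.containsBoth( "server-01", "-0" ) -> true    ('-' and '0' both occur)
//     StringUtils.containsOnce( "server", "xyz" )   -> false   (none of x, y, z occurs)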
supplier.get() : null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/UnitHelper.java ================================================ package com.pinecone.framework.util; import java.lang.reflect.Array; import java.util.List; public final class UnitHelper { public static int accumulateInt( int from, int to, List list ){ int sum = 0; for( int i = from; i < to; i++ ){ sum += list.get( i ); } return sum; } public static int accumulateInt( List list ) { return UnitHelper.accumulateInt( 0, list.size(), list ); } public static double accumulateDouble( int from, int to, List list ){ double sum = 0; for( int i = from; i < to; ++i ){ sum += list.get( i ); } return sum; } public static double accumulateDouble( List list ) { return UnitHelper.accumulateDouble( 0, list.size(), list ); } @SuppressWarnings( "unchecked" ) public static T[] append( T[] original, int currentSize, T element ) { if ( currentSize >= original.length ) { T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length + 1 ); System.arraycopy( original, 0, newArray, 0, original.length ); newArray[ currentSize ] = element; return newArray; } else { original[ currentSize ] = element; return original; } } public static T[] append( T[] original, T element ) { return UnitHelper.append( original, original.length, element ); } @SuppressWarnings( "unchecked" ) public static T[] remove( T[] original, int index ) { if ( index < 0 || index >= original.length ) { throw new IndexOutOfBoundsException( "Index: " + index + ", Size: " + original.length ); } T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length - 1 ); for ( int i = 0, j = 0; i < original.length; ++i ) { if ( i != index ) { newArray[ j++ ] = original[ i ]; } } return newArray; } @SuppressWarnings( "unchecked" ) public static T[] popBack( T[] original ) { if ( original.length == 0 ) { throw new IllegalStateException( "Cannot pop from an empty array." ); } T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length - 1 ); System.arraycopy( original, 0, newArray, 0, original.length - 1 ); return newArray; } @SuppressWarnings( "unchecked" ) public static T[] pollFirst( T[] original ) { if ( original.length == 0 ) { throw new IllegalStateException( "Cannot poll from an empty array." ); } T firstElement = original[ 0 ]; T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length - 1 ); System.arraycopy( original, 1, newArray, 0, original.length - 1 ); return newArray; } @SuppressWarnings( "unchecked" ) public static T[] insert( T[] original, int index, T element ) { if ( index < 0 || index > original.length ) { throw new IndexOutOfBoundsException( "Index: " + index + ", Size: " + original.length ); } T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length + 1 ); System.arraycopy( original, 0, newArray, 0, index ); newArray[ index ] = element; System.arraycopy( original, index, newArray, index + 1, original.length - index ); return newArray; } public static Object mergeArr( Object... 
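// UnitHelper treats plain arrays as a tiny copy-on-write list library, e.g.:
//     Integer[] a = { 1, 2, 3 };
//     UnitHelper.append( a, 4 )    -> { 1, 2, 3, 4 }
//     UnitHelper.insert( a, 0, 9 ) -> { 9, 1, 2, 3 }
//     UnitHelper.popBack( a )      -> { 1, 2 }      (these overloads return fresh copies)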
arrays ) { return UnitHelper.mergeArrays( arrays ); } public static Object mergeArrays( Object[] arrays ) { if ( arrays == null || arrays.length == 0 ) { throw new IllegalArgumentException("Input arrays cannot be null or empty."); } Class componentType = arrays[ 0 ].getClass().getComponentType(); int totalLength = 0; for ( Object array : arrays ) { totalLength += Array.getLength(array); } Object result = Array.newInstance( componentType, totalLength ); int currentIndex = 0; for ( Object array : arrays ) { int length = Array.getLength(array); System.arraycopy( array, 0, result, currentIndex, length ); currentIndex += length; } return result; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/CompoundComparator.java ================================================ package com.pinecone.framework.util.comparator; import com.pinecone.framework.util.Assert; import java.io.Serializable; import java.util.ArrayList; import java.util.Comparator; import java.util.Iterator; import java.util.List; public class CompoundComparator implements Comparator, Serializable { private final List comparators; public CompoundComparator() { this.comparators = new ArrayList<>(); } public CompoundComparator( Comparator... comparators ) { Assert.notNull(comparators, "Comparators must not be null"); this.comparators = new ArrayList<>(comparators.length); Comparator[] hComparators = comparators; int len = comparators.length; for( int i = 0; i < len; ++i ) { Comparator comparator = hComparators[i]; this.addComparator(comparator); } } public void addComparator(Comparator comparator) { if (comparator instanceof InvertibleComparator) { this.comparators.add((InvertibleComparator)comparator); } else { this.comparators.add(new InvertibleComparator(comparator)); } } public void addComparator(Comparator comparator, boolean ascending) { this.comparators.add(new InvertibleComparator(comparator, ascending)); } public void setComparator(int index, Comparator comparator) { if (comparator instanceof InvertibleComparator) { this.comparators.set(index, (InvertibleComparator)comparator); } else { this.comparators.set(index, new InvertibleComparator(comparator)); } } public void setComparator(int index, Comparator comparator, boolean ascending) { this.comparators.set(index, new InvertibleComparator(comparator, ascending)); } public void invertOrder() { Iterator iter = this.comparators.iterator(); while( iter.hasNext() ) { InvertibleComparator comparator = (InvertibleComparator)iter.next(); comparator.invertOrder(); } } public void invertOrder(int index) { ((InvertibleComparator)this.comparators.get(index)).invertOrder(); } public void setAscendingOrder(int index) { ((InvertibleComparator)this.comparators.get(index)).setAscending(true); } public void setDescendingOrder(int index) { ((InvertibleComparator)this.comparators.get(index)).setAscending(false); } public int getComparatorCount() { return this.comparators.size(); } public int compare(T o1, T o2) { Assert.state(this.comparators.size() > 0, "No sort definitions have been added to this CompoundComparator to compare"); Iterator iter = this.comparators.iterator(); int result; do { if ( !iter.hasNext() ) { return 0; } InvertibleComparator comparator = (InvertibleComparator)iter.next(); result = comparator.compare(o1, o2); } while( result == 0 ); return result; } public boolean equals(Object obj) { if (this == obj) { return true; } else if (!(obj instanceof CompoundComparator)) { return false; } else { 
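// CompoundComparator consults its comparators in order until one differentiates.
// A sketch with a hypothetical Person type (not part of this repository):
//     CompoundComparator byNameThenAge = new CompoundComparator(
//         Comparator.comparing( Person::getLastName ),
//         Comparator.comparingInt( Person::getAge ) );
//     byNameThenAge.invertOrder( 1 );   // names stay ascending, age flips to descending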
CompoundComparator other = (CompoundComparator)obj; return this.comparators.equals(other.comparators); } } public int hashCode() { return this.comparators.hashCode(); } public String toString() { return "CompoundComparator: " + this.comparators; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/InvertibleComparator.java ================================================ package com.pinecone.framework.util.comparator; import com.pinecone.framework.util.Assert; import java.io.Serializable; import java.util.Comparator; public class InvertibleComparator implements Comparator, Serializable { private final Comparator comparator; private boolean ascending = true; public InvertibleComparator(Comparator comparator) { Assert.notNull(comparator, "Comparator must not be null"); this.comparator = comparator; } public InvertibleComparator(Comparator comparator, boolean ascending) { Assert.notNull(comparator, "Comparator must not be null"); this.comparator = comparator; this.setAscending(ascending); } public void setAscending(boolean ascending) { this.ascending = ascending; } public boolean isAscending() { return this.ascending; } public void invertOrder() { this.ascending = !this.ascending; } public int compare(T o1, T o2) { int result = this.comparator.compare(o1, o2); if (result != 0) { if (!this.ascending) { if (-2147483648 == result) { result = 2147483647; } else { result *= -1; } } return result; } else { return 0; } } public boolean equals(Object obj) { if (this == obj) { return true; } else if (!(obj instanceof InvertibleComparator)) { return false; } else { InvertibleComparator other = (InvertibleComparator)obj; return this.comparator.equals(other.comparator) && this.ascending == other.ascending; } } public int hashCode() { return this.comparator.hashCode(); } public String toString() { return "InvertibleComparator: [" + this.comparator + "]; ascending=" + this.ascending; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/OrderComparator.java ================================================ package com.pinecone.framework.util.comparator; import java.util.Arrays; import java.util.Comparator; import java.util.List; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.ObjectUtils; public class OrderComparator implements Comparator { public static final OrderComparator INSTANCE = new OrderComparator(); public OrderComparator() { } public Comparator withSourceProvider(OrderComparator.OrderSourceProvider sourceProvider) { return (o1, o2) -> { return this.doCompare(o1, o2, sourceProvider); }; } @Override public int compare(@Nullable Object o1, @Nullable Object o2) { return this.doCompare(o1, o2, (OrderComparator.OrderSourceProvider)null); } private int doCompare(@Nullable Object o1, @Nullable Object o2, @Nullable OrderComparator.OrderSourceProvider sourceProvider) { boolean p1 = o1 instanceof PriorityOrdered; boolean p2 = o2 instanceof PriorityOrdered; if (p1 && !p2) { return -1; } else if (p2 && !p1) { return 1; } else { int i1 = this.getOrder(o1, sourceProvider); int i2 = this.getOrder(o2, sourceProvider); return Integer.compare(i1, i2); } } private int getOrder(@Nullable Object obj, @Nullable OrderComparator.OrderSourceProvider sourceProvider) { Integer order = null; if (obj != null && sourceProvider != null) { Object orderSource = sourceProvider.getOrderSource(obj); if (orderSource != null) { if 
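// Ordering semantics implemented here: PriorityOrdered instances sort before plain
// Ordered ones, then ascending getOrder() decides; objects exposing no order fall
// back to Integer.MAX_VALUE and therefore sort last, e.g.:
//     OrderComparator.sort( components );   // lowest order value first, unordered entries at the tail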
(orderSource.getClass().isArray()) { Object[] ta = ObjectUtils.toObjectArray(orderSource); int len = ta.length; for( int i = 0; i < len; ++i ) { Object source = ta[i]; order = this.findOrder(source); if (order != null) { break; } } } else { order = this.findOrder(orderSource); } } } return order != null ? order : this.getOrder(obj); } protected int getOrder( @Nullable Object obj ) { if (obj != null) { Integer order = this.findOrder(obj); if (order != null) { return order; } } return 2147483647; } @Nullable protected Integer findOrder(Object obj) { return obj instanceof Ordered ? ((Ordered)obj).getOrder() : null; } @Nullable public Integer getPriority(Object obj) { return null; } public static void sort(List list) { if (list.size() > 1) { list.sort(INSTANCE); } } public static void sort(Object[] array) { if (array.length > 1) { Arrays.sort(array, INSTANCE); } } public static void sortIfNecessary(Object value) { if ( value instanceof Object[] ) { OrderComparator.sort((Object[])((Object[])value)); } else if (value instanceof List) { OrderComparator.sort((List)value); } } @FunctionalInterface public interface OrderSourceProvider { @Nullable Object getOrderSource(Object var1); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/Ordered.java ================================================ package com.pinecone.framework.util.comparator; import com.pinecone.framework.system.prototype.Pinenut; public interface Ordered extends Pinenut { int HIGHEST_PRECEDENCE = -2147483648; int LOWEST_PRECEDENCE = 2147483647; int getOrder(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/PriorityOrdered.java ================================================ package com.pinecone.framework.util.comparator; public interface PriorityOrdered extends Ordered { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/Config.java ================================================ package com.pinecone.framework.util.config; import com.pinecone.framework.system.prototype.Pinenut; public interface Config extends Pinenut { Object getProtoConfig(); Object get( Object key ); Object getOrDefault( Object key, Object def ); default boolean containsKey( Object key ) { return this.get( key ) != null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/Configson.java ================================================ package com.pinecone.framework.util.config; import java.util.Map; public interface Configson extends MappedConfig, PatriarchalConfig { Map getProtoConfig(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/GenericStartupCommandParser.java ================================================ package com.pinecone.framework.util.config; import com.pinecone.framework.unit.LinkedTreeMap; import java.util.HashMap; import java.util.Map; public class GenericStartupCommandParser implements StartupCommandParser { private String[] mValueStartSymbols = { "--", "-", "/", "\\", "" }; /* "" matches every argument, so it must be tried last as the no-prefix fallback */ private String[] mAssignmentSymbols = { "=", ":", "=>", "->" }; private String[] mValueSeparators = { ",", ";", "|" }; public GenericStartupCommandParser() {} public GenericStartupCommandParser( String[] valueStartSymbols, String[] assignmentSymbols, String[] valueSeparators ) {
this.mValueStartSymbols = valueStartSymbols; this.mAssignmentSymbols = assignmentSymbols; this.mValueSeparators = valueSeparators; } @Override public Map parse( String[] args ) { Map result = new LinkedTreeMap<>(); for ( String arg : args ) { String key = null; String value = null; for ( String startSymbol : this.mValueStartSymbols ) { if ( arg.startsWith( startSymbol ) ) { int assignmentIndex = -1; for ( String assignmentSymbol : this.mAssignmentSymbols ) { int index = arg.indexOf(assignmentSymbol, startSymbol.length()); if ( index > 0 ) { assignmentIndex = index; break; } } if ( assignmentIndex > 0 ) { key = arg.substring(startSymbol.length(), assignmentIndex); value = arg.substring(assignmentIndex + 1); } else { key = arg.substring(startSymbol.length()); value = ""; } break; } } if ( key != null ) { String[] values = this.splitValues( value ); result.put(key, values); } } return result; } @Override public Map parse( Map args ) { Map map = new HashMap<>( args.size() ); for ( String key : args.keySet() ) { String value = args.get(key); if ( value == null ) { value = ""; } String[] values = this.splitValues( value ); map.put( key, values ); } return map; } private String[] splitValues(String value ) { if ( value.isEmpty() ) { return new String[0]; } if ( value.charAt(0) == '"' || value.charAt(0) == '\'' ) { return new String[] { value }; } for ( String separator : this.mValueSeparators ) { if ( value.contains( separator ) ) { return value.split(java.util.regex.Pattern.quote(separator)); } } return new String[]{ value }; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/JSONConfig.java ================================================ package com.pinecone.framework.util.config; import com.pinecone.framework.system.ErrorStrings; import com.pinecone.framework.unit.MultiScopeMap; import com.pinecone.framework.unit.MultiScopeMaptron; import com.pinecone.framework.util.json.*; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.nio.file.Path; import java.util.Map; public class JSONConfig extends JSONMaptron implements Configson { protected MultiScopeMap mScope; protected JPlusContext mThisContext; protected JSONConfig mRoot; protected JSONConfig mParent; public JSONConfig( JSONConfig parent ) { this( (Map) null, parent ); } public JSONConfig( JSONObject thisScope, JSONConfig parent ) { this( thisScope.getMap(), parent ); } public JSONConfig( JSONObject thisScope ) { this( thisScope.getMap(), null ); } public JSONConfig( Map thisScope, JSONConfig parent ) { super(); this.mParent = parent; if( this.mParent != null ) { this.inherit( this.mParent ); } else { this.mRoot = this; this.mScope = new MultiScopeMaptron<>(); this.mThisContext = new JPlusContext(); if( thisScope == null ) { thisScope = this.getMap(); } this.mThisContext.asProgenitor( thisScope ); } this.setThisScope( thisScope ); } public JSONConfig() { this(null ); } @Override public JSONConfig inherit( PatriarchalConfig parent ) { JSONConfig that = (JSONConfig) parent; this.mScope = new MultiScopeMaptron<>(); this.mThisContext = that.mThisContext.clone(); this.mRoot = that.mRoot; this.mParent = that; this.mScope.setParents( that.mScope.getParents() ); this.mScope.setName ( that.mScope.getName() ); this.mThisContext.setParent( that.mThisContext.thisScope() ); this.setThisScope( this.getMap() ); return this; } public JSONConfig addGlobalScope( Map scope ) { this.getContext().addGlobalScope( scope ); if( scope instanceof 
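// Parsing sketch for the grammar above, with the empty prefix as last-resort fallback:
//     parser.parse( new String[]{ "--mode=fast", "-v", "files=a.txt,b.txt" } )
//         -> { "mode" -> ["fast"], "v" -> [], "files" -> ["a.txt", "b.txt"] }
// Quoted values are kept whole; unquoted values are split on the first separator found.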
MultiScopeMap ) { this.getScope().addParent( (MultiScopeMap)scope ); } else { this.getScope().addParent( new MultiScopeMaptron<>( scope ) ); } return this; } public JSONConfig addGlobalScope( Map scope, String name ) { this.getContext().addGlobalScope( scope ); if( scope instanceof MultiScopeMap ) { this.getScope().addParent( ( (MultiScopeMap)scope ).setName( name ) ); } else { this.getScope().addParent( ( new MultiScopeMaptron<>( scope ) ).setName( name ) ); } return this; } public JSONConfig setThisScope( Map thisScope ) { if( thisScope != null ) { this.assimilate( thisScope ); this.getContext().setThisScope( thisScope ); this.getScope().setThisScope( thisScope ); } return this; } public JSONConfig from( JSONObject prototype ) { this.setThisScope( prototype.getMap() ); return this; } public JSONObject fromFile( File fConf ) throws IOException { if( this.parent() != null ) { return ( (JSONConfig) this.root() ).fromFile( fConf ); } JPlusContext context = this.getContext().clone(); context.asProgenitor( this ); return new JSONMaptron( new JPlusCursorParser( new FileReader( fConf ), context ) ); } public JSONObject fromFileNoException( File fConf ) { try { return this.fromFile( fConf ); } catch ( IOException e ) { return null; } } public JSONObject fromPath( Path path ) throws IOException { try{ return this.fromFile( path.toFile() ); } catch ( IOException e ) { IOException ie = null; for( Path p : this.mThisContext.getParentPaths() ) { try{ return this.fromFile( path.resolve( p ).toFile() ); } catch ( IOException e1 ) { ie = e1; } } if( ie != null ) { throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + "What-> '" + path + "'", ie ); } } throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + "What-> '" + path + "'" ); } @Override public JSONConfig getChildFromPath( Path path ) throws IOException { JSONObject neo = this.fromPath( path ); return new JSONConfig( neo, this ); } @Override public Object get( Object key ) { return this.opt( key.toString() ); } @Override public Object getOrDefault( Object key, Object def ) { Object o = this.get( key ); if( o == null ) { return def; } return o; } @Override public Object opt( String key ) { return this.mScope.get( key ); } @Override public JSONConfig getChild( Object key ) { JSONObject prototype = this.optJSONObject( key.toString() ); if( prototype == null ) { return null; } return ( new JSONConfig( prototype, this ) ); } public JSONConfig apply ( File fConf ) throws IOException { return this.from( this.fromFile( fConf ) ); } @Override public JSONObject getProtoConfig() { return this.toJSONObject(); } @Override public JSONConfig parent() { return this.mParent; } public MultiScopeMap getScope() { return this.mScope; } @Override public JSONConfig root() { return this.mRoot; } @Override public JSONConfig setParent ( Object parent ) { this.mParent = (JSONConfig) parent; return this; } public JSONConfig setRoot( Object root ) { this.mRoot = (JSONConfig) root; return this; } @Override public Path[] getParentPaths() { return this.getContext().getParentPaths(); } @Override public JSONConfig setParentPaths( Path[] paths ) { this.getContext().setParentPaths( paths ); return this; } public JSONConfig addParentPath( Path newPath ) { this.getContext().addParentPath( newPath ); return this; } @Override public JPlusContext getContext() { return this.mThisContext; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/JSONSystemConfig.java 
================================================ package com.pinecone.framework.util.config; import com.pinecone.framework.system.Pinecore; import com.pinecone.framework.util.json.JSONObject; import java.io.IOException; import java.nio.file.Path; import java.util.Map; public class JSONSystemConfig extends JSONConfig implements SysConfigson { protected Pinecore mSystem; public JSONSystemConfig ( Map map, JSONConfig parent, Pinecore system ) { super( map, parent ); this.setSystem( system ); } public JSONSystemConfig ( JSONConfig parent, Pinecore system ) { this( null, parent, system ); } public JSONSystemConfig ( Pinecore system ) { this(null , system ); } @Override public JSONSystemConfig getChild( Object key ) { JSONObject prototype = this.optJSONObject( key.toString() ); if( prototype == null ) { return null; } return new JSONSystemConfig( prototype, this, this.getSystem() ); } @Override public Pinecore getSystem() { return this.mSystem; } public JSONSystemConfig setSystem( Pinecore system ) { this.mSystem = system; if( this.parent() != null && ((JSONSystemConfig)this.parent() ).getSystem() != this.mSystem ) { this.getContext().addParentPath( Path.of( this.getSystem().getRuntimePath() ) ); } return this; } @Override public JSONSystemConfig getChildFromPath( Path path ) throws IOException { JSONObject neo = this.fromPath( path ); return new JSONSystemConfig( neo,this, this.getSystem() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/MappedConfig.java ================================================ package com.pinecone.framework.util.config; import java.util.Map; public interface MappedConfig extends Config { Map getProtoConfig(); @Override default Object get( Object key ) { return this.getProtoConfig().get( key ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/OverridableConfig.java ================================================ package com.pinecone.framework.util.config; public interface OverridableConfig { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/PatriarchalConfig.java ================================================ package com.pinecone.framework.util.config; import com.pinecone.framework.system.prototype.FamilyContext; import java.io.IOException; import java.nio.file.Path; public interface PatriarchalConfig extends Config { PatriarchalConfig parent(); default PatriarchalConfig root() { PatriarchalConfig p = this.parent(); if( p == null ) { return this; } return p.root(); } PatriarchalConfig getChild ( Object key ); PatriarchalConfig setParent ( Object parent ); Path[] getParentPaths(); PatriarchalConfig setParentPaths( Path[] path ); PatriarchalConfig inherit( PatriarchalConfig parent ) ; FamilyContext getContext(); PatriarchalConfig getChildFromPath( Path path ) throws IOException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/StartupCommandParser.java ================================================ package com.pinecone.framework.util.config; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Map; public interface StartupCommandParser extends Pinenut { StartupCommandParser DefaultParser = new GenericStartupCommandParser(); Map parse( String[] args ); Map parse( Map args ); } ================================================ FILE: 
Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/SysConfigson.java ================================================ package com.pinecone.framework.util.config; public interface SysConfigson extends Configson, SystemConfig { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/SystemConfig.java ================================================ package com.pinecone.framework.util.config; import com.pinecone.framework.system.RuntimeSystem; public interface SystemConfig extends Config { RuntimeSystem getSystem(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/DatePattern.java ================================================ package com.pinecone.framework.util.datetime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Locale; public final class DatePattern { public static DateTimeFormatter createFormatter( String pattern ) { return DateTimeFormatter.ofPattern(pattern, Locale.getDefault()).withZone(ZoneId.systemDefault()); } public static final String NORM_DATETIME_PATTERN = "yyyy-MM-dd HH:mm:ss"; public static final DateTimeFormatter NORM_DATETIME_FORMATTER = createFormatter("yyyy-MM-dd HH:mm:ss"); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/GenericMultiFormDateTimeAudit.java ================================================ package com.pinecone.framework.util.datetime; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.StringUtils; import java.time.LocalDateTime; import java.time.temporal.ChronoUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; public class GenericMultiFormDateTimeAudit implements UniformDateTimeAudit { public GenericMultiFormDateTimeAudit() { } @Override public boolean matches ( String szDateTime, LocalDateTime targetTime ) { StorageDateTime dateTime = GenericMultiFormDateTimeAudit.fromString( szDateTime ); // Extract components from LocalDateTime int year = targetTime.getYear(); int month = targetTime.getMonthValue(); int dayOfMonth = targetTime.getDayOfMonth(); int hour = targetTime.getHour(); int minute = targetTime.getMinute(); int second = targetTime.getSecond(); int nano = targetTime.getNano(); return this.matchesDateTime( year, month, dayOfMonth, hour, minute, second, nano, dateTime ); } @Override public boolean betweenSec ( String szDateTime, LocalDateTime targetTime, int nSecondAccuracy ) { StorageDateTime dateTime = GenericMultiFormDateTimeAudit.fromString( szDateTime ); LocalDateTime localizedDateTime = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, targetTime ); long differenceInSeconds = ChronoUnit.SECONDS.between( localizedDateTime, targetTime ); return Math.abs( differenceInSeconds ) <= nSecondAccuracy; } @Override public boolean betweenMin ( String szDateTime, LocalDateTime targetTime, int nMinuteAccuracy ) { StorageDateTime dateTime = GenericMultiFormDateTimeAudit.fromString( szDateTime ); LocalDateTime localizedDateTime = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, targetTime ); long differenceInMinutes = ChronoUnit.SECONDS.between( localizedDateTime, targetTime ); return Math.abs( differenceInMinutes ) <= nMinuteAccuracy * 60; } @Override public boolean between ( String szDateTime, LocalDateTime targetTime, int nMillisAccuracy ) { StorageDateTime dateTime = GenericMultiFormDateTimeAudit.fromString( 
szDateTime ); LocalDateTime localizedDateTime = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, targetTime ); long differenceInMillis = ChronoUnit.MILLIS.between( localizedDateTime, targetTime ); return Math.abs(differenceInMillis) <= nMillisAccuracy; } public static StorageDateTime fromString ( String szDateTime ) { String szStandardizeDateTime = GenericMultiFormDateTimeAudit.standardize( szDateTime ); if( szStandardizeDateTime == null ) { throw new IllegalArgumentException( "Datetime should be fmt `????-??-?? ??:??:??.???`" ); } return GenericMultiFormDateTimeAudit.parseDateTime( szStandardizeDateTime ); } // Usage methodology: Fill in all wildcard characters from `targetTime`. public static LocalDateTime toLocalDateTime ( StorageDateTime storageDateTime, LocalDateTime targetTime ) { StorageDate date = storageDateTime.getDate(); StorageTime time = storageDateTime.getTime(); int year = date.getYear() != -1 ? date.getYear() : targetTime.getYear(); int month = date.getMonth() != -1 ? date.getMonth() : targetTime.getMonthValue(); int day = date.getDay() != -1 ? date.getDay() : targetTime.getDayOfMonth(); int hour = time.getHour() != -1 ? time.getHour() : targetTime.getHour(); int minute = time.getMinute() != -1 ? time.getMinute() : targetTime.getMinute(); int second = time.getSecond() != -1 ? time.getSecond() : targetTime.getSecond(); int nano = time.getNano() != -1 ? time.getNano() : targetTime.getNano(); return LocalDateTime.of( year, month, day, hour, minute, second, nano ); } protected boolean matchesDateTime ( int year, int month, int dayOfMonth, int hour, int minute, int second, int nano, StorageDateTime dateTime ) { if ( dateTime.getYear() != -1 && dateTime.getYear() != year ) { return false; } if ( dateTime.getMonthValue() != -1 && dateTime.getMonthValue() != month ) { return false; } if ( dateTime.getDayOfMonth() != -1 && dateTime.getDayOfMonth() != dayOfMonth ) { return false; } if ( dateTime.getHour() != -1 && dateTime.getHour() != hour ) { return false; } if ( dateTime.getMinute() != -1 && dateTime.getMinute() != minute ) { return false; } if ( dateTime.getSecond() != -1 && dateTime.getSecond() != second ) { return false; } if ( dateTime.getNano() != -1 && dateTime.getNano() != nano ) { return false; } return true; } public static StorageDateTime parseDateTime ( String input ) { String[] parts = input.split( "[\\.\\-T:\\s]+" ); if ( parts.length != 7 ) { throw new IllegalArgumentException("Invalid input format: " + input); } int year = GenericMultiFormDateTimeAudit.parseComponent( parts[0] ); int month = GenericMultiFormDateTimeAudit.parseComponent( parts[1] ); int day = GenericMultiFormDateTimeAudit.parseComponent( parts[2] ); int hour = GenericMultiFormDateTimeAudit.parseComponent( parts[3] ); int minute = GenericMultiFormDateTimeAudit.parseComponent( parts[4] ); int second = GenericMultiFormDateTimeAudit.parseComponent( parts[5] ); int nano = GenericMultiFormDateTimeAudit.parseComponent( parts[6] ); return StorageDateTime.of( year, month, day, hour, minute, second, nano ); } private static int parseComponent ( String component ) { if ( component.matches( "\\?{1,13}" ) ) { return -1; } else { return Integer.parseInt(component); } } public static String standardize ( String input ) { if( input.contains("T") ) { // "????-??-??T??:??:??.???" 
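// Wildcard matching sketch: every "?" component is filled in from the target time,
// so only the concrete components constrain the match:
//     new GenericMultiFormDateTimeAudit().matches( "????-??-05 12:??:??.???",
//         LocalDateTime.of( 2024, 1, 5, 12, 30, 0 ) ) -> true   (day 05 and hour 12 agree)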
input = input.replace( "T", " " ); } boolean bMatchBase = StringUtils.containsOnce( input, "-/" ); boolean hasColon = input.contains(":"); int dot = 0; if( !bMatchBase ) { dot = StringUtils.countOccurrencesOf( input, '.', 2 ); } if ( bMatchBase && hasColon || ( dot > 1/* yyyy.mm.dd */ && hasColon ) ) { return GenericMultiFormDateTimeAudit.standardizeDateTime( input, false ); } else if ( bMatchBase || ( dot > 1/* yyyy.mm.dd */ || ( dot == 1 && !hasColon /* yyyy.mm*/ ) ) ) { return GenericMultiFormDateTimeAudit.standardizeDateTime( input, true ) + " ??:??:??.???"; } else if ( hasColon ) { return "????-??-?? " + GenericMultiFormDateTimeAudit.standardizeTime( input ); } else if ( input.equals("?") ) { return "????-??-?? ??:??:??.???"; } return null; } private static String standardizeDateTime ( String input, boolean bOnlyYear ) { Pattern pattern; if( bOnlyYear ) { pattern = Pattern.compile( "(\\d{1,13}|\\?{1,13})[-/\\.](\\d{1,2}|\\?{1,2})(?:[-/\\.](\\d{1,2}|\\?{1,2}))?" ); } else { pattern = Pattern.compile( "(\\d{1,13}|\\?{1,13})[-/\\.](\\d{1,2}|\\?{1,2})(?:[-/\\.](\\d{1,2}|\\?{1,2}))? (\\d{1,2}|\\?{1,2}):(\\d{1,2}|\\?{1,2})(?:\\:(\\d{1,2}|\\?{1,2}))?(?:\\.(\\d{1,10}|\\?{1,10}))?" ); } Matcher matcher = pattern.matcher(input); if ( !matcher.matches() ) { throw new IllegalArgumentException( "Invalid date-time format: " + input ); } String year = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(1), 13 ); String month = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(2), 2 ); String day = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(3), 2 ); if( bOnlyYear ) { return String.format( "%s-%s-%s", year, month, day ); } else { String hour = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(4), 2 ); String minute = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(5), 2 ); String second = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(6), 2 ); String nano = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(7), 10, true ); return String.format( "%s-%s-%s %s:%s:%s.%s", year, month, day, hour, minute, second, nano ); } } private static String standardizeTime ( String input ) { Pattern pattern = Pattern.compile( "(\\d{1,2}|\\?{1,2}):(\\d{1,2}|\\?{1,2})(?:\\:(\\d{1,2}|\\?{1,2}))?(?:\\.(\\d{1,10}|\\?{1,10}))?" 
); Matcher matcher = pattern.matcher(input); if ( !matcher.matches() ) { throw new IllegalArgumentException( "Invalid time format: " + input ); } String hour = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(1), 2 ); String minute = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(2), 2 ); String second = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(3), 2 ); String nano = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(4), 10, true ); return String.format( "%s:%s:%s.%s", hour, minute, second, nano ); } private static String formatComponent ( String component, int length ) { return GenericMultiFormDateTimeAudit.formatComponent( component, length, false ); } private static String formatComponent ( String component, int length, boolean bNano ) { if( component == null ) { component = "?"; } if ( component.contains("?") ) { return component; } int n = Integer.parseInt( component ); if( bNano ) { if( component.length() < 4 ) { n = n * 1000000; } } return String.format( "%0" + length + "d", n ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/StorageDate.java ================================================ package com.pinecone.framework.util.datetime; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSON; public class StorageDate implements Pinenut { private int mnYear; private short mnMonth; private short mnDay; public StorageDate( int year, short month, short day ) { this.mnYear = year; this.mnMonth = month; this.mnDay = day; } public StorageDate( int year, int month, int day ) { this( year, (short) month, (short)day ); } public int getYear() { return this.mnYear; } public void setYear( int year ) { this.mnYear = year; } public short getMonth() { return this.mnMonth; } public void setMonth( short month ) { this.mnMonth = month; } public short getDay() { return this.mnDay; } public void setDay( short day ) { this.mnDay = day; } @Override public String toString() { return String.format("%d-%02d-%02d", this.mnYear, this.mnMonth, this.mnDay); } @Override public String toJSONString() { return JSON.stringify( this.toString() ); } public static StorageDate of( int year, int month, int day ) { return new StorageDate(year, month, day); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/StorageDateTime.java ================================================ package com.pinecone.framework.util.datetime; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSON; import java.time.Month; public class StorageDateTime implements Pinenut { private StorageDate mDate; private StorageTime mTime; public StorageDateTime( StorageDate date, StorageTime time ) { this.mDate = date; this.mTime = time; } public StorageDate getDate() { return this.mDate; } public void setDate(StorageDate date) { this.mDate = date; } public StorageTime getTime() { return this.mTime; } public void setTime(StorageTime time) { this.mTime = time; } public int getYear() { return this.mDate.getYear(); } public void setYear( int year ) { this.mDate.setYear(year); } public int getMonthValue() { return this.mDate.getMonth(); } public void setMonth( int month ) { this.mDate.setMonth((short) month); } public int getDayOfMonth() { return this.mDate.getDay(); } public void setDay( int day ) { this.mDate.setDay((short) day); } public int getHour() { return 
this.mTime.getHour(); } public void setHour( int hour ) { this.mTime.setHour(hour); } public int getMinute() { return this.mTime.getMinute(); } public void setMinute( int minute ) { this.mTime.setMinute(minute); } public int getSecond() { return this.mTime.getSecond(); } public void setSecond( int second ) { this.mTime.setSecond(second); } public int getNano() { return this.mTime.getNano(); } public void setNano( int nano ) { this.mTime.setNano(nano); } @Override public String toString() { return this.mDate.toString() + " " + this.mTime.toString(); } @Override public String toJSONString() { return JSON.stringify( this.toString() ); } public static StorageDateTime of( int year, Month month, int dayOfMonth, int hour, int minute ) { return new StorageDateTime( StorageDate.of(year, month.getValue(), dayOfMonth ), StorageTime.of(hour, minute, 0, 0) ); } public static StorageDateTime of( int year, Month month, int dayOfMonth, int hour, int minute, int second ) { return new StorageDateTime( StorageDate.of(year, month.getValue(), dayOfMonth), StorageTime.of(hour, minute, second, 0) ); } public static StorageDateTime of( int year, Month month, int dayOfMonth, int hour, int minute, int second, int nanoOfSecond ) { return new StorageDateTime( StorageDate.of(year, month.getValue(), dayOfMonth), StorageTime.of(hour, minute, second, nanoOfSecond) ); } public static StorageDateTime of( int year, int month, int dayOfMonth, int hour, int minute ) { return new StorageDateTime( StorageDate.of(year, month, dayOfMonth), StorageTime.of(hour, minute, 0, 0) ); } public static StorageDateTime of( int year, int month, int dayOfMonth, int hour, int minute, int second ) { return new StorageDateTime( StorageDate.of(year, month, dayOfMonth), StorageTime.of( hour, minute, second, 0 ) ); } public static StorageDateTime of( int year, int month, int dayOfMonth, int hour, int minute, int second, int nanoOfSecond ) { return new StorageDateTime(StorageDate.of(year, month, dayOfMonth), StorageTime.of(hour, minute, second, nanoOfSecond)); } public static StorageDateTime of( StorageDate date, StorageTime time ) { return new StorageDateTime( date, time ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/StorageTime.java ================================================ package com.pinecone.framework.util.datetime; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSON; public class StorageTime implements Pinenut { private byte mnHour; private byte mnMinute; private byte mnSecond; private int mnNano; public StorageTime( int hour, int minute, int second, int nano ) { this.mnHour = (byte) hour; this.mnMinute = (byte) minute; this.mnSecond = (byte) second; this.mnNano = nano; } public int getHour() { return this.mnHour; } public void setHour( int hour ) { this.mnHour = (byte) hour; } public int getMinute() { return this.mnMinute; } public void setMinute( int minute ) { this.mnMinute = (byte) minute; } public int getSecond() { return this.mnSecond; } public void setSecond( int second ) { this.mnSecond = (byte) second; } public int getNano() { return this.mnNano; } public void setNano( int nano ) { this.mnNano = nano; } @Override public String toString() { return String.format("%02d:%02d:%02d.%09d", this.mnHour, this.mnMinute, this.mnSecond, this.mnNano); } @Override public String toJSONString() { return JSON.stringify( this.toString() ); } public static StorageTime of( int hour, int minute, int second, int nano 
) { return new StorageTime( hour, minute, second, nano ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/UniformDateTimeAudit.java ================================================ package com.pinecone.framework.util.datetime; import com.pinecone.framework.system.prototype.Pinenut; import java.time.LocalDateTime; public interface UniformDateTimeAudit extends Pinenut { UniformDateTimeAudit DefaultAudit = new GenericMultiFormDateTimeAudit(); boolean matches ( String szDateTime, LocalDateTime targetTime ) ; boolean betweenSec ( String szDateTime, LocalDateTime targetTime, int nSecondAccuracy ) ; boolean betweenMin ( String szDateTime, LocalDateTime targetTime, int nMinuteAccuracy ) ; boolean between ( String szDateTime, LocalDateTime targetTime, int nMillisAccuracy ) ; default boolean matches ( String szDateTime ) { return this.matches( szDateTime, LocalDateTime.now() ); } default boolean betweenSec ( String szDateTime, int nSecondAccuracy ) { return this.betweenSec( szDateTime, LocalDateTime.now(), nSecondAccuracy ); } default boolean betweenMin ( String szDateTime, int nMinuteAccuracy ) { return this.betweenMin( szDateTime, LocalDateTime.now(), nMinuteAccuracy ); } default boolean between ( String szDateTime, int nMillisAccuracy ) { return this.between( szDateTime, LocalDateTime.now(), nMillisAccuracy ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimeUnit.java ================================================ package com.pinecone.framework.util.datetime.compact; import com.pinecone.framework.system.prototype.Pinenut; public interface CompactTimeUnit extends Pinenut { boolean isInfinite(); boolean isMilliseconds() ; boolean isSeconds() ; boolean isMinutes() ; boolean isHours() ; boolean isDays() ; long toMask64() ; String getSymbol(); short bits(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimeUnit32.java ================================================ package com.pinecone.framework.util.datetime.compact; public enum CompactTimeUnit32 implements CompactTimeUnit { INFINITE ( 0xFFFFFFFF, "INF" ), MILLISECONDS ( 0x00000000, "ms" ), SECONDS ( 0x20000000, "s" ), MINUTES ( 0x40000000, "m" ), HOURS ( 0x60000000, "h" ), DAYS ( 0x80000000, "d" ); private final int mask; private final String symbol; CompactTimeUnit32( int mask, String symbol ) { this.mask = mask; this.symbol = symbol; } public int getMask() { return this.mask; } @Override public String getSymbol() { return this.symbol; } @Override public boolean isInfinite() { return this.getMask() == CompactTimeUnit32.INFINITE.getMask(); } @Override public boolean isMilliseconds() { return this.getMask() == CompactTimeUnit32.MILLISECONDS.getMask(); } @Override public boolean isSeconds() { return this.getMask() == CompactTimeUnit32.SECONDS.getMask(); } @Override public boolean isMinutes() { return this.getMask() == CompactTimeUnit32.MINUTES.getMask(); } @Override public boolean isHours() { return this.getMask() == CompactTimeUnit32.HOURS.getMask(); } @Override public boolean isDays() { return this.getMask() == CompactTimeUnit32.DAYS.getMask(); } @Override public long toMask64() { return this.getMask(); } @Override public short bits() { return CompactTimestamp32.BITS; } } ================================================ FILE: 
Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimestamp.java ================================================ package com.pinecone.framework.util.datetime.compact; import com.pinecone.framework.system.prototype.Pinenut; public interface CompactTimestamp extends Pinenut { long toMilliseconds(); long toSeconds(); long toMinutes(); long toHours(); long toDays(); int toInt32(); CompactTimeUnit getUnit(); boolean isInfinite(); short bits(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimestamp32.java ================================================ package com.pinecone.framework.util.datetime.compact; public class CompactTimestamp32 implements CompactTimestamp { public static final int MASK_TYPE = 0xE0000000; // Hi 3 => Type public static final int MASK_VALUE = 0x1FFFFFFF; // Low 29 => Value public static final int INFINITE = 0xFFFFFFFF; public static final long MILLIS_PER_SECOND = 1_000L; public static final long MILLIS_PER_MINUTE = 60_000L; public static final long MILLIS_PER_HOUR = 3_600_000L; public static final long MILLIS_PER_DAY = 86_400_000L; public static final int BITS = Integer.SIZE; protected int mnUint32Timestamp; public CompactTimestamp32 ( int nUint32Timestamp, boolean raw ) { /* `raw` merely disambiguates this constructor from the millisecond one; the encoded value is stored as-is. */ this.mnUint32Timestamp = nUint32Timestamp; } public CompactTimestamp32 ( int nMillis ) { this( nMillis, CompactTimeUnit32.MILLISECONDS ); } public CompactTimestamp32 ( int val, CompactTimeUnit timeUnit ) { this( CompactTimestamp32.encode( val, (CompactTimeUnit32) timeUnit ), true ); } @Override public long toMilliseconds() { return CompactTimestamp32.toMilliseconds( this.mnUint32Timestamp ); } @Override public long toSeconds() { return CompactTimestamp32.toSeconds( this.mnUint32Timestamp ); } @Override public long toMinutes() { return CompactTimestamp32.toMinutes( this.mnUint32Timestamp ); } @Override public long toHours() { return CompactTimestamp32.toHours( this.mnUint32Timestamp ); } @Override public long toDays() { return CompactTimestamp32.toDays( this.mnUint32Timestamp ); } @Override public int toInt32() { return CompactTimestamp32.decodeValue( this.mnUint32Timestamp ); } @Override public CompactTimeUnit32 getUnit() { return CompactTimestamp32.decodeType( this.mnUint32Timestamp ); } @Override public boolean isInfinite() { return CompactTimestamp32.isInfinite( this.mnUint32Timestamp ); } @Override public short bits() { return BITS; } @Override public boolean equals( Object obj ) { if ( this == obj ) { return true; } if ( obj == null || getClass() != obj.getClass() ) { return false; } CompactTimestamp32 that = ( CompactTimestamp32 ) obj; return this.mnUint32Timestamp == that.mnUint32Timestamp; } @Override public int hashCode() { return Integer.hashCode( this.mnUint32Timestamp ); } @Override public String toString() { return CompactTimestamp32.format( this.mnUint32Timestamp ); } public static CompactTimestamp32 from ( int val, CompactTimeUnit timeUnit ) { return new CompactTimestamp32( val, timeUnit ); } public static CompactTimestamp32 from ( long millis ) { return new CompactTimestamp32( CompactTimestamp32.fromMilliseconds( millis ), true ); } public static int encode( int value, CompactTimeUnit32 unit ) { if ( value < 0 || value > MASK_VALUE ) { throw new IllegalArgumentException( "Out of range: " + value ); } return value | unit.getMask(); } public static int decodeValue( int encoded ) { if ( encoded == INFINITE ) { return INFINITE; } return encoded & MASK_VALUE; } 
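/* Illustrative usage sketch (not part of the original sources): encoding 90 minutes gives
 * encode( 90, CompactTimeUnit32.MINUTES ) == 0x4000005A, i.e. unit mask 0x40000000 | value 90;
 * then decodeValue( encoded ) == 90, decodeType( encoded ) == MINUTES, and
 * toMilliseconds( encoded ) == 5_400_000L. The top 3 bits select the unit; the low 29 bits hold the magnitude. */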
public static CompactTimeUnit32 decodeType( int encoded ) { if ( encoded == INFINITE ) { return CompactTimeUnit32.INFINITE; } int type = encoded & MASK_TYPE; for ( CompactTimeUnit32 unit : CompactTimeUnit32.values() ) { if ( unit.getMask() == type ) { return unit; } } return null; } public static long toMilliseconds( int encoded ) { if ( CompactTimestamp32.isInfinite( encoded ) ) { return -1L; } int value = CompactTimestamp32.decodeValue( encoded ); CompactTimeUnit32 unit = CompactTimestamp32.decodeType( encoded ); if ( unit == null ) { throw new IllegalArgumentException( "Unknown `TimeUnit`." ); } switch ( unit ) { case MILLISECONDS: { return value; } case SECONDS: { return value * MILLIS_PER_SECOND; } case MINUTES: { return value * MILLIS_PER_MINUTE; } case HOURS: { return value * MILLIS_PER_HOUR; } case DAYS: { return value * MILLIS_PER_DAY; } default: { return -1L; } } } public static long toSeconds( int val ) { long millis = CompactTimestamp32.toMilliseconds( val ); return millis == -1L ? -1L : millis / MILLIS_PER_SECOND; } public static long toMinutes( int val ) { long millis = CompactTimestamp32.toMilliseconds( val ); return millis == -1L ? -1L : millis / MILLIS_PER_MINUTE; } public static long toHours( int val ) { long millis = CompactTimestamp32.toMilliseconds( val ); return millis == -1L ? -1L : millis / MILLIS_PER_HOUR; } public static long toDays( int val ) { long millis = CompactTimestamp32.toMilliseconds( val ); return millis == -1L ? -1L : millis / MILLIS_PER_DAY; } public static int fromMilliseconds( long millis ) { if ( millis == -1L ) { return INFINITE; } if ( millis < 0 ) { throw new IllegalArgumentException( "Negative milliseconds unacceptable." ); } if ( millis % MILLIS_PER_DAY == 0 ) { long days = millis / MILLIS_PER_DAY; if ( days <= MASK_VALUE ) { return CompactTimestamp32.encode( (int) days, CompactTimeUnit32.DAYS ); } } if ( millis % MILLIS_PER_HOUR == 0 ) { long hours = millis / MILLIS_PER_HOUR; if ( hours <= MASK_VALUE ) { return CompactTimestamp32.encode( (int) hours, CompactTimeUnit32.HOURS ); } } if ( millis % MILLIS_PER_MINUTE == 0 ) { long minutes = millis / MILLIS_PER_MINUTE; if ( minutes <= MASK_VALUE ) { return CompactTimestamp32.encode( (int) minutes, CompactTimeUnit32.MINUTES ); } } if ( millis % MILLIS_PER_SECOND == 0 ) { long seconds = millis / MILLIS_PER_SECOND; if ( seconds <= MASK_VALUE ) { return CompactTimestamp32.encode( (int) seconds, CompactTimeUnit32.SECONDS ); } } if ( millis <= MASK_VALUE ) { return CompactTimestamp32.encode( (int) millis, CompactTimeUnit32.MILLISECONDS ); } return INFINITE; } public static String format( int encoded ) { if ( CompactTimestamp32.isInfinite( encoded ) ) { return CompactTimeUnit32.INFINITE.getSymbol(); } CompactTimeUnit32 unit = CompactTimestamp32.decodeType( encoded ); if ( unit == null ) { return CompactTimeUnit32.INFINITE.getSymbol(); } return CompactTimestamp32.decodeValue( encoded ) + " " + unit.getSymbol(); } public static boolean isInfinite( int encoded ) { return encoded == INFINITE; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/BytesID.java ================================================ package com.pinecone.framework.util.id; public interface BytesID extends Identification { int length(); String toHexString(); String toBase64String(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/GUID.java ================================================ package 
com.pinecone.framework.util.id; public interface GUID extends NumericID { long hashCode64(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/GuidAllocator.java ================================================ package com.pinecone.framework.util.id; import com.pinecone.framework.system.prototype.Pinenut; public interface GuidAllocator extends Pinenut { GUID nextGUID(); GUID parse( final String hexId ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/GuidGenerateException.java ================================================ package com.pinecone.framework.util.id; public class GuidGenerateException extends RuntimeException { private static final long serialVersionUID = -27048199131316992L; public GuidGenerateException() { super(); } public GuidGenerateException( String message, Throwable cause ) { super(message, cause); } public GuidGenerateException( String message ) { super(message); } public GuidGenerateException( String msgFormat, Object... args ) { super(String.format(msgFormat, args)); } public GuidGenerateException( Throwable cause ) { super(cause); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/Identification.java ================================================ package com.pinecone.framework.util.id; import java.io.Serializable; import com.pinecone.framework.system.prototype.Pinenut; public interface Identification extends Pinenut, Serializable, Comparable<Identification> { Identification parse( String code ); byte[] toBytes(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/IllegalIdentificationException.java ================================================ package com.pinecone.framework.util.id; import com.pinecone.framework.system.PineRuntimeException; public class IllegalIdentificationException extends PineRuntimeException { public IllegalIdentificationException () { super(); } public IllegalIdentificationException ( String message ) { super(message); } public IllegalIdentificationException ( String message, Throwable cause ) { super(message, cause); } public IllegalIdentificationException ( Throwable cause ) { super(cause); } protected IllegalIdentificationException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/Int32ID.java ================================================ package com.pinecone.framework.util.id; import com.pinecone.framework.util.Bytes; public class Int32ID implements NumericID { protected int mId; public Int32ID( int id ) { this.mId = id; } @Override public Identification parse( String hexID ) { this.mId = Integer.parseInt( hexID, 16 ); return this; } @Override public long longVal() { return this.mId; } @Override public int intVal() { return this.mId; } @Override public String toString() { return Integer.toUnsignedString( this.mId ); } @Override public byte[] toBytesLE() { return Bytes.int32ToBytesLE( this.mId ); } @Override public byte[] toBytesBE() { return Bytes.int32ToBytesBE( this.mId ); } @Override public int sizeof() { return Integer.BYTES; } @Override public int compareTo( Identification that ) { Int32ID val; if ( that instanceof Int32ID ) { val = 
(Int32ID) that; } else { throw new IllegalArgumentException( "Not Int32ID" ); } return Integer.compare( this.mId, val.mId ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/Int64ID.java ================================================ package com.pinecone.framework.util.id; import com.pinecone.framework.util.Bytes; public class Int64ID implements NumericID { protected long mId; public Int64ID( long id ) { this.mId = id; } @Override public Identification parse( String hexID ) { this.mId = Long.parseLong( hexID, 16 ); return this; } @Override public long longVal() { return this.mId; } @Override public int intVal() { return (int) this.mId; } @Override public String toString() { return Long.toUnsignedString( this.mId ); } @Override public byte[] toBytesLE() { return Bytes.int64ToBytesLE( this.mId ); } @Override public byte[] toBytesBE() { return Bytes.int64ToBytesBE( this.mId ); } @Override public int sizeof() { return Long.BYTES; } @Override public int compareTo( Identification that ) { Int64ID val; if ( that instanceof Int64ID ) { val = (Int64ID) that; } else { throw new IllegalArgumentException( "Not Int64ID" ); } return Long.compare( this.mId, val.mId ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/NameStringID.java ================================================ package com.pinecone.framework.util.id; import com.pinecone.framework.util.json.JSONString; public class NameStringID implements StringID, JSONString { private String name; public NameStringID( String name ) { this.name = name; } @Override public Identification parse( String code ) { this.name = code; return this; } @Override public String toString() { return this.name; } @Override public String toJSONString() { return "\"" + this.name + "\""; } @Override public boolean equals( Object obj ) { if ( this == obj ) { return true; } if ( !(obj instanceof NameStringID) ) { return false; } NameStringID that = (NameStringID) obj; return this.name.equals(that.name); } @Override public int hashCode() { return this.name.hashCode(); } @Override public byte[] toBytes() { return this.name.getBytes(); } @Override public int compareTo( Identification that ) { StringID val; if ( that instanceof StringID ) { val = (StringID) that; } else { throw new IllegalArgumentException( "Not StringID" ); } return this.name.compareTo( val.toString() ); } @Override public int length() { return this.name.length(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/NumericID.java ================================================ package com.pinecone.framework.util.id; public interface NumericID extends Identification { long longVal(); int intVal(); int sizeof(); default int bitsof() { return this.sizeof() * 8; } @Override default String toJSONString() { return this.toString(); } byte[] toBytesLE(); byte[] toBytesBE(); // Pinecone is using uniformed Little-Endian by default. 
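/* Illustrative example (an assumption about the Bytes helpers, which emit the 8 bytes in the named
 * endian order): new Int64ID( 0xFFL ).toBytesLE()[0] == (byte) 0xFF, whereas toBytesBE()[7] == (byte) 0xFF. */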
@Override default byte[] toBytes() { return this.toBytesLE(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/StringID.java ================================================ package com.pinecone.framework.util.id; public interface StringID extends Identification { int length(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/FileIterator.java ================================================ package com.pinecone.framework.util.io; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.util.Iterator; import java.util.NoSuchElementException; public class FileIterator implements Iterator<File> { private PathIterator mPathIterator; public FileIterator( PathIterator iterator ) { this.mPathIterator = iterator; } public FileIterator( File root, boolean recursive, boolean ignoreException ) throws IOException { this.mPathIterator = new PathItemIterator( root.toPath(), recursive, ignoreException ); } public FileIterator( File root, boolean recursive ) throws IOException { this( root, recursive, false ); } public FileIterator( File root ) throws IOException { this( root, true ); } @Override public boolean hasNext() { return this.mPathIterator.hasNext(); } @Override public File next() { Path path = this.mPathIterator.next(); if ( path == null ) { throw new NoSuchElementException( "No more files" ); } return path.toFile(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/FileNamePathIterator.java ================================================ package com.pinecone.framework.util.io; import java.io.IOException; import java.nio.file.Path; import java.util.Iterator; public class FileNamePathIterator extends PathItemIterator implements Iterator<Path> { public FileNamePathIterator( Path root, boolean recursive, boolean ignoreException ) throws IOException { super( root, recursive, ignoreException ); } public FileNamePathIterator( Path root, boolean recursive ) throws IOException { this( root, recursive, false ); } public FileNamePathIterator( Path root ) throws IOException { this( root, true ); } @Override public Path next() { return super.next().getFileName(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/FileUtils.java ================================================ package com.pinecone.framework.util.io; import com.pinecone.framework.util.OSIdentifier; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.math.BigInteger; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; public final class FileUtils { public static byte[] readByteAll( File pFile ) throws IOException { long fileLength = pFile.length(); byte[] fileContent = new byte[ (int) fileLength ]; try ( FileInputStream fileInputStream = new FileInputStream(pFile) ){ int nBytesRead = fileInputStream.read(fileContent); if ( nBytesRead != fileLength ) { throw new IOException( "Failed to read the entire file content!" 
); } } return fileContent; } public static String readAll ( String szFileDir ) throws IOException { File file = new File(szFileDir); return readAll(file); } public static String readAll ( String szFileDir, Charset charset ) throws IOException { File file = new File(szFileDir); return readAll( file, charset ); } public static String readAll ( File pFile, Charset charset ) throws IOException { return new String( readByteAll( pFile ), charset ); } public static String readAll ( File pFile ) throws IOException { return new String( readByteAll( pFile ), StandardCharsets.UTF_8 ); } private static void checkDirectory( File directory ) { if (!directory.exists()) { throw new IllegalArgumentException(directory + " does not exist"); } else if (!directory.isDirectory()) { throw new IllegalArgumentException(directory + " is not a directory"); } } public static boolean isSymlink( File file ) throws IOException { if ( file == null ) { throw new NullPointerException("file must not be null"); } else if ( OSIdentifier.isWindows() ) { return false; } else { File fileInCanonicalDir = null; if ( file.getParent() == null ) { fileInCanonicalDir = file; } else { File canonicalDir = file.getParentFile().getCanonicalFile(); fileInCanonicalDir = new File(canonicalDir, file.getName()); } return !fileInCanonicalDir.getCanonicalFile().equals( fileInCanonicalDir.getAbsoluteFile() ); } } public static long sizeOf( File file ) { if ( !file.exists() ) { String message = file + " does not exist"; throw new IllegalArgumentException(message); } else { return file.isDirectory() ? sizeOfDirectory(file) : file.length(); } } public static BigInteger sizeOfAsBigInteger( File file ) { if ( !file.exists() ) { String message = file + " does not exist"; throw new IllegalArgumentException(message); } else { return file.isDirectory() ? 
sizeOfDirectoryAsBigInteger(file) : BigInteger.valueOf(file.length()); } } public static long sizeOfDirectory( File directory ) { FileUtils.checkDirectory(directory); File[] files = directory.listFiles(); if ( files == null ) { return 0L; } else { long size = 0L; for ( File file : files ) { try { if ( !FileUtils.isSymlink(file) ) { size += FileUtils.sizeOf(file); if ( size < 0L ) { break; } } } catch ( IOException e ) { // Do nothing } } return size; } } public static BigInteger sizeOfDirectoryAsBigInteger( File directory ) { FileUtils.checkDirectory(directory); File[] files = directory.listFiles(); if (files == null) { return BigInteger.ZERO; } else { BigInteger size = BigInteger.ZERO; for ( File file : files ) { try { if ( !FileUtils.isSymlink(file) ) { size = size.add(BigInteger.valueOf(sizeOf(file))); } } catch ( IOException e ) { // Do nothing } } return size; } } public static void forceDelete( File file ) throws IOException { if ( file.isDirectory() ) { FileUtils.deleteDirectory(file); } else { boolean filePresent = file.exists(); if ( !file.delete() ) { if ( !filePresent ) { throw new FileNotFoundException("file does not exist: " + file); } String message = "Unable to delete file: " + file; throw new IOException( message ); } } } public static void purgeDirectory( File directory ) throws IOException { FileUtils.deleteDirectory( directory ); } public static void deleteDirectory( File directory ) throws IOException { if ( directory.exists() ) { if ( !isSymlink(directory) ) { FileUtils.cleanDirectory(directory); } if ( !directory.delete() ) { String message = "Unable to delete directory " + directory + "."; throw new IOException(message); } } } public static boolean deleteQuietly( File file ) { if ( file == null ) { return false; } else { try { if (file.isDirectory()) { FileUtils.cleanDirectory(file); } } catch ( Exception e ) { // Do nothing } try { return file.delete(); } catch ( Exception e ) { return false; } } } public static void cleanDirectory( File directory ) throws IOException { String message; if ( !directory.exists() ) { message = directory + " does not exist"; throw new IllegalArgumentException(message); } else if ( !directory.isDirectory() ) { message = directory + " is not a directory"; throw new IllegalArgumentException(message); } else { File[] files = directory.listFiles(); if ( files == null ) { throw new IOException("Failed to list contents of " + directory); } else { IOException exception = null; for ( File file : files ) { try { FileUtils.forceDelete(file); } catch ( IOException e ) { exception = e; } } if ( null != exception ) { throw exception; } } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/PathItemIterator.java ================================================ package com.pinecone.framework.util.io; import com.pinecone.framework.system.ProxyProvokeHandleException; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Stack; public class PathItemIterator implements PathIterator { private Stack<DirectoryStream<Path>> mDirStack = new Stack<>() ; private Stack<Iterator<Path>> mIterStack = new Stack<>() ; private Path mNextPath = null ; private boolean mbRecursive ; 
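/* Design note: the two stacks advance in lock-step, one DirectoryStream plus its Iterator per
 * directory level, so each stream can be closed as soon as its directory is exhausted
 * during the depth-first walk. */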
private boolean mbIgnoreException ; public PathItemIterator(Path root, boolean recursive, boolean ignoreException ) throws IOException { this.mbRecursive = recursive; this.mbIgnoreException = ignoreException; if ( root != null && Files.exists(root) ) { if ( Files.isDirectory( root ) ) { DirectoryStream<Path> stream = Files.newDirectoryStream(root); this.mDirStack.push(stream); this.mIterStack.push(stream.iterator()); } else { this.mNextPath = root; } } } public PathItemIterator(Path root, boolean recursive ) throws IOException { this( root, recursive, false ); } public PathItemIterator(Path root ) throws IOException { this( root, true ); } @Override public boolean hasNext() { if ( this.mNextPath != null ) { return true; } while ( !this.mIterStack.isEmpty() ) { Iterator<Path> iter = this.mIterStack.peek(); if ( iter.hasNext() ) { Path file = iter.next(); if ( Files.isDirectory(file) ) { this.mNextPath = file; if ( this.mbRecursive ) { try { DirectoryStream<Path> stream = Files.newDirectoryStream(file); this.mDirStack.push( stream ); this.mIterStack.push( stream.iterator() ); } catch ( IOException e ) { if( !this.mbIgnoreException ) { throw new ProxyProvokeHandleException( e ); } } } return true; } else { this.mNextPath = file; return true; } } else { this.mIterStack.pop(); try { this.mDirStack.pop().close(); } catch ( IOException e ) { if( !this.mbIgnoreException ) { throw new ProxyProvokeHandleException( e ); } } } } return false; } @Override public Path next() { if ( !this.hasNext() ) { throw new NoSuchElementException( "No more files" ); } Path result = this.mNextPath; this.mNextPath = null; return result; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/PathIterator.java ================================================ package com.pinecone.framework.util.io; import java.nio.file.Path; import java.util.Iterator; public interface PathIterator extends Iterator<Path> { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/Tracer.java ================================================ package com.pinecone.framework.util.io; import com.pinecone.framework.system.prototype.Pinenut; import java.io.PrintStream; public interface Tracer extends Pinenut { PrintStream getOut(); PrintStream getErr(); Tracer echo( Object data, Object...objects ) ; Tracer cerr( Object data, Object...objects ) ; Tracer log( Object that ); Tracer log( Object Anything, Object...objects ); Tracer info( Object that ); Tracer info( Object Anything, Object...objects ); Tracer warn ( Object Anything, Object...objects ); Tracer warn ( Object that ); Tracer error ( Object Anything, Object...objects ); Tracer error ( Object that ); Tracer trace() ; Tracer trace( Object Anything, Object...objects ) ; Tracer colorf( int colorCode, Object that ); Tracer colorf( int colorCode, Object Anything, Object...objects ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/Tracerson.java ================================================ package com.pinecone.framework.util.io; import com.pinecone.framework.system.prototype.PinenutTraits; import com.pinecone.framework.util.json.JSON; import java.io.PrintStream; public class Tracerson implements Tracer { public static final String COLOR_STRING_UTF_END = "\u001B[0m"; protected String mszInfoColor = "\u001B[34m"; protected String mszWarnColor = "\u001B[33m"; protected String mszTraceColor = "\u001B[36m"; protected 
String mszElementSplit = " "; protected PrintStream out = System.out; protected PrintStream err = System.err; public Tracerson() { } public Tracerson( PrintStream out, PrintStream err ) { this.out = out; this.err = err; } protected String stringify( Object data ){ try { return PinenutTraits.invokeToJSONString( data ); } catch ( Exception e1 ){ return JSON.stringify( data ); } } @Override public PrintStream getOut() { return this.out; } @Override public PrintStream getErr() { return this.err; } @Override public Tracerson echo( Object data, Object...objects ) { this.out.print( data ); for ( Object row : objects ) { this.out.print( row ); } return this; } @Override public Tracerson cerr( Object data, Object...objects ) { this.err.print( data ); for ( Object row : objects ) { this.err.print( row ); } return this; } protected void printlnColorfulEnd() { if( this.out.equals( System.out ) ) { this.out.println( Tracerson.COLOR_STRING_UTF_END ); } else { this.out.println(); } } protected void printlnStringify( Object Anything, Object...objects ) { this.out.print( this.stringify( Anything ) ); for ( Object row : objects ) { this.out.print( this.mszElementSplit ); this.out.print( this.stringify( row ) ); } } protected void printlnColorful( String szColor, Object Anything, Object...objects ) { this.out.print( szColor ); this.printlnStringify( Anything, objects ); this.printlnColorfulEnd(); } @Override public Tracerson log( Object that ){ this.out.println( this.stringify( that ) ); return this; } @Override public Tracerson log( Object Anything, Object...objects ){ this.printlnStringify( Anything, objects ); this.out.println(); return this; } protected String queryInfoColor(){ if( this.out.equals( System.out ) ) { return this.mszInfoColor; } else { return "[INFO] "; } } @Override public Tracerson info( Object that ){ this.out.print( this.queryInfoColor() ); this.out.print( this.stringify( that ) ); this.printlnColorfulEnd(); return this; } @Override public Tracerson info( Object Anything, Object...objects ){ this.printlnColorful( this.queryInfoColor(), Anything, objects ); return this; } protected String queryWarnColor(){ if( this.out.equals( System.out ) ) { return this.mszWarnColor; } else { return "[WARN] "; } } @Override public Tracerson warn ( Object that ){ this.out.print( this.queryWarnColor() ); this.out.print( this.stringify( that ) ); this.printlnColorfulEnd(); return this; } @Override public Tracerson warn ( Object Anything, Object...objects ){ this.printlnColorful( this.queryWarnColor(), Anything, objects ); return this; } @Override public Tracerson error ( Object that ){ this.err.println( this.stringify( that ) ); return this; } @Override public Tracerson error ( Object Anything, Object...objects ){ this.err.print( this.stringify( Anything ) ); for ( Object row : objects ) { this.err.print( this.mszElementSplit ); this.err.print( this.stringify( row ) ); } return this; } protected String queryTraceColor(){ if( this.out.equals( System.out ) ) { return this.mszTraceColor; } else { return "[TRACE] "; } } protected void printTraceInfo( StackTraceElement[] elements ){ this.out.println( this.getClass().getName() + ": Call Trace Info:"); if( elements != null ){ for( int i = 0; i < elements.length; i++ ){ if( i == 0 && elements[0].getClassName().equals( "java.lang.Thread" ) ){ continue; } this.out.println( "\tat " + elements[i] ); } } } @Override public Tracerson trace() { this.out.print( this.queryTraceColor() ); this.printTraceInfo( Thread.currentThread().getStackTrace() ); 
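// printlnColorfulEnd() closes the ANSI color sequence opened by queryTraceColor() above (or just ends the line when not writing to System.out).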
this.printlnColorfulEnd(); return this; } @Override public Tracerson trace( Object Anything, Object...objects ) { this.out.print( this.queryTraceColor() ); this.log( Anything, objects ); this.printTraceInfo( Thread.currentThread().getStackTrace() ); this.printlnColorfulEnd(); return this; } @Override public Tracer colorf( int colorCode, Object that ){ this.out.print( "\u001B[" + colorCode + "m" ); this.out.print( this.stringify( that ) ); this.printlnColorfulEnd(); return this; } @Override public Tracer colorf( int colorCode, Object Anything, Object...objects ){ this.printlnColorful( "\u001B[" + colorCode + "m", Anything, objects ); return this; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ArchCursorParser.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.util.CursorParser; import com.pinecone.framework.util.GeneralStrings; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; /** * Pinecone For Java JSONCursorParser [ Bean Nuts Almond JSON For Pinecone Java ] * Copyright © 2008 - 2028 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Harald.E / WJH] * Tip: * ***************************************************************************************** * JSON util Version Signature: Ver. 3.4 [Build 20240531] [Pinecone Ver.3.4] * Author: undefined * Last Modified Date: 2024-05-31 * ***************************************************************************************** * Principle : Bottom-up parsing implementation with the recursive descent method. * [ Since Java has no raw pointers, a cursor-based approach is the better fit ] * Reference[1]: https://www.json.org/json-en.html * Reference[2]: https://spec.json5.org/ * Syntax: T-> "", T->[0-9] T-> '', T->[a-zA-Z]..., etc * T-> { T : T }, T-> [ T ], etc * Support: JSON, JSON5 * ***************************************************************************************** * Notice: This is a high-tolerance JSON parser; it does not fully comply with the JSON standard in error situations * ***************************************************************************************** */ public abstract class ArchCursorParser implements CursorParser { protected long mnCharacter; protected boolean mbIsEOF; protected int mnParseAt ; protected int mnLineAt; protected char mcPrevious; protected Reader mReader; protected boolean mbUsePrevious; public ArchCursorParser( Reader reader ) { this.mReader = (Reader)(reader.markSupported() ? 
reader : new BufferedReader(reader)); this.mbIsEOF = false; this.mbUsePrevious = false; this.mcPrevious = 0; this.mnParseAt = 0; this.mnCharacter = 1L; this.mnLineAt = 1; } public ArchCursorParser( InputStream inputStream ) throws JSONParseException { this( new InputStreamReader(inputStream) ); } public ArchCursorParser( String s ) { this( new StringReader(s) ); } public void lineBack() { if ( !this.mbUsePrevious && this.mnParseAt > 0L ) { --this.mnParseAt; --this.mnLineAt; if( this.mnCharacter != 0 ) { --this.mnCharacter; } this.mbUsePrevious = true; this.mbIsEOF = false; } } @Override public void back() throws JSONParseException { if (!this.mbUsePrevious && this.mnParseAt > 0L) { --this.mnParseAt; --this.mnCharacter; this.mbUsePrevious = true; this.mbIsEOF = false; } else { throw new JSONParseException("Stepping back two steps is not supported"); } } public static int dehexchar(char c) { if (c >= '0' && c <= '9') { return c - 48; } else if (c >= 'A' && c <= 'F') { return c - 55; } else { return c >= 'a' && c <= 'f' ? c - 87 : -1; } } public boolean isEndLine() { return this.mbIsEOF && !this.mbUsePrevious; } public boolean more() throws JSONParseException { this.next(); if ( this.isEndLine() ) { return false; } else { this.back(); return true; } } @Override public char next() throws JSONParseException { int c; if (this.mbUsePrevious) { this.mbUsePrevious = false; c = this.mcPrevious; } else { try { c = this.mReader.read(); } catch ( IOException e ) { throw new JSONParseException(e); } if (c <= 0) { this.mbIsEOF = true; c = 0; } } ++this.mnParseAt; if ( this.mcPrevious == '\r' ) { ++this.mnLineAt; this.mnCharacter = (long)(c == 10 ? 0 : 1); } else if ( c == '\n' ) { ++this.mnLineAt; this.mnCharacter = 0L; } else { ++this.mnCharacter; } this.mcPrevious = (char)c; return this.mcPrevious; } public char next( char c ) throws JSONParseException { char n = this.next(); if (n != c) { throw this.syntaxError("Error parsing JSON string: expected '" + c + "' but saw '" + n + "'"); } else { return n; } } @Override public String next( int n ) throws JSONParseException { if (n == 0) { return ""; } else { char[] chars = new char[n]; for( int pos = 0; pos < n; ++pos ) { chars[pos] = this.next(); if ( this.isEndLine() ) { throw this.syntaxError("Error parsing JSON string: substring bounds error."); } } return new String(chars); } } public boolean skipComment( char cCurrentChar ){ if( cCurrentChar == '/' ){ char nextC = this.next(); if( nextC == '*' ){ while( true ) { char c = this.next(); if( this.isEndLine() ) { return true; /* EOF inside a block comment: stop instead of looping forever */ } if( c == '*' ){ c = this.next(); while ( c == '*' ) { c = this.next(); } if( c == '/' ){ return true; } } } } else if( nextC == '/' ){ while( true ) { char c = this.next(); if( this.isEndLine() ) { return true; /* EOF ends a line comment */ } if( c == '\n' ){ this.lineBack(); return true; } else if( c == '\r' ){ c = this.next(); if( c == '\n' ){ this.lineBack(); return true; } this.back(); return true; } } } else { this.back(); /* not a comment: unread the consumed character */ } } return false; } public char nextClean() throws JSONParseException { char c; do { c = this.next(); if ( this.skipComment( c ) ){ c = this.next(); } } while( c != 0 && c <= ' ' ); return c; } public StringBuilder nextString( char quote ) throws JSONParseException { StringBuilder sb = new StringBuilder(); while( true ) { char c = this.next(); if( this.isEndLine() ) { return sb; } switch(c) { case '\u0000': { sb.append( '\0' ); continue; } case '\n': { sb.append( '\n' ); continue; } case '\r': { sb.append( '\r' ); continue; //throw this.syntaxError("Error parsing JSON string: unterminated string."); // Tolerated by design: this parser is deliberately lenient. 
} case '\\': { c = this.next(); if( GeneralStrings.transferCharParse( c, this, sb ) ){ continue; } } default: { if ( c == quote ) { return sb; } sb.append(c); } } } } public String nextTo( char delimiter ) throws JSONParseException { StringBuffer sb = new StringBuffer(); while(true) { char c = this.next(); if (c == delimiter || c == 0 || c == '\n' || c == '\r') { if (c != 0) { this.back(); } return sb.toString().trim(); } sb.append(c); } } public String nextTo( String delimiters ) throws JSONParseException { StringBuffer sb = new StringBuffer(); while(true) { char c = this.next(); if (delimiters.indexOf(c) >= 0 || c == 0 || c == '\n' || c == '\r') { if (c != 0) { this.back(); } return sb.toString().trim(); } sb.append(c); } } protected StringBuilder eval_next_string( char currentChar ) { StringBuilder sb; for ( sb = new StringBuilder(); currentChar >= ' ' && ",:]}/\\\"[{;=#&".indexOf(currentChar) < 0; currentChar = this.next() ) { sb.append(currentChar); } return sb; } protected Object eval_next_string_token( StringBuilder sb, char currentChar ) { this.back(); String string = sb.toString().trim(); if ( string.isEmpty() ) { throw this.syntaxError("Error parsing JSON string: missing value."); } else { return JSONUtils.stringToValue( string ); } } @Override public Object nextValue( Object indexKey, Object parent, Object[] args ) throws JSONParseException { char c = this.nextClean(); switch(c) { case '"': case '\'': { return this.nextString(c).toString(); } case '[': { this.back(); return this.newJSONArray( indexKey, this, parent, args ); } case '{': { this.back(); return this.newJSONObject( indexKey, this, parent, args ); } default: { StringBuilder sb = this.eval_next_string( c ); return this.eval_next_string_token(sb, c); } } } @Override public Object nextValue() throws JSONParseException { return this.nextValue( null, null, null ); } public char skipTo( char to ) throws JSONParseException { char c; try { int startIndex = this.mnParseAt; int startLine = this.mnLineAt; long startCharacter = this.mnCharacter; this.mReader.mark(1000000); do { c = this.next(); if (c == 0) { this.mReader.reset(); this.mnParseAt = startIndex; this.mnCharacter = startCharacter; this.mnLineAt = startLine; return c; } } while(c != to); } catch ( IOException e ) { throw new JSONParseException( e ); } this.back(); return c; } public JSONParseException syntaxError( String message ) { return new JSONParseException( message + this.toString(), this.mnParseAt ); } @Override public String toString() { return " at " + this.mnParseAt + " [character " + this.mnCharacter + " line " + this.mnLineAt + "]"; } public void handleRedirectException( JSONParserRedirectException e ) { } protected abstract Object newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ); protected abstract Object newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ArchJSONArray.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.Debug; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Iterator; import java.util.ListIterator; public 
abstract class ArchJSONArray implements JSONArray { protected abstract void jsonDecode0( ArchCursorParser x ) throws JSONException ; @Override public abstract JSONArray jsonDecode( ArchCursorParser x ) throws JSONException ; @Override public abstract JSONArray jsonDecode( String source ) throws JSONException ; @Override public abstract void assimilate( List that ); @Override public abstract List getArray(); @Override public Object front() { return this.opt( 0 ); } @Override public Object back() { return this.opt( this.length() - 1 ); } @Override public int length() { return this.size(); } /** Basic List **/ @Override public abstract int size(); @Override public abstract boolean isEmpty(); @Override public abstract boolean contains( Object o ); @Override public abstract Iterator iterator(); @Override public abstract Object[] toArray(); @Override public abstract <T> T[] toArray( T[] a ) ; protected abstract boolean innerListAdd( Object e ); @Override public boolean add( Object e ) { return this.innerListAdd(e); } @Override public abstract void clear(); protected abstract boolean innerListRemove( Object index ); @Override public Object remove( int index ) { Object o = this.opt(index); if ( index >= 0 && index < this.length() ) { this.innerListRemove( index ); } return o; } @Override public JSONArray xRemove( int index ) { this.remove(index); return this; } @Override public boolean remove( Object o ) { return this.innerListRemove( o ); } @Override public JSONArray xRemove( Object o ) { this.remove(o); return this; } @Override public Object erase( Object key ) { return this.remove( JSONUtils.asInt32Key( key ) ); } @Override public abstract boolean containsAll( Collection c ); @Override public abstract boolean addAll( Collection c ); @Override public JSONArray xAddAll( Collection c ) { this.addAll(c); return this; } @Override public abstract boolean addAll( int index, Collection c ); @Override public JSONArray xAddAll( int index, Collection c ) { this.addAll( index, c ); return this; } @Override public abstract boolean removeAll( Collection c ); @Override public JSONArray xRemoveAll( Collection c ) { this.removeAll(c); return this; } @Override public abstract boolean retainAll( Collection c ); @Override public JSONArray xRetainAll(Collection c) { this.retainAll(c); return this; } protected void affirmCapacity( int cap ) { for( int i = this.size(); i < cap; ++i ) { this.innerListAdd( JSON.NULL ); } } @Override public Object set( int index, Object element ) { if ( index == -1 ) { this.innerListAdd( element ); return null; } else if ( this.size() > index ) { return this.innerListSet( index, element ); } else { this.affirmCapacity( index ); this.innerListAdd( element ); return null; } } @Override public JSONArray xSet( int index, Object element ) { this.set( index, element ); return this; } @Override public Object affirm( int index ) { if ( index == -1 ) { this.innerListAdd( JSON.NULL ); return JSON.NULL; } else if ( this.size() > index ) { return this.innerListGet( index ); } else { this.affirmCapacity( index + 1 ); return this.innerListGet( index ); } } @Override public JSONObject affirmObject( int index ) { if ( index == -1 ) { JSONObject obj = new JSONMaptron(); this.innerListAdd( obj ); return obj; } else if ( this.size() > index ) { Object obj = this.innerListGet( index ); if( obj instanceof JSONObject ) { return (JSONObject) obj; } obj = new JSONMaptron(); this.innerListSet( index, obj ); return (JSONObject)obj; } else { this.affirmCapacity( index ); JSONObject obj = new JSONMaptron(); 
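// affirmCapacity( index ) has padded the list with JSON.NULL up to index, so the append below lands the new object exactly at position index.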
this.innerListAdd( obj ); return obj; } } @Override public JSONArray affirmArray(int index ) { if ( index == -1 ) { JSONArray obj = new JSONArraytron(); this.innerListAdd( obj ); return obj; } else if ( this.size() > index ) { Object obj = this.innerListGet( index ); if( obj instanceof JSONArray ) { return (JSONArray) obj; } obj = new JSONArraytron(); this.innerListSet( index, obj ); return (JSONArray) obj; } else { this.affirmCapacity( index ); JSONArray obj = new JSONArraytron(); this.innerListAdd( obj ); return obj; } } @Override public boolean containsValue( Object value ) { return this.contains( value ); } @Override public abstract void add( int index, Object element ) ; @Override public JSONArray xAdd( int index, Object element ) { this.add(index, element); return this; } @Override public abstract int indexOf( Object o ); @Override public abstract int lastIndexOf( Object o ); @Override public abstract ListIterator listIterator(); @Override public abstract ListIterator listIterator( int index ); @Override public abstract List subList( int fromIndex, int toIndex ) ; protected abstract Object innerListGet( int key ); @Override public Object get( int index ) throws JSONException { Object object = this.opt( index ); if ( object == null ) { throw new JSONException("JSONArray[" + index + "] not found."); } else { return object; } } @Override public Object get( Object key ) { return this.get( JSONUtils.asInt32Key( key ) ); } @Override public boolean getBoolean( int index ) throws JSONException { Object object = this.get(index); if ( !object.equals(Boolean.FALSE) && (!(object instanceof String) || !((String)object).equalsIgnoreCase("false")) ) { if ( !object.equals(Boolean.TRUE) && (!(object instanceof String) || !((String)object).equalsIgnoreCase("true")) ) { throw new JSONException("JSONArray[" + index + "] is not a boolean."); } else { return true; } } else { return false; } } @Override public double getDouble( int index ) throws JSONException { Object object = this.get(index); try { return object instanceof Number ? ( (Number)object ).doubleValue() : Double.parseDouble( (String)object ); } catch ( Exception e ) { throw new JSONException("JSONArray[" + index + "] is not a number."); } } @Override public int getInt( int index ) throws JSONException { Object object = this.get(index); try { return object instanceof Number ? ( (Number)object ).intValue() : Integer.parseInt( (String)object ); } catch ( Exception e ) { throw new JSONException("JSONArray[" + index + "] is not a number."); } } @Override public JSONArray getJSONArray(int index ) throws JSONException { Object object = this.get(index); if ( object instanceof JSONArray ) { return (JSONArray)object; } else { throw new JSONException("JSONArray[" + index + "] is not a JSONArray."); } } @Override public JSONObject getJSONObject( int index ) throws JSONException { Object object = this.get(index); if ( object instanceof JSONObject ) { return (JSONObject)object; } else { throw new JSONException("JSONArray[" + index + "] is not a JSONObject."); } } @Override public long getLong( int index ) throws JSONException { Object object = this.get(index); try { return object instanceof Number ? 
( (Number)object ).longValue() : Long.parseLong( (String)object ); } catch ( Exception e ) { throw new JSONException("JSONArray[" + index + "] is not a number."); } } @Override public String getString( int index ) throws JSONException { Object object = this.get(index); if ( object instanceof String ) { return (String)object; } else { throw new JSONException("JSONArray[" + index + "] not a string."); } } @Override public byte[] getBytes( int index ) throws JSONException { Object object = this.get(index); if ( object instanceof String ) { return ((String) object).getBytes(); } else if ( object instanceof byte[] ) { return (byte[])( (byte[])object ); } else { throw new JSONException("JSONObject[" + index + "] not a string nor bytes."); } } @Override public boolean isNull( int index ) { return JSON.NULL.equals(this.opt(index)); } @Override public String join( String separator ) throws JSONException { int len = this.length(); StringBuffer sb = new StringBuffer(); for( int i = 0; i < len; ++i ) { if (i > 0) { sb.append(separator); } sb.append( JSONUtils.valueToString( this.innerListGet(i)) ); } return sb.toString(); } @Override public Object opt( int index ) { return index >= 0 && index < this.length() ? this.innerListGet( index ) : null; } @Override public boolean optBoolean( int index ) { return this.optBoolean(index, false); } @Override public boolean optBoolean( int index, boolean defaultValue ) { try { return this.getBoolean(index); } catch (Exception e) { return defaultValue; } } @Override public double optDouble( int index ) { return this.optDouble( index, Double.NaN ); } @Override public double optDouble( int index, double defaultValue ) { try { return this.getDouble(index); } catch (Exception e) { return defaultValue; } } @Override public int optInt( int index ) { return this.optInt(index, 0); } @Override public int optInt( int index, int defaultValue ) { try { return this.getInt(index); } catch (Exception e) { return defaultValue; } } @Override public JSONArray optJSONArray( int index ) { Object o = this.opt(index); return o instanceof JSONArray ? (JSONArray)o : null; } @Override public JSONObject optJSONObject( int index ) { Object o = this.opt(index); return o instanceof JSONObject ? (JSONObject)o : null; } @Override public long optLong( int index ) { return this.optLong(index, 0L); } @Override public long optLong( int index, long defaultValue ) { try { return this.getLong(index); } catch (Exception e) { return defaultValue; } } @Override public String optString( int index ) { return this.optString(index, ""); } @Override public String optString( int index, String defaultValue ) { Object object = this.opt(index); return JSON.NULL.equals(object) ? 
defaultValue : object.toString(); } @Override public byte[] optBytes( int index ) { return this.optBytes( index, "".getBytes() ); } @Override public byte[] optBytes( int index, byte[] defaultValue ) { try { return this.getBytes( index ); } catch ( Exception e ) { return defaultValue; } } @Override public Object opt( Object key ) { try { return this.opt(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return null; } } @Override public boolean optBoolean( Object key ) { try { return this.optBoolean(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return false; } } @Override public double optDouble( Object key ) { try { return this.optDouble(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return Double.NaN; } } @Override public int optInt( Object key ) { try { return this.optInt(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return Integer.MAX_VALUE; } } @Override public JSONArray optJSONArray( Object key ) { try { return this.optJSONArray(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return null; } } @Override public JSONObject optJSONObject( Object key ) { try { return this.optJSONObject(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return null; } } @Override public long optLong( Object key ) { try { return this.optLong(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return Long.MAX_VALUE; } } @Override public String optString( Object key ) { try { return this.optString(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return null; } } @Override public byte[] optBytes( Object key ) { try { return this.optBytes(JSONUtils.asInt32Key(key)); } catch ( Exception e ) { return null; } } protected abstract Object innerListSet( int index, Object element ); @Override public JSONArray insert( Object key, Object val ) { return this.put( JSONUtils.asInt32Key( key ), val ); } @Override public Object insertIfAbsent( Object key, Object value ) { if( !this.containsKey( JSONUtils.asInt32Key( key ) ) ){ return this.insert( key, value ); } return null; } @Override public JSONArray put( boolean value ) { this.put((Object)(value ? Boolean.TRUE : Boolean.FALSE)); return this; } @Override public abstract JSONArray put( Collection value ); @Override public JSONArray put( double value ) throws JSONException { Double d = value; JSONUtils.prospectNumberQualify(d); this.put( (Object)d ); return this; } @Override public JSONArray put( int value ) { this.put( (Integer)value ); return this; } @Override public JSONArray put( long value ) { this.put( (Long)value ); return this; } @Override public JSONArray put( Map value ) { this.put((Object)(new JSONMaptron(value))); return this; } @Override public JSONArray put( Object value ) { this.innerListAdd( value ); return this; } @Override public JSONArray put( JSONObject value ) { this.innerListAdd( value ); return this; } @Override public JSONArray put( JSONArray value ) { this.innerListAdd( value ); return this; } @Override public JSONArray put( int index, boolean value ) throws JSONException { this.put( index, (Object)(value ? 
Boolean.TRUE : Boolean.FALSE) ); return this; } @Override public abstract JSONArray put( int index, Collection value ) throws JSONException ; @Override public JSONArray put( int index, double value ) throws JSONException { this.put( index, (Double)value ); return this; } @Override public JSONArray put( int index, int value ) throws JSONException { this.put( index, (Integer)value ); return this; } @Override public JSONArray put( int index, long value ) throws JSONException { this.put( index, (Long)value ); return this; } @Override public JSONArray put( int index, JSONArray value ) throws JSONException { this.innerListSet( index, value ); return this; } @Override public JSONArray put( int index, JSONObject value ) throws JSONException { this.innerListSet( index, value ); return this; } @Override public abstract JSONArray put( int index, Map value ) throws JSONException ; @Override public JSONArray put( int index, Object value ) throws JSONException { JSONUtils.prospectNumberQualify(value); if ( index < 0 ) { throw new JSONException("JSONArray[" + index + "] not found."); } else { if ( index < this.length() ) { this.innerListSet( index, value ); } else { while( index != this.length() ) { this.put( JSON.NULL ); } this.put( value ); } return this; } } @Override public JSONObject toJSONObject( JSONArray names ) throws JSONException { if ( names != null && names.length() != 0 && this.length() != 0 ) { JSONObject jo = new JSONMaptron(); for( int i = 0; i < names.length(); ++i ) { jo.put(names.getString(i), this.opt(i)); } return jo; } return null; } @Override public JSONObject toJSONObject() { JSONObject jo = new JSONMaptron(); for( int i = 0; i < this.size(); ++i ) { jo.put( String.valueOf(i), this.opt( i ) ); } return jo; } @Override public JSONArray toJSONArray() { return this; } @Override public abstract Set entrySet() ; @Override public Collection values() { return this; } @Override public Map toMap() { return this.toJSONObject(); } @Override public List toList() { return this; } @Override public boolean hasOwnProperty( Object elm ) { return this.containsKey( elm ); } @Override public boolean hasOwnProperty( int elm ) { return this.containsKey( elm ); } @Override public boolean containsKey( Object elm ) { try { if( elm instanceof Number ) { return this.hasOwnProperty( ( (Number)elm ).intValue() ); } return this.hasOwnProperty( (int)Integer.valueOf(elm.toString()) ); } catch ( NumberFormatException e ){ return false; } } @Override public boolean containsKey( int elm ) { int nLength = this.length(); if( elm < 0 || nLength == 0 ){ return false; } return nLength > elm; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { try { return this.toJSONString(0); } catch (Exception e) { return null; } } @Override public String toJSONStringI( int nIndentFactor ) { try { return this.toJSONString(nIndentFactor); } catch (Exception e) { return null; } } @Override public String toJSONString( int nIndentFactor ) throws IOException { StringWriter sw = new StringWriter(); synchronized(sw.getBuffer()) { return this.write( sw, nIndentFactor, 0 ).toString(); } } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } @Override public String prototypeName() { return Prototype.prototypeName( this ); } @Override public boolean isPrototypeOf( TypeIndex that ) { return that.equals( this.prototype() ); } @Override public JSONArray clone() { try { return (JSONArray) super.clone(); } catch ( CloneNotSupportedException e ) { /* this shouldn't
happen, since we are Cloneable */ throw new InternalError(e); } } @Override public Writer write( Writer writer ) throws IOException { return this.write( writer, 0, 0 ); } @Override public Writer write( Writer writer, int nIndentFactor ) throws IOException { return this.write( writer, nIndentFactor, 0 ); } @Override public abstract Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ArchJSONObject.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.StringUtils; import java.io.IOException; import java.io.Serializable; import java.io.StringWriter; import java.io.Writer; import java.util.Map; import java.util.Set; import java.util.Collection; import java.util.UUID; import java.util.Iterator; import java.util.List; public abstract class ArchJSONObject implements JSONObject, Serializable { protected abstract void jsonDecode0( ArchCursorParser x ) throws JSONException ; @Override public abstract JSONObject jsonDecode( ArchCursorParser x ) throws JSONException ; @Override public abstract JSONObject jsonDecode( String source ) throws JSONException; @Override public abstract JSONObject assimilate( Map that ); @Override public JSONObject shareFrom( JSONObject that, String szKey ) { this.put( szKey, that.get( szKey ) ); return this; } @Override public JSONObject shareFrom( JSONObject that, String[] szKeys ) { for ( String szKey : szKeys ) { this.putOnce( szKey, that.get( szKey ) ); } return this; } @Override public JSONObject subJson ( String szKey ) { JSONObject that = new JSONMaptron(); that.shareFrom( this, szKey ); return that; } @Override public JSONObject subJson ( String[] szKeys ) { JSONObject that = new JSONMaptron(); that.shareFrom( this, szKeys ); return that; } @Override public JSONObject detachSub ( String szKey ) { JSONObject that = new JSONMaptron(); that.put( szKey, this.get( szKey ) ); this.remove( szKey ); return that; } @Override public JSONObject detachSub ( String[] szKeys ) { JSONObject that = new JSONMaptron(); for ( String szKey : szKeys ) { that.putOnce( szKey, this.get( szKey ) ); this.remove( szKey ); } return that; } @Override public JSONObject moveSubFrom ( JSONObject that, String szKey ) { this.put( szKey, that.get( szKey ) ); that.remove( szKey ); return this; } @Override public JSONObject moveSubFrom ( JSONObject that, String[] szKeys ) { for ( String szKey : szKeys ) { this.putOnce( szKey, that.get( szKey ) ); that.remove( szKey ); } return this; } @Override public abstract Map getMap(); /** Basic Map **/ @Override public abstract int size(); @Override public abstract boolean isEmpty(); protected abstract boolean innerMapContainsKey( Object key ); @Override public boolean containsKey( Object key ) { boolean result = this.innerMapContainsKey( key ); if ( !result && ( key instanceof Number || key instanceof Character || key instanceof Boolean || key instanceof UUID ) ) { result = this.innerMapContainsKey( key.toString() ); } return result; } @Override public abstract boolean containsValue( Object value ); @Override public abstract void putAll( Map m ); public JSONObject xPutAll(Map m ) { this.putAll(m); return this; } @Override public abstract void clear(); @Override public JSONObject xClear() { this.clear(); return this; }
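/* Usage sketch (added illustration, not part of the original source): the shareFrom /
   subJson / detachSub helpers above project keys between objects. Assuming fresh maptrons:
       JSONObject a = new JSONMaptron();
       a.put( "id", 1 ).put( "name", "hydra" );     // put( String, ... ) returns this, so calls chain
       JSONObject b = a.subJson( "id" );            // b == {"id":1}, a unchanged
       JSONObject c = a.detachSub( "name" );        // c == {"name":"hydra"}, key removed from a
*/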
@Override public abstract Object remove( Object key ); @Override public Object erase( Object key ) { return this.remove( key ); } @Override public JSONObject xRemove(Object key) { this.remove(key); return this; } @Override public abstract Set keySet(); @Override public abstract Collection values(); @Override public abstract Set > entrySet(); @Override public JSONObject accumulate( String key, Object value ) throws JSONException { JSONUtils.prospectNumberQualify(value); Object object = this.opt(key); if ( object == null ) { this.put( key, value instanceof JSONArray ? (new JSONArraytron()).put(value) : value ); } else if ( object instanceof JSONArray ) { ((JSONArray)object).put(value); } else { this.put(key, (Object)( new JSONArraytron()).put(object).put(value) ); } return this; } @Override public JSONObject append( String key, Object value ) throws JSONException { JSONUtils.prospectNumberQualify(value); Object object = this.opt(key); if ( object == null ) { this.put(key, (Object)( new JSONArraytron() ).put(value)); } else { if ( !(object instanceof JSONArray) ) { throw new JSONException("JSONObject[" + key + "] is not a JSONArray."); } this.put(key, (Object)((JSONArray)object).put(value)); } return this; } protected abstract Object innerMapGet( Object key ) ; @Override public Object get( Object key ) { Object val = this.innerMapGet(key); if ( val == null && ( key instanceof Number || key instanceof Character || key instanceof Boolean || key instanceof UUID ) ) { val = this.innerMapGet( key.toString() ); } return val; } @Override public Object get( String key ) throws JSONException { if ( key == null ) { throw new JSONException("Null key."); } else { Object object = this.opt(key); if (object == null) { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] not found."); } else { return object; } } } @Override public boolean getBoolean( String key ) throws JSONException { Object object = this.get(key); if (!object.equals(Boolean.FALSE) && (!(object instanceof String) || !((String)object).equalsIgnoreCase("false"))) { if (!object.equals(Boolean.TRUE) && (!(object instanceof String) || !((String)object).equalsIgnoreCase("true"))) { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] is not a Boolean."); } else { return true; } } else { return false; } } @Override public double getDouble( String key ) throws JSONException { Object object = this.get(key); try { return object instanceof Number ? ((Number)object).doubleValue() : Double.parseDouble((String)object); } catch (Exception e) { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] is not a number."); } } @Override public int getInt( String key ) throws JSONException { Object object = this.get(key); try { return object instanceof Number ? 
((Number)object).intValue() : Integer.parseInt((String)object); } catch (Exception e) { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] is not an int."); } } @Override public JSONArray getJSONArray ( String key ) throws JSONException { Object object = this.get(key); if ( object instanceof JSONArray ) { return (JSONArray)object; } else { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] is not a JSONArray."); } } @Override public JSONObject getJSONObject( String key ) throws JSONException { Object object = this.get(key); if ( object instanceof JSONObject ) { return (JSONObject)object; } else { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] is not a JSONObject."); } } @Override public long getLong( String key ) throws JSONException { Object object = this.get(key); try { return object instanceof Number ? ((Number)object).longValue() : Long.parseLong((String)object); } catch (Exception e) { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] is not a long."); } } @Override public String getString( String key ) throws JSONException { Object object = this.get(key); if ( object instanceof String ) { return (String)object; } else { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] not a string."); } } @Override public byte[] getBytes( String key ) throws JSONException { Object object = this.get(key); if ( object instanceof String ) { return ( (String)object ).getBytes(); } else if ( object instanceof byte[] ) { return (byte[])( (byte[])object ); } else { throw new JSONException("JSONObject[" + StringUtils.jsonQuote(key) + "] not a string nor bytes."); } } @Override public JSONArray affirmArray( String key ) { Object o = this.opt(key); if( o instanceof JSONArray ){ return (JSONArray)o; } JSONArray jNew = new JSONArraytron(); this.put( key, jNew ); return jNew; } @Override public JSONObject affirmObject(String key ) { Object o = this.opt( key ); if( o instanceof JSONObject ){ return (JSONObject) o; } JSONObject jNew = new JSONMaptron(); this.put( key, jNew ); return jNew; } @Override public Object affirm( String key ) { if( this.containsKey( key ) ){ return this.opt(key); } Object o = JSON.NULL; this.put( key, o ); return o; } @Override public Object opt( String key ) { return key == null ? null : this.innerMapGet( key ); } @Override public boolean optBoolean( String key ) { return this.optBoolean(key, false); } @Override public boolean optBoolean( String key, boolean defaultValue ) { try { return this.getBoolean(key); } catch (Exception e) { return defaultValue; } } @Override public double optDouble( String key ) { return this.optDouble( key, Double.NaN ); } @Override public double optDouble( String key, double defaultValue ) { try { return this.getDouble(key); } catch ( Exception e ) { return defaultValue; } } @Override public int optInt( String key ) { return this.optInt(key, 0); } @Override public int optInt( String key, int defaultValue ) { try { return this.getInt(key); } catch (Exception e) { return defaultValue; } } @Override public JSONArray optJSONArray( String key) { Object o = this.opt(key); return o instanceof JSONArray ? (JSONArray)o : null; } @Override public JSONObject optJSONObject( String key) { Object object = this.opt(key); return object instanceof JSONObject ? 
(JSONObject)object : null; } @Override public long optLong( String key ) { return this.optLong(key, 0L); } @Override public long optLong( String key, long defaultValue ) { try { return this.getLong(key); } catch ( Exception e ) { return defaultValue; } } @Override public String optString( String key ) { return this.optString(key, ""); } @Override public String optString( String key, String defaultValue ) { Object object = this.opt(key); return JSON.NULL.equals(object) ? defaultValue : object.toString(); } @Override public byte[] optBytes( String key ) { return this.optBytes( key, "".getBytes() ); } @Override public byte[] optBytes( String key, byte[] defaultValue ) { try { return this.getBytes( key ); } catch ( Exception e ) { return defaultValue; } } @Override public Object opt( Object key ) { try{ return this.opt( JSONUtils.asStringKey( key ) ); } catch ( Exception e ) { return null; } } @Override public boolean optBoolean( Object key ) { try { return this.optBoolean(JSONUtils.asStringKey(key)); } catch (Exception e) { return false; } } @Override public double optDouble( Object key ) { try { return this.optDouble(JSONUtils.asStringKey(key)); } catch ( Exception e ) { return Double.NaN; } } @Override public int optInt( Object key ) { try { return this.optInt(JSONUtils.asStringKey(key)); } catch ( Exception e ) { return Integer.MAX_VALUE; } } @Override public JSONArray optJSONArray( Object key ) { try { return this.optJSONArray( JSONUtils.asStringKey(key) ); } catch ( Exception e ) { return null; } } @Override public JSONObject optJSONObject( Object key ) { try { return this.optJSONObject( JSONUtils.asStringKey(key) ); } catch ( Exception e ) { return null; } } @Override public long optLong( Object key ) { try { return this.optLong(JSONUtils.asStringKey(key)); } catch ( Exception e ) { return Long.MAX_VALUE; } } @Override public String optString( Object key ) { try { return this.optString(JSONUtils.asStringKey(key)); } catch ( Exception e ) { return null; } } @Override public byte[] optBytes( Object key ) { try { return this.optBytes(JSONUtils.asStringKey(key)); } catch (Exception e) { return null; } } @Override public JSONObject increment( String key ) throws JSONException { Object value = this.opt(key); if (value == null) { this.put(key, 1); } else if (value instanceof Integer) { this.put(key, (Integer)value + 1); } else if (value instanceof Long) { this.put(key, (Long)value + 1L); } else if (value instanceof Double) { this.put(key, (Double)value + 1.0D); } else { if (!(value instanceof Float)) { throw new JSONException("Unable to increment [" + StringUtils.jsonQuote(key) + "]."); } this.put(key, (double)((Float)value + 1.0F)); } return this; } @Override public boolean isNull( String key ) { return JSON.NULL.equals(this.opt(key)); } @Override public Iterator keys() { return this.keySet().iterator(); } @Override public JSONArray names() { JSONArray ja = new JSONArraytron(); Iterator keys = this.keys(); while( keys.hasNext() ) { ja.put( keys.next() ); } return ja.length() == 0 ? 
null : ja; } @Override public String[] getOwnPropertyNames () { return JSONUtils.getOwnPropertyNames( this ); } protected abstract Object innerMapPut( String key, Object value ); @Override public JSONObject insert( Object key, Object value ) { return this.put( key.toString(), value ); } @Override public Object putIfAbsent( String key, Object value ) { return this.getMap().putIfAbsent( key, value ); } @Override public Object insertIfAbsent( Object key, Object value ) { return this.putIfAbsent( key.toString(), value ); } @Override public JSONObject put( String key, boolean value ) throws JSONException { this.put(key, (Object)(value ? Boolean.TRUE : Boolean.FALSE)); return this; } @Override public JSONObject put( String key, Collection value ) throws JSONException { this.put(key, (Object)(new JSONArraytron(value))); return this; } @Override public JSONObject put( String key, double value ) throws JSONException { this.put( key, (Double) value ); return this; } @Override public JSONObject put( String key, int value ) throws JSONException { this.put( key, (Integer)value ); return this; } @Override public JSONObject put( String key, long value ) throws JSONException { this.put(key, (Object) (Long) value ); return this; } @Override public abstract JSONObject put( String key, Map value ) throws JSONException ; @Override public JSONObject put( String key, JSONArray value ) throws JSONException { this.innerMapPut( key, value ); return this; } @Override public JSONObject put( String key, JSONObject value ) throws JSONException { this.innerMapPut( key, value ); return this; } @Override public JSONObject put( String key, Object value ) throws JSONException { if ( key == null ) { throw new NullPointerException( "Null key." ); } else { if ( value != null ) { JSONUtils.prospectNumberQualify( value ); this.innerMapPut( key, value ); } else { this.remove( key ); } return this; } } @Override public JSONObject embed( String key, Object value ) throws JSONException { if ( key == null ) { throw new NullPointerException("Null key."); } else { if ( value != null ) { JSONUtils.prospectNumberQualify(value); this.innerMapPut( key, value ); } else { this.innerMapPut( key, JSON.NULL ); } return this; } } @Override public JSONObject putOnce( String key, Object value ) throws JSONException { if ( key != null && value != null ) { if ( this.opt(key) != null ) { throw new JSONException("Duplicate key \"" + key + "\""); } this.put(key, value); } return this; } @Override public JSONObject putOpt( String key, Object value ) throws JSONException { if (key != null && value != null) { this.put(key, value); } return this; } protected abstract Object innerMapRemove( String key ); @Override public Object remove( String key ) { return this.innerMapRemove(key); } @Override public JSONObject removeAll( Collection keys ) { for( String key : keys ) { this.remove( key ); } return this; } @Override public JSONObject removeAll( String[] keys ) { for( String key : keys ) { this.remove( key ); } return this; } @Override public JSONArray toJSONArray( JSONArray names ) throws JSONException { if (names != null && names.length() != 0) { JSONArray ja = new JSONArraytron(); for( int i = 0; i < names.length(); ++i ) { ja.put(this.opt(names.getString(i))); } return ja; } else { return null; } } @Override public JSONArray toJSONArray() { JSONArray jRegressed = new JSONArraytron(); for ( Object obj : this.entrySet() ) { Map.Entry kv = ( Map.Entry ) obj; jRegressed.put( kv.getValue() ); } return jRegressed; } @Override public JSONObject toJSONObject() { 
return this; } @Override public abstract Map.Entry front() ; @Override public abstract Map.Entry back() ; @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { try { return this.toJSONString( 0 ); } catch ( Exception e ) { return null; } } @Override public String toJSONStringI( int nIndentFactor ) { try { return this.toJSONString(nIndentFactor); } catch (Exception e) { return null; } } @Override public String toJSONString( int nIndentFactor ) throws IOException { StringWriter w = new StringWriter(); synchronized( w.getBuffer() ) { return this.write( w, nIndentFactor,0 ).toString(); } } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } @Override public String prototypeName() { return Prototype.prototypeName( this ); } @Override public boolean isPrototypeOf ( TypeIndex that ) { return that.equals( this.prototype() ); } @Override public boolean hasOwnProperty ( Object key ) { return this.containsKey( key ); } @Override public Map toMap(){ return this; } @Override public List toList(){ return this.toJSONArray(); } @Override public JSONObject clone() { try { return (JSONObject) super.clone(); } catch ( CloneNotSupportedException e ) { /* this shouldn't happen, since we are Cloneable */ throw new InternalError(e); } } @Override public Writer write( Writer writer ) throws IOException { return this.write( writer, 0, 0 ); } @Override public Writer write( Writer writer, int nIndentFactor ) throws IOException { return this.write( writer, nIndentFactor, 0 ); } @Override public abstract Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/CustomizableJSONCursorParser.java ================================================ package com.pinecone.framework.util.json; import java.io.InputStream; import java.io.Reader; import java.lang.reflect.InvocationTargetException; public class CustomizableJSONCursorParser extends ArchCursorParser { protected Class<? extends JSONObject> mJSONObjectClass; protected Class<? extends JSONArray> mJSONArrayClass; public CustomizableJSONCursorParser( Reader reader, Class<? extends JSONObject> jObjectClass, Class<? extends JSONArray> jArrayClass ) { super( reader ); this.mJSONObjectClass = jObjectClass; this.mJSONArrayClass = jArrayClass; } public CustomizableJSONCursorParser( InputStream inputStream, Class<? extends JSONObject> jObjectClass, Class<? extends JSONArray> jArrayClass ) throws JSONParseException { super( inputStream ); this.mJSONObjectClass = jObjectClass; this.mJSONArrayClass = jArrayClass; } public CustomizableJSONCursorParser( String s, Class<? extends JSONObject> jObjectClass, Class<? extends JSONArray> jArrayClass ) { super( s ); this.mJSONObjectClass = jObjectClass; this.mJSONArrayClass = jArrayClass; } @Override protected JSONArray newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) { try { return this.mJSONArrayClass.getDeclaredConstructor().newInstance(); } catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) { return null; } } @Override protected JSONObject newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) { try { return this.mJSONObjectClass.getDeclaredConstructor().newInstance(); } catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) { return null; } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/Dictson.java
================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.unit.Dictionary; public interface Dictson extends Dictionary, JSONDictium { default Object insert( Object key, Object value ) { if( this.isList() ) { int index = JSONUtils.asInt32Key( key ); if( index >= 0 ) { if( index == this.getList().size() ){ return this.getList().put( value ); } } this.convertToMap(); } return this.getMap().put( JSONUtils.asStringKey(key), value ); } JSONObject affirmMap() ; JSONArray affirmList() ; JSONObject resetAsMap() ; JSONArray resetAsList() ; JSONObject getMap() throws ClassCastException ; JSONArray getList() throws ClassCastException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/GenericJSONEncoder.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.system.prototype.PinenutTraits; import com.pinecone.framework.util.StringUtils; import java.io.IOException; import java.io.Writer; import java.lang.reflect.Array; import java.util.Collection; import java.util.Iterator; import java.util.Map; public class GenericJSONEncoder implements JSONEncoder { public static void beforeJsonElementWrote( Writer writer, int nIndentFactor, int nIndentBlankNum, boolean bHasNextElement ) throws IOException { if ( bHasNextElement ) { writer.write(','); } if ( nIndentFactor > 0 ) { writer.write('\n'); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } public static void indentBlank( Writer writer, int nIndentBlankNum ) throws IOException { for( int i = 0; i < nIndentBlankNum; ++i ) { writer.write(' ' ); } } public GenericJSONEncoder() { } protected Writer writeUnidentifiedObject ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { StringUtils.addSlashes( String.format( PinenutTraits.OBJ_STRINGIFY_DEFAULT, that.getClass().getName() + "(0x" + Integer.toHexString( that.hashCode() ) + ")" ), writer, true ); return writer; } protected Writer writeUnknownAnyObject ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { String szJsonString = ""; try { szJsonString = PinenutTraits.invokeToJSONString( that, nIndentFactor, nIndentBlankNum ); } catch ( Exception e ){ try { szJsonString = PinenutTraits.invokeToJSONString( that ); } catch ( Exception e1 ){ try{ szJsonString = PinenutTraits.invokeCaseToString( that, null ); StringUtils.addSlashes( szJsonString, writer, true ); return writer; } catch ( IllegalArgumentException ea ) { return this.writeUnidentifiedObject( that, writer, nIndentFactor, nIndentBlankNum ) ; } } } writer.write( szJsonString ); } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } @Override public Writer write ( Pinenut that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { writer.write( that.toJSONString() ); } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } @Override public Writer write ( JSONObject that, Writer writer ) throws IOException { return this.write( that, writer,0,0 ); } @Override public Writer write ( JSONObject that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { that.write( writer, nIndentFactor, nIndentBlankNum ); } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } 
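/* Added note (illustrative, not from the original source): throughout these writers,
   nIndentFactor is the per-level indent width and nIndentBlankNum the absolute indent
   already owed to the current nesting level; nested calls pass down
   nNewIndent = nIndentBlankNum + nIndentFactor. E.g. encoding {"a":1,"b":2} with
   nIndentFactor = 2 and nIndentBlankNum = 0 renders as:
       {
         "a": 1,
         "b": 2
       }
*/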
return writer; } @Override public Writer write ( JSONArray that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { that.write( writer, nIndentFactor, nIndentBlankNum ); } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } @Override public void writeKeyValue ( Writer writer, Object key, Object val, int nIndentFactor, int nIndentBlankNum ) throws JSONException, IOException { writer.write( StringUtils.jsonQuote( key.toString() ) ); writer.write(':'); if ( nIndentFactor > 0 ) { writer.write( ' '); } this.write( val, writer, nIndentFactor, nIndentBlankNum ); } @Override public Writer writeMapFmtEntries ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { boolean bHasNextElement = false; int length = that.size(); Iterator iter = that.iterator(); writer.write('{'); if ( length == 1 ) { Object o = iter.next(); Map.Entry kv = (Map.Entry) o; this.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nIndentBlankNum ); } else if ( length != 0 ) { for( int nNewIndent = nIndentBlankNum + nIndentFactor; iter.hasNext(); bHasNextElement = true ) { GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement ); Object o = iter.next(); Map.Entry kv = (Map.Entry) o; this.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nNewIndent ); } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } writer.write( '}' ); return writer; } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } @Override public Writer writeArray ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { boolean bHasNextElement = false; int length = Array.getLength( that ); writer.write('['); if ( length == 1 ) { this.write( Array.get( that, 0 ), writer, nIndentFactor, nIndentBlankNum ); } else if ( length != 0 ) { int nNewIndent = nIndentBlankNum + nIndentFactor; for( int i = 0; i < length; ++i ) { GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement ); this.write( Array.get( that, i ),writer, nIndentFactor, nNewIndent ); bHasNextElement = true; } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } writer.write(']'); } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } @Override public Writer write ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { boolean bHasNextElement = false; int length = that.size(); writer.write('['); Iterator iter = that.iterator(); if ( length == 1 ) { this.write( iter.next(),writer , nIndentFactor, nIndentBlankNum ); } else if ( length != 0 ) { int nNewIndent = nIndentBlankNum + nIndentFactor; while( iter.hasNext() ) { GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement ); this.write( iter.next(), writer, nIndentFactor, nNewIndent ); bHasNextElement = true; } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } writer.write(']'); } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } public Writer write ( Map.Entry that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { writer.write('{'); this.writeKeyValue( writer, 
that.getKey(), that.getValue(), nIndentFactor, nIndentBlankNum ); writer.write( '}' ); return writer; } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } @Override public Writer write ( Object that, Writer writer ) throws IOException { return this.write( that, writer, 0, 0 ); } @Override public Writer write ( Object that, Writer writer, int nIndentFactor ) throws IOException { return this.write( that, writer, nIndentFactor, 0 ); } @Override public Writer write ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that != null ) { if ( that instanceof JSONObject ) { ((JSONObject)that).write(writer, nIndentFactor, nIndentBlankNum ); } else if ( that instanceof JSONArray ) { ((JSONArray)that).write(writer, nIndentFactor, nIndentBlankNum ); } else if ( that == JSON.NULL ) { writer.write( that.toString() ); } else if ( that instanceof String ){ StringUtils.addSlashes( (String) that, writer, true ); } else if ( that instanceof Map ) { this.write( (Map) that, writer, nIndentFactor, nIndentBlankNum ); /*(new JSONMaptron((Map)jsonValue, true)).write(writer, nIndentFactor, nIndentBlankNum );*/ } else if ( that instanceof Collection ) { this.write( (Collection)that, writer, nIndentFactor, nIndentBlankNum ); /*(new JSONArraytron((Collection)jsonValue)).write(writer, nIndentFactor, nIndentBlankNum );*/ } else if ( that.getClass().isArray() ) { this.writeArray( that, writer, nIndentFactor, nIndentBlankNum ); /*(new JSONArraytron(jsonValue)).write(writer, nIndentFactor, nIndentBlankNum );*/ } else if ( that instanceof Number ) { writer.write( JSONUtils.numberToString((Number)that) ); } else if ( that instanceof Boolean ) { writer.write(that.toString()); } else if ( that instanceof JSONString ) { String o; try { o = ((JSONString)that).toJSONString(); } catch ( Exception e ) { throw new JSONException(e); } writer.write( o != null ?
o.toString() : StringUtils.jsonQuote(that.toString()) ); } else if ( that instanceof Map.Entry ) { this.write( (Map.Entry) that, writer, nIndentFactor, nIndentBlankNum ); } else if ( that instanceof Pinenut ){ this.write( (Pinenut)that, writer, nIndentFactor, nIndentBlankNum ); } else { this.writeUnknownAnyObject( that, writer, nIndentFactor, nIndentBlankNum ); } } else { writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT ); } return writer; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/GenericJSONMarshal.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.json.handler.EncodeHandlerRegistry; import com.pinecone.framework.util.json.handler.GenericEncodeHandlerRegistry; import com.pinecone.framework.util.json.handler.JSONObjectEncodeHandler; import com.pinecone.framework.util.json.homotype.AnnotatedJSONInjector; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.framework.util.json.homotype.GenericBeanJSONEncoder; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; public class GenericJSONMarshal extends GenericJSONEncoder implements JSONMarshal { protected long mnMode; protected BeanJSONEncoder mBeanEncoder; protected EncodeHandlerRegistry mEncodeHandlerRegistry; public GenericJSONMarshal( long mode, @Nullable BeanJSONEncoder beanEncoder, @Nullable EncodeHandlerRegistry registry ) { super(); this.mnMode = mode; if ( beanEncoder == null ) { this.mBeanEncoder = new RecursiveBeanJSONEncoder( this ); } else { this.mBeanEncoder = beanEncoder; } if ( registry == null ) { this.mEncodeHandlerRegistry = new GenericEncodeHandlerRegistry(); } else { this.mEncodeHandlerRegistry = registry; } } public GenericJSONMarshal( long mode ) { this( mode, null, null ); } public GenericJSONMarshal() { this( JSONMarshalMode.MODE_DEFAULT ); } @Override public void setMode( long mode ) { this.mnMode = mode; } @Override public long getMode() { return this.mnMode; } @Override public void setBeanEncoder( BeanJSONEncoder encoder ) { this.mBeanEncoder = encoder; } @Override public BeanJSONEncoder getBeanEncoder() { return this.mBeanEncoder; } @Override public void setEncodeHandlerRegistry( EncodeHandlerRegistry registry ) { this.mEncodeHandlerRegistry = registry; } @Override public EncodeHandlerRegistry getEncodeHandlerRegistry() { return this.mEncodeHandlerRegistry; } public void registerEncodeHandler( Class type, JSONObjectEncodeHandler handler ) { this.mEncodeHandlerRegistry.register( type, handler ); } protected boolean tryCustomEncodeHandler( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( that == null ) { return false; } JSONObjectEncodeHandler handler = this.mEncodeHandlerRegistry.get( that.getClass() ); if ( handler == null ) { return false; } handler.serialize( that, writer, nIndentFactor, nIndentBlankNum, this ); return true; } protected boolean tryBeanMode( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( ( this.mnMode & JSONMarshalMode.MODE_BEAN_GETTER ) == 0L ) { return false; } this.mBeanEncoder.encode( that, writer, nIndentFactor, nIndentBlankNum ); return true; } protected void collectAnnotatedFields( Object that, List<Object[]>
list ) { Field[] fields = that.getClass().getDeclaredFields(); for ( Field field : fields ) { ReflectionUtils.makeAccessible( field ); String szKey = AnnotatedJSONInjector.getAnnotatedKey( field ); if ( szKey == null ) { continue; } if ( szKey.isEmpty() ) { szKey = field.getName(); } Object value; try { value = field.get( that ); } catch ( IllegalAccessException e ) { value = null; } list.add( new Object[]{ szKey, field, value } ); } } protected void collectAnyFields( Object that, List<Object[]> list ) { Field[] fields = that.getClass().getDeclaredFields(); for ( Field field : fields ) { ReflectionUtils.makeAccessible( field ); String szKey = field.getName(); Object value; try { value = field.get( that ); } catch ( IllegalAccessException e ) { value = null; } list.add( new Object[]{ szKey, field, value } ); } } protected void collectPublicFields( Object that, List<Object[]> list ) { Field[] fields = that.getClass().getFields(); for ( Field field : fields ) { String szKey = field.getName(); Object value; try { value = field.get( that ); } catch ( IllegalAccessException e ) { value = null; } list.add( new Object[]{ szKey, field, value } ); } } @Override public Writer writeUnidentifiedObject( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { if ( this.tryCustomEncodeHandler( that, writer, nIndentFactor, nIndentBlankNum ) ) { return writer; } if ( this.tryBeanMode( that, writer, nIndentFactor, nIndentBlankNum ) ) { return writer; } List<Object[]> list = new ArrayList<>(); if ( ( this.mnMode & JSONMarshalMode.MODE_ANNOTATED_FIELD ) != 0L ) { this.collectAnnotatedFields( that, list ); } if ( ( this.mnMode & JSONMarshalMode.MODE_ANY_FIELD ) != 0L ) { this.collectAnyFields( that, list ); } if ( ( this.mnMode & JSONMarshalMode.MODE_PUBLIC_FIELD ) != 0L ) { this.collectPublicFields( that, list ); } if ( list.isEmpty() ) { return super.writeUnidentifiedObject( that, writer, nIndentFactor, nIndentBlankNum); } writer.write( '{' ); boolean bHasNextElement = false; int nNewIndent = nIndentBlankNum + nIndentFactor; int i = 0; for ( Object[] item : list ) { GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement ); this.writeKeyValue( writer, item[0], item[2], nIndentFactor, nIndentBlankNum ); bHasNextElement = true; ++i; } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum); writer.write( '}' ); return writer; } public static class RecursiveBeanJSONEncoder extends GenericBeanJSONEncoder { protected GenericJSONMarshal mJSONMarshal; public RecursiveBeanJSONEncoder( GenericJSONMarshal marshal ) { this.mJSONMarshal = marshal; } @Override public String valueJsonify( Object val ) { StringWriter w = new StringWriter(); try { synchronized( w.getBuffer() ) { this.valueJsonify( val, w, 0,0 ); return w.toString(); } } catch ( IOException e ){ return null; } } @Override public void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { this.mJSONMarshal.write( val, writer, nIndentFactor, nIndentBlankNum ); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JPlus.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.FamilyContext; public abstract class JPlus { public static Object parse ( String szJsonString ) { return ( new JPlusCursorParser( szJsonString, new JPlusContext() ) ).nextValue(); } public
static Object parse ( String szJsonString, FamilyContext context ) { return ( new JPlusCursorParser( szJsonString, context ) ).nextValue(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JPlusContext.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.OverridableFamily; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class JPlusContext implements OverridableFamily, Cloneable { protected List mGlobalScopes; protected Object mParent; protected Object mThisScope; protected Object mRoot; protected Path[] mParentPaths; protected boolean mOverriddenAffinity; public JPlusContext() { this( null, null, null, new Path[0] ); } public JPlusContext( Object parent, Object thisScope, Object root, Path[] parentPaths ) { this( new ArrayList<>(), parent, thisScope, root, parentPaths ); } public JPlusContext( List globalScopes, Object parent, Object thisScope, Object root, Path[] parentPaths ) { this.mGlobalScopes = globalScopes; this.mParent = parent; this.mThisScope = thisScope; this.mRoot = root; this.mParentPaths = parentPaths; } public JPlusContext( Object globalScope, Object parent, Object thisScope, Object root, Path[] parentPaths ) { this( parent, thisScope, root, parentPaths ); this.addGlobalScope( globalScope ); } @Override public List getGlobalScopes() { return this.mGlobalScopes; } @Override public Object parent() { return this.mParent; } @Override public Object thisScope() { return this.mThisScope; } @Override public Object root() { return this.mRoot; } @Override public Path[] getParentPaths() { return this.mParentPaths; } @Override public JPlusContext setGlobalScopes(List globalScopes) { this.mGlobalScopes = globalScopes; return this; } @Override public JPlusContext setParent(Object parent) { this.mParent = parent; return this; } @Override public JPlusContext setThisScope(Object thisScope) { this.mThisScope = thisScope; return this; } @Override public JPlusContext setRoot(Object root) { this.mRoot = root; return this; } @Override public JPlusContext setParentPaths(Path[] parentPaths) { this.mParentPaths = parentPaths; return this; } @Override public JPlusContext addParentPath( Path newPath ) { int length = this.mParentPaths.length; Path[] newParentPaths = Arrays.copyOf( this.mParentPaths, length + 1 ); newParentPaths[length] = newPath; this.mParentPaths = newParentPaths; return this; } @Override public JPlusContext addGlobalScope( Object scope ) { this.getGlobalScopes().add( scope ); return this; } /** * isOverriddenAffinity * @return if $this, $super and $root, are forced overridden by global scope. 
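* <p>Added sketch (illustrative, not from the original source): with affinity overriding
* enabled, a "this" / "super" / "__root__" entry found in a global scope replaces the
* parser's own binding when the scope domain is built, e.g. (globalMap below is a
* hypothetical caller-supplied Map):
*     JPlusContext ctx = new JPlusContext();
*     ctx.setOverriddenAffinity( true );
*     ctx.addGlobalScope( globalMap );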
*/ @Override public boolean isOverriddenAffinity() { return this.mOverriddenAffinity; } @Override public void setOverriddenAffinity( boolean overrideAffinity ) { this.mOverriddenAffinity = overrideAffinity; } @Override public JPlusContext clone() { JPlusContext clone; try { clone = (JPlusContext) super.clone(); } catch ( CloneNotSupportedException e ) { throw new InternalError(e); } return clone; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JPlusCursorParser.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.ErrorStrings; import com.pinecone.framework.system.prototype.FamilyContext; import com.pinecone.framework.system.prototype.OverridableFamily; import com.pinecone.framework.unit.MultiScopeMap; import com.pinecone.framework.unit.MultiScopeMaptron; import com.pinecone.framework.util.template.TemplateParser; import java.io.*; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; /** * Pinecone For Java JPlusCursorParser [ Bean Nuts Almond Dragon, JSON+ For Pinecone Java ] * Copyright © 2008 - 2024 Bean Nuts Foundation ( Dragon King ) All rights reserved. [Harald.E / JH.W] * ***************************************************************************************** * Author: undefined * Last Modified Date: 2024-02-17 * ***************************************************************************************** * JSON Plus is an enhanced JSON5 Edition * { * "parent": { k1: 123 }, * "next": { * key: #include "path", * key2: #"${this.key}", * key3: #"${root.parent.k1}", // key3: #"${parent.k1}", * key4: "normal-string", // key4: normal-string, "key4": normal-string, 'key4': normal-string * key5: #extends root.parent, * } * } * Support: JSON, JSON5, JSON Plus * ***************************************************************************************** */ public class JPlusCursorParser extends JSONCursorParser { protected FamilyContext mScopeContext; public JPlusCursorParser( Reader reader, FamilyContext scopeContext ) { super(reader); this.mScopeContext = scopeContext; } public JPlusCursorParser( InputStream inputStream, FamilyContext scopeContext ) throws JSONParseException { this( (Reader)(new InputStreamReader(inputStream)), scopeContext ); } public JPlusCursorParser( String s, FamilyContext scopeContext ) { this( (Reader)(new StringReader(s)), scopeContext ); } public FamilyContext getScopeContext() { return this.mScopeContext; } public void setScopeContext( JPlusContext context ) { this.mScopeContext = context; } @SuppressWarnings("unchecked") protected Map construct_reinterpret_scope_domain() { FamilyContext context = this.getScopeContext(); List> dummy_root = new ArrayList<>(); dummy_root.add( new MultiScopeMaptron<>( (Map)context.root() ) ); List globalScopes = context.getGlobalScopes(); if( globalScopes != null ) { for ( Object scope : globalScopes ) { dummy_root.add( new MultiScopeMaptron<>( (Map)scope ) ); } } Object dyThisScope = context.thisScope(); Object dySuperScope = context.parent(); Object dyRootScope = context.root(); Map thisScope; if( dyThisScope instanceof Map ){ thisScope = (Map) dyThisScope; } else if( dyThisScope instanceof JSONArray ){ thisScope = (Map)( (JSONArray) dyThisScope ).toJSONObject(); } else { thisScope = new TreeMap<>(); } MultiScopeMaptron scope = new MultiScopeMaptron<>( null, dummy_root ); if( context instanceof OverridableFamily 
&& ( (OverridableFamily)context ).isOverriddenAffinity() ) { Object $this = scope.get( "this" ); if( $this != null ) { dyThisScope = $this; } Object $super = scope.get( "super" ); if( $super != null ) { dySuperScope = $super; } Object $root = scope.get( "__root__" ); if( $root != null ) { dyRootScope = $root; } } scope.setThisScope( thisScope ); scope.elevate( new TreeMap<>() ); scope.put( "this" , dyThisScope ); scope.put( "super" , dySuperScope ); scope.put( "__root__" , dyRootScope ); scope.put( "__scope__" , (Object) scope ); return scope; } protected Object reinterpret_eval_object( StringBuilder token ) { Map scope = this.construct_reinterpret_scope_domain(); TemplateParser tp = new TemplateParser( new StringReader(token.toString()), scope ); return tp.evalValue(); } protected void reinterpret_eval_token( StringBuilder token ) { Map scope = this.construct_reinterpret_scope_domain(); TemplateParser tp = new TemplateParser( new StringReader(token.toString()), scope ); token.setLength(0); token.append( tp.eval() ); } /** * override_object_with_parent * @param dyThisScope * @param parent * @return a boolean, which indicates whether it is a qualified K-V-Based object. */ @SuppressWarnings("unchecked") protected boolean reinterpret_override_object_with_parent( Object dyThisScope, Object parent ) { if( parent instanceof Map ) { if( dyThisScope instanceof Map ) { ( (Map)dyThisScope ).putAll( (Map)parent ); } else if( dyThisScope instanceof List ) { ( (List)dyThisScope ).addAll( ( (Map)parent ).values() ); } } else if( parent instanceof List ) { List l = (List)parent; if( dyThisScope instanceof Map ) { int i = 0; for ( Object item : l ) { ( (Map)dyThisScope ).put( String.valueOf(i), item ); ++i; } } else if( dyThisScope instanceof List ) { ( (List)dyThisScope ).addAll( l ); } } else { return false; } return true; } protected Object reinterpret_include_path_from_context_paths( StringBuilder path ) { try{ /* Notice, currently context should be Parent NOT 'this'! Under object context, the parser parses sequentially from sibling to sibling. */ return ( new JPlusCursorParser( new FileReader(path.toString()), this.getScopeContext() ) ).nextValue( null, this.getScopeContext().parent(), null ); } catch ( IOException e ){ Path[] parentPaths = this.getScopeContext().getParentPaths(); Object ret = null; for ( int i = 0; i < parentPaths.length; ++i ) { try{ ret = ( new JPlusCursorParser( new FileReader( parentPaths[i].resolve(path.toString()).toFile() ), this.getScopeContext() ) ).nextValue( null, this.getScopeContext().parent(), null ); } catch ( IOException e1 ) { ret = null; } if( ret != null ) { break; } } if( ret == null ) { throw this.syntaxError( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + "What-> '" + path + "'" ); } return ret; } } protected boolean select_reinterpret_token( StringBuilder token, Object[] ret ) { String szToken = token.toString(); if( szToken.isEmpty() ) { return false; } char c = szToken.charAt(0); switch ( c ) { case 'i':{ if( szToken.equals( "include" ) ) { c = this.next(); token = this.devour_follow_string( c ); ret[0] = this.reinterpret_include_path_from_context_paths( token ); return true; } else { return false; } } case 'e':{ if( szToken.equals( "extends" ) ) { c = this.next(); token = this.devour_follow_string( c ); Object parent = this.reinterpret_eval_object( token ); Object dyThisScope = this.getScopeContext().thisScope(); if( !this.reinterpret_override_object_with_parent( dyThisScope, parent ) ) { throw this.syntaxError( "Overridden object should be a K-V-Based object."
); } throw new JSONParserRedirectException( 1 ); /* Redirect key-parse sequence and skip. */ } else { return false; } } case 'r':{ if( szToken.equals( "ref" ) ) { c = this.next(); token = this.devour_follow_string( c ); /* #ref T, T->STRING */ ret[0] = this.reinterpret_eval_object( token ); return true; } else { return false; } } default: { this.reinterpret_eval_token( token ); ret[0] = token; return false; } } } protected StringBuilder devour_follow_string( char currentChar ) { switch ( currentChar ) { case '\'': case '\"':{ return this.nextString( currentChar ); } } StringBuilder sb; for ( sb = new StringBuilder(); currentChar >= ' ' && ",:]}/\\\"\'[{;=#& ".indexOf(currentChar) < 0; currentChar = this.next() ) { sb.append(currentChar); } if( currentChar != ' ' ) { this.back(); } return sb; } @Override protected Object eval_next_string_token( StringBuilder sb, char currentChar ) { switch ( currentChar ) { case '#':{ currentChar = this.next(); sb = this.devour_follow_string( currentChar ); Object[] ret = new Object[1]; boolean bIsReferObject = this.select_reinterpret_token( sb, ret ); currentChar = this.next(); /* [xxx\'] <- at '\'', to get follow, before eval_next_string_token invoked */ this.back(); if( bIsReferObject ){ return ret[0]; } break; } case '&':{ currentChar = this.next(); sb = this.devour_follow_string( currentChar ); return this.reinterpret_eval_object( sb ); } default:{ break; } } return super.eval_next_string_token( sb, currentChar ); } protected void apply_inner_patriarch( Object parent, Object neo ) { if( this.getScopeContext().parent() == null && this.getScopeContext().root() == null ) { this.getScopeContext().setRoot ( neo ); } this.getScopeContext().setThisScope ( neo ); this.getScopeContext().setParent ( parent ); } @Override public Object nextValue( Object indexKey, Object parent, Object[] args ) throws JSONParseException { char c = this.nextClean(); switch(c) { case '"': case '\'': { return this.nextString(c).toString(); } case '[': { this.back(); Object lastThis = this.getScopeContext().thisScope(); JSONArray p = new JSONArraytron( ); this.apply_inner_patriarch( parent, p ); p.jsonDecode( this ); this.getScopeContext().setThisScope( lastThis ); return p; } case '{': { this.back(); JSONObject p = new JSONMaptron( ); Object lastThis = this.getScopeContext().thisScope(); this.apply_inner_patriarch( parent, p ); p.jsonDecode( this ); this.getScopeContext().setThisScope( lastThis ); return p; } default: { StringBuilder sb = this.eval_next_string( c ); return this.eval_next_string_token(sb, c); } } } @Override public void handleRedirectException( JSONParserRedirectException e ) { if( e.getContext() != null ) { Object[] context = (Object[])e.getContext(); String key = (String) context[0]; Object val = context[1]; if( key != null ){ throw this.syntaxError( "Macro function '#extends' can't be a value."
); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSON.java ================================================ package com.pinecone.framework.util.json; import java.io.IOException; import java.io.Reader; import java.io.StringWriter; public final class JSON { public static final Object NULL = new JSON.Null(); public static Object parse ( String szJsonString ) { return ( new JSONCursorParser( szJsonString ) ).nextValue(); } public static String stringify ( Object that ) { return JSON.encode( that, JSONEncoder.BASIC_JSON_ENCODER ); } public static String stringify ( Object that, int nIndentFactor ) { return JSON.encode( that, nIndentFactor, JSONEncoder.BASIC_JSON_ENCODER ); } public static String marshal ( Object that ) { return JSON.encode( that, JSONEncoder.BASIC_JSON_MARSHAL ); } public static String marshal ( Object that, long mode ) { return JSON.encode( that, new GenericJSONMarshal( mode ) ); } public static String render ( Object that ) { return JSON.encode( that, JSONEncoder.COMMON_JSON_MARSHAL ); } public static String unbean ( Object that ) { return JSON.encode( that, JSONEncoder.BEAN_JSON_MARSHAL ); } public static <T> T unmarshal ( String szJsonString, Class<T> classType ) { ObjectJSONCursorUnmarshal unmarshal = new ObjectJSONCursorUnmarshal( szJsonString, classType ); return classType.cast( unmarshal.nextValue() ) ; } public static <T> T unmarshal ( Reader reader, Class<T> classType ) { ObjectJSONCursorUnmarshal unmarshal = new ObjectJSONCursorUnmarshal( reader, classType ); return classType.cast( unmarshal.nextValue() ) ; } @SuppressWarnings( "unchecked" ) public static <T> T unmarshal( String json, TypeReference<T> typeRef ) { ObjectJSONCursorUnmarshal u = new ObjectJSONCursorUnmarshal( json, typeRef.getType() ); return (T) u.nextValue(); } @SuppressWarnings( "unchecked" ) public static <T> T unmarshal ( Reader reader, TypeReference<T> typeRef ) { ObjectJSONCursorUnmarshal u = new ObjectJSONCursorUnmarshal( reader, typeRef.getType() ); return (T) u.nextValue(); } public static String encode ( Object that, JSONEncoder encoder ) { return JSON.encode( that, 0, encoder ); } public static String encode ( Object that, int nIndentFactor, JSONEncoder encoder ) { StringWriter w = new StringWriter(); try { synchronized( w.getBuffer() ) { return encoder.write( that, w, nIndentFactor,0 ).toString(); } } catch ( IOException e ){ return null; } } public static final class Null { private Null() { } @Override protected final Object clone() { try{ super.clone(); } catch ( CloneNotSupportedException e ) { throw new InternalError(e); } return this; } @Override public boolean equals( Object that ) { if ( that == this || that instanceof Null ) { return true; } return that == null; } @Override public String toString() { return this.toJSONString(); } public String toJSONString() { return "null"; } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONArray.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.system.prototype.TypeIndex; import java.io.IOException; import java.io.Serializable; import java.io.Writer; import java.util.*; public interface JSONArray extends PineUnit, List, JSONDictium, Serializable, RandomAccess, Cloneable { JSONArray jsonDecode( ArchCursorParser x ) throws JSONException ; JSONArray jsonDecode( String source ) throws
JSONException ; void assimilate( List that ); List getArray(); Object front() ; Object back() ; int length() ; /** Basic List **/ @Override int size() ; @Override boolean isEmpty() ; @Override boolean contains( Object o ) ; @Override Iterator iterator() ; @Override Object[] toArray() ; @Override <T> T[] toArray( T[] a ) ; @Override boolean add( Object e ) ; @Override void clear() ; @Override Object remove( int index ) ; JSONArray xRemove( int index ) ; @Override boolean remove( Object o ) ; JSONArray xRemove(Object o) ; @Override Object erase( Object key ) ; @Override boolean containsAll( Collection c ) ; @Override boolean addAll( Collection c ) ; JSONArray xAddAll( Collection c ) ; @Override boolean addAll( int index, Collection c ) ; JSONArray xAddAll( int index, Collection c ) ; @Override boolean removeAll( Collection c ) ; JSONArray xRemoveAll( Collection c ) ; @Override boolean retainAll( Collection c ) ; JSONArray xRetainAll(Collection c) ; @Override Object set( int index, Object element ) ; JSONArray xSet( int index, Object element ) ; Object affirm( int index ) ; JSONObject affirmObject( int index ) ; JSONArray affirmArray( int index ) ; @Override boolean containsValue( Object value ) ; @Override void add( int index, Object element ) ; JSONArray xAdd( int index, Object element ) ; @Override int indexOf( Object o ) ; @Override int lastIndexOf( Object o ) ; @Override ListIterator listIterator() ; @Override ListIterator listIterator(int index) ; @Override List subList( int fromIndex, int toIndex ) ; @Override Object get( int index ) throws JSONException ; @Override Object get( Object key ) ; boolean getBoolean( int index ) throws JSONException ; double getDouble( int index ) throws JSONException ; int getInt( int index ) throws JSONException ; JSONArray getJSONArray( int index ) throws JSONException ; JSONObject getJSONObject( int index ) throws JSONException ; long getLong( int index ) throws JSONException ; String getString( int index ) throws JSONException ; byte[] getBytes( int index ) throws JSONException ; boolean isNull( int index ) ; String join( String separator ) throws JSONException ; Object opt( int index ) ; boolean optBoolean( int index ) ; boolean optBoolean(int index, boolean defaultValue) ; double optDouble( int index ) ; double optDouble( int index, double defaultValue ) ; int optInt( int index ) ; int optInt( int index, int defaultValue ) ; JSONArray optJSONArray( int index ) ; JSONObject optJSONObject( int index ) ; long optLong( int index ) ; long optLong( int index, long defaultValue ) ; String optString( int index ) ; String optString( int index, String defaultValue ) ; byte[] optBytes( int index ) ; byte[] optBytes( int index, byte[] defaultValue ) ; @Override Object opt( Object key ) ; @Override boolean optBoolean( Object key ) ; @Override double optDouble( Object key ) ; @Override int optInt( Object key ) ; @Override JSONArray optJSONArray( Object key ) ; @Override JSONObject optJSONObject( Object key ) ; @Override long optLong( Object key ) ; @Override String optString( Object key ) ; @Override byte[] optBytes( Object key ) ; @Override JSONArray insert( Object key, Object val ) ; JSONArray put( boolean value ) ; JSONArray put( Collection value ) ; JSONArray put( double value ) throws JSONException ; JSONArray put( int value ) ; JSONArray put( long value ) ; JSONArray put( Map value ) ; JSONArray put( Object value ) ; JSONArray put( JSONObject value ) ; JSONArray put( JSONArray value ) ; JSONArray put( int index, boolean value ) throws JSONException ; JSONArray
put( int index, Collection value ) throws JSONException ; JSONArray put( int index, double value ) throws JSONException ; JSONArray put( int index, int value ) throws JSONException ; JSONArray put( int index, long value ) throws JSONException ; JSONArray put( int index, Map value ) throws JSONException ; JSONArray put( int index, Object value ) throws JSONException ; JSONArray put( int index, JSONObject value ) throws JSONException ; JSONArray put( int index, JSONArray value ) throws JSONException ; JSONObject toJSONObject( JSONArray names ) throws JSONException ; @Override JSONObject toJSONObject() ; @Override JSONArray toJSONArray() ; @Override Set entrySet() ; @Override Collection values() ; @Override Map toMap() ; @Override List toList() ; @Override boolean hasOwnProperty( Object elm ) ; boolean hasOwnProperty( int elm ) ; @Override boolean containsKey( Object elm ) ; boolean containsKey( int elm ) ; @Override String toJSONString() ; String toJSONStringI( int nIndentFactor ) ; String toJSONString( int nIndentFactor ) throws IOException ; @Override TypeIndex prototype() ; @Override String prototypeName() ; @Override boolean isPrototypeOf( TypeIndex that ) ; JSONArray clone() ; Writer write(Writer writer) throws IOException ; Writer write( Writer writer, int nIndentFactor ) throws IOException ; Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONArrayDecoder.java ================================================ package com.pinecone.framework.util.json; import java.lang.reflect.Type; public abstract class JSONArrayDecoder implements JSONDecoder { protected abstract void add( Object self, Object parent, Object indexKey, Object val ); @Override public void decode( Object self, Object parent, Object indexKey, ArchCursorParser x, Type genericTypes ) { if ( x.nextClean() != '[' ) { throw x.syntaxError("A JSONArray text must start with '['"); } else if ( x.nextClean() != ']' ) { x.back(); int i = 0; while( true ) { if ( x.nextClean() == ',' ) { x.back(); this.add( self, parent, indexKey, JSON.NULL ); } else { x.back(); try { Object[] args = null; if( genericTypes != null ) { args = new Object[]{ genericTypes }; } this.add( self, parent, indexKey, x.nextValue( i, self, args ) ); ++i; } catch ( JSONParserRedirectException e ) { x.handleRedirectException( e ); } } switch( x.nextClean() ) { case ',': { if (x.nextClean() == ']') { return; } x.back(); break; } case ']': { return; } default: { throw x.syntaxError("Expected a ',' or ']'"); } } } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONArraytron.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.system.prototype.TypeIndex; import com.pinecone.framework.util.StringUtils; import java.io.IOException; import java.io.Writer; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Iterator; import java.util.ListIterator; import java.util.AbstractSet; import java.util.Spliterator; import java.util.Spliterators; import java.util.NoSuchElementException; public class JSONArraytron extends ArchJSONArray 
implements JSONArray { private List mList; protected transient Set<Map.Entry<Integer, Object>> entrySet ; public JSONArraytron() { this.mList = new ArrayList<>(); } public JSONArraytron( ArchCursorParser x ) throws JSONException { this(); this.jsonDecode0( x ); } public JSONArraytron( String source ) throws JSONException { this(new JSONCursorParser(source)); } public JSONArraytron( Collection collection ) { this.mList = new ArrayList<>(); if ( collection != null ) { Iterator iter = collection.iterator(); while( iter.hasNext() ) { this.mList.add(JSONUtils.wrapValue( iter.next()) ); } } } public JSONArraytron( Object array ) throws JSONException { this(); if (!array.getClass().isArray()) { throw new JSONException("JSONArray initial value should be a string, collection, or array."); } else { int length = Array.getLength(array); for(int i = 0; i < length; ++i) { this.put(JSONUtils.wrapValue(Array.get(array, i))); } } } public JSONArraytron( List array, boolean bAssimilateMode ) throws JSONException { if( bAssimilateMode ){ this.mList = array; } else { this.mList = new ArrayList<>(); if ( array != null ) { for ( Object o : array ) { this.put( JSONUtils.wrapValue(o) ); } } } } public JSONArraytron( List array ) throws JSONException { this( array, false ); } @Override protected void jsonDecode0( ArchCursorParser x ) throws JSONException { JSONArrayDecoder.INNER_JSON_ARRAY_DECODER.decode( this, null, null, x, null ); } @Override public JSONArraytron jsonDecode( ArchCursorParser x ) throws JSONException { this.clear(); this.jsonDecode0( x ); return this; } @Override public JSONArraytron jsonDecode( String source ) throws JSONException { return this.jsonDecode( new JSONCursorParser(source) ); } @Override public void assimilate( List that ){ this.mList = that; } @Override public List getArray(){ return this.mList; } /** Basic List **/ @Override public int size() { return this.mList.size(); } @Override public boolean isEmpty() { return this.mList.isEmpty(); } @Override public boolean contains( Object o ) { return this.mList.contains(o); } @Override public Iterator iterator() { return this.mList.iterator(); } @Override public Object[] toArray() { return this.mList.toArray(); } @Override public <T> T[] toArray( T[] a ) { return (T[])this.mList.toArray(a); } @Override protected boolean innerListAdd( Object e) { return this.mList.add( e ); } @Override public void clear() { this.mList.clear(); } @Override protected boolean innerListRemove( Object index ) { return this.mList.remove( index ); } @Override public boolean containsAll( Collection c ) { return this.mList.containsAll(c); } @Override public boolean addAll( Collection c ) { return this.mList.addAll(c); } @Override public boolean addAll( int index, Collection c ) { return this.mList.addAll(index, c); } @Override public boolean removeAll( Collection c ) { return this.mList.removeAll(c); } @Override public boolean retainAll( Collection c ) { return this.mList.retainAll(c); } @Override public void add( int index, Object element ) { this.mList.add( index, element ); } @Override public int indexOf( Object o ) { return this.mList.indexOf(o); } @Override public int lastIndexOf( Object o ) { return this.mList.lastIndexOf(o); } @Override public ListIterator listIterator() { return this.mList.listIterator(); } @Override public ListIterator listIterator(int index) { return this.mList.listIterator(index); } @Override public List subList( int fromIndex, int toIndex ) { return this.mList.subList(fromIndex, toIndex); } @Override protected Object innerListGet( int key ) { return this.mList.get(
key ); } @Override protected Object innerListSet( int index, Object element ) { return this.mList.set( index, element ); } @Override public JSONArraytron put( Collection value ) { this.put((Object)(new JSONArraytron(value))); return this; } @Override public JSONArraytron put( int index, Collection value ) throws JSONException { this.put(index, (Object)( new JSONArraytron(value)) ); return this; } @Override public JSONArraytron put( int index, Map value ) throws JSONException { this.put(index, (Object)(new JSONMaptron(value))); return this; } @Override public Set<Map.Entry<Integer, Object>> entrySet() { Set<Map.Entry<Integer, Object>> es; return (es = this.entrySet) == null ? (this.entrySet = new ListEntrySet()) : es; } @Override public JSONArraytron clone() { JSONArraytron that = (JSONArraytron) super.clone(); that.mList = new ArrayList<>(); for ( Object row : this.mList ) { that.put( JSONUtils.cloneElement( row ) ); } return that; } @Override public Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { return JSONEncoder.BASIC_JSON_ENCODER.write( this.mList, writer, nIndentFactor, nIndentBlankNum ); } protected static boolean valEquals( Object o1, Object o2 ) { return (o1==null ? o2==null : o1.equals(o2)); } protected static class JSONArrayEntry implements Map.Entry<Integer, Object>, Pinenut { Integer key; Object value; JSONArrayEntry( Integer key, Object value ) { this.key = key; this.value = value; } @Override public Integer getKey() { return this.key; } @Override public Object getValue() { return this.value; } @Override public Object setValue( Object value ) { Object oldValue = this.value; this.value = value; return oldValue; } public void setKey( Integer key ) { this.key = key; } @Override public boolean equals(Object o) { if (!(o instanceof Map.Entry)) return false; Map.Entry e = (Map.Entry)o; return valEquals(key,e.getKey()) && valEquals(value,e.getValue()); } @Override public int hashCode() { int keyHash = (key==null ? 0 : key.hashCode()); int valueHash = (value==null ?
0 : value.hashCode()); return keyHash ^ valueHash; } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return "{" + StringUtils.jsonQuote( this.key.toString() ) + ":" + JSON.stringify( this.value ) + "}"; } @Override public TypeIndex prototype() { return Prototype.typeid( this ); } } protected class ListEntrySet extends AbstractSet<Map.Entry<Integer, Object>> { public final int size() { return JSONArraytron.this.size(); } public final void clear() { JSONArraytron.this.clear(); } public final Iterator<Map.Entry<Integer, Object>> iterator() { return new ListEntryIterator(); } public final boolean contains( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); Object v = JSONArraytron.this.get(key); return v != null && v.equals(e.getValue()); } public final boolean remove( Object o ) { if ( this.contains(o) ) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); return JSONArraytron.this.remove(key) ; } return false; } public final Spliterator<Map.Entry<Integer, Object>> spliterator() { return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT ); } } protected abstract class JAListEntryIterator { protected Iterator currentIterator; protected int index; protected JSONArrayEntry dummyEntry; public JAListEntryIterator() { this.index = 0; this.currentIterator = JSONArraytron.this.iterator(); this.dummyEntry = new JSONArrayEntry( this.index, null ); } public boolean hasNext() { return this.currentIterator.hasNext(); } protected Map.Entry<Integer, Object> nextNode() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } this.dummyEntry.setKey( this.index++ ); this.dummyEntry.setValue( this.currentIterator.next() ); return this.dummyEntry; } public void remove() { this.currentIterator.remove(); } } protected final class ListKeyIterator extends JAListEntryIterator implements Iterator<Integer> { public final Integer next() { return nextNode().getKey(); } } protected final class ListValueIterator extends JAListEntryIterator implements Iterator<Object> { public final Object next() { return nextNode().getValue(); } } protected final class ListEntryIterator extends JAListEntryIterator implements Iterator<Map.Entry<Integer, Object>> { public final Map.Entry<Integer, Object> next() { return nextNode(); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONCompiler.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Pinenut; import java.io.IOException; import java.io.OutputStream; import java.util.Collection; import java.util.Map; public interface JSONCompiler extends Pinenut { OutputStream compile( Map that, OutputStream outputStream ) throws IOException; OutputStream compile( Collection that, OutputStream outputStream ) throws IOException ; OutputStream compile( Object[] those, OutputStream outputStream ) throws IOException ; OutputStream compile( Object that, OutputStream outputStream ) throws IOException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONCompilerException.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.ParseException; public class JSONCompilerException extends ParseException { private static final long serialVersionUID = 0L; private Throwable cause; public JSONCompilerException ( String what ) { super( what ); } public JSONCompilerException
( String what, int errorOffset ) { super( what, errorOffset ); } public JSONCompilerException ( String message, int errorOffset, Throwable cause ) { super( message, errorOffset ); this.cause = cause; } public JSONCompilerException ( Throwable cause, int errorOffset ) { super( cause.getMessage(), errorOffset ); this.cause = cause; } public JSONCompilerException ( Throwable cause ) { super( cause.getMessage() ); this.cause = cause; } @Override public Throwable getCause() { return this.cause; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONCursorParser.java ================================================ package com.pinecone.framework.util.json; import java.io.InputStream; import java.io.Reader; public class JSONCursorParser extends ArchCursorParser { public JSONCursorParser( Reader reader ) { super( reader ); } public JSONCursorParser( InputStream inputStream ) throws JSONParseException { super( inputStream ); } public JSONCursorParser( String s ) { super( s ); } @Override protected JSONArray newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) { return new JSONArraytron( parser ); } @Override protected JSONObject newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) { return new JSONMaptron( parser ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONDecoder.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Pinenut; import java.lang.reflect.Type; import java.util.List; import java.util.Map; public interface JSONDecoder extends Pinenut { JSONDecoder INNER_JSON_OBJECT_DECODER = new JSONObjectDecoder() { @Override protected void set( Object self, String key, Object val ) { ( (JSONObject) self ).put( key, val ); } }; JSONDecoder INNER_JSON_ARRAY_DECODER = new JSONArrayDecoder() { @Override protected void add( Object self, Object parent, Object indexKey, Object val ) { ( (JSONArray) self ).add( val ); } }; JSONDecoder INNER_MAP_DECODER = new JSONObjectDecoder() { @Override @SuppressWarnings( "unchecked" ) protected void set( Object self, String key, Object val ) { ( (Map) self ).put( key, val ); } }; JSONDecoder INNER_LIST_DECODER = new JSONArrayDecoder() { @Override @SuppressWarnings( "unchecked" ) protected void add( Object self, Object parent, Object indexKey, Object val ) { ( (List) self ).add( val ); } }; void decode( Object self, Object parent, Object indexKey, ArchCursorParser x, Type genericTypes ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONDecompiler.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Pinenut; public interface JSONDecompiler extends Pinenut { Object nextValue( Object parent ) throws JSONCompilerException ; Object nextValue() throws JSONCompilerException ; Object decompile( Object parent ) ; Object decompile() ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONDictium.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.unit.Dictium; import com.pinecone.framework.unit.Units; public interface JSONDictium extends Dictium { Object opt( 
Object key ); boolean optBoolean( Object key ); double optDouble( Object key ); int optInt( Object key ); JSONArray optJSONArray( Object key ); JSONObject optJSONObject( Object key ); long optLong( Object key ); String optString( Object key ); byte[] optBytes( Object key ); JSONObject toJSONObject(); JSONArray toJSONArray(); default JSONArray affirmArray ( Object key ) { Object o = this.opt(key); if (o instanceof JSONArray) { return (JSONArray) o; } JSONArray jNew = new JSONArraytron(); this.insert( key, jNew ); return jNew; } default JSONObject affirmObject ( Object key ) { Object o = this.opt(key); if (o instanceof JSONObject) { return (JSONObject) o; } JSONObject jNew = new JSONMaptron(); this.insert( key, jNew ); return jNew; } default Object affirm ( Object key ) { if ( this.containsKey(key) ) { return this.opt(key); } Object o = JSON.NULL; this.insert( key, o ); return o; } /** * query * 202406029 * @param evalKey Object simple-eval key, fmt: key1.key2.key3...keyN (T->.T) * @return null for nothing, object for the value which just be queried. */ default Object query( String evalKey ) { return Units.getValueFromMapStructureRecursively( this, evalKey ); } default String queryString( String evalKey, String defaultValue ) { Object object = this.query( evalKey ); return JSON.NULL.equals(object) ? defaultValue : object.toString(); } default String queryString( String evalKey ) { return this.queryString( evalKey, "" ); } default JSONObject queryJSONObject( String evalKey ) { Object o = this.query( evalKey ); return o instanceof JSONObject ? (JSONObject)o : null; } default JSONArray queryJSONArray( String evalKey ) { Object o = this.query( evalKey ); return o instanceof JSONArray ? (JSONArray)o : null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONEncoder.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Pinenut; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.Collection; import java.util.Iterator; import java.util.Map; public interface JSONEncoder extends Pinenut { String JSON_OBJ_STRINGIFY_DEFAULT = "[object %s]"; String JSON_OBJ_NULL_DEFAULT = "null"; JSONEncoder BASIC_JSON_ENCODER = new GenericJSONEncoder(); JSONMarshal BASIC_JSON_MARSHAL = new GenericJSONMarshal(); JSONMarshal STRUCT_JSON_MARSHAL = new GenericJSONMarshal( JSONMarshalMode.MODE_PUBLIC_FIELD ); JSONMarshal COMMON_JSON_MARSHAL = new GenericJSONMarshal( JSONMarshalMode.MODE_COMMON ); JSONMarshal BEAN_JSON_MARSHAL = new GenericJSONMarshal( JSONMarshalMode.MODE_BEAN_GETTER ); Writer write ( Pinenut that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; Writer write ( JSONObject that, Writer writer ) throws IOException ; Writer write ( JSONObject that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; Writer write ( JSONArray that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; void writeKeyValue ( Writer writer, Object key, Object val, int nIndentFactor, int nIndentBlankNum ) throws JSONException, IOException ; default Writer write ( Map that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { return this.writeMapFmtEntries( that.entrySet(), writer, nIndentFactor, nIndentBlankNum ); } default Writer writeMapFmtEntriesT ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) 
throws IOException { return this.writeMapFmtEntries( that, writer, nIndentFactor, nIndentBlankNum ); } Writer writeMapFmtEntries ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; Writer writeArray ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; Writer write ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; Writer write ( Object that, Writer writer ) throws IOException ; Writer write ( Object that, Writer writer, int nIndentFactor ) throws IOException ; Writer write ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; static Writer writeMapFormat ( Writer writer, Object[] those, int nIndentFactor, int nIndentBlankNum, JSONEncoder encoder ) throws IOException { int length = those.length; writer.write('{'); if ( length == 1 ) { if( !(those[0] instanceof Map.Entry ) ){ throw new IllegalArgumentException( "Serialized object should be [Map.Entry]." ); } Map.Entry kv = (Map.Entry)those[0]; encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nIndentBlankNum ); } else if ( length != 0 ) { int nNewIndent = nIndentBlankNum + nIndentFactor; for ( int i = 0; i < length; ++i ) { Map.Entry kv = (Map.Entry)those[i]; GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, i !=0 ); encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nNewIndent ); } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } writer.write( '}' ); return writer; } static Writer writeMapFormat ( Writer writer, Object[] those, int nIndentFactor, int nIndentBlankNum ) throws IOException { return JSONEncoder.writeMapFormat( writer, those, nIndentFactor, nIndentBlankNum, JSONEncoder.BASIC_JSON_ENCODER ); } static String writeMapFormat ( Object[] those, int nIndentFactor ) { StringWriter w = new StringWriter(); try { synchronized( w.getBuffer() ) { JSONEncoder.writeMapFormat( w, (Object[])those, nIndentFactor,0 ); return w.toString(); } } catch ( IOException e ){ return null; } } static String stringifyMapFormat ( Object[] those ) { return JSONEncoder.writeMapFormat( those, 0 ); } static Writer writeMapFormat ( Writer writer, Collection those, int nIndentFactor, int nIndentBlankNum, JSONEncoder encoder ) throws IOException { boolean bHasNextElement = false; int length = those.size(); writer.write('{'); Iterator iter = those.iterator(); if ( length == 1 ) { Map.Entry kv = (Map.Entry)iter.next(); encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nIndentBlankNum ); } else if ( length != 0 ) { int nNewIndent = nIndentBlankNum + nIndentFactor; while( iter.hasNext() ) { GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement ); Map.Entry kv = (Map.Entry)iter.next(); encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nNewIndent ); bHasNextElement = true; } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } writer.write('}'); return writer; } static Writer writeMapFormat ( Writer writer, Collection those, int nIndentFactor, int nIndentBlankNum ) throws IOException { return JSONEncoder.writeMapFormat( writer, those, nIndentFactor, nIndentBlankNum, JSONEncoder.BASIC_JSON_ENCODER ); } static String writeMapFormat ( Collection those, int nIndentFactor ) { StringWriter w = new StringWriter(); try { synchronized( 
w.getBuffer() ) { JSONEncoder.writeMapFormat( w, (Collection)those, nIndentFactor,0 ); return w.toString(); } } catch ( IOException e ){ return null; } } static String stringifyMapFormat ( Collection those ) { return JSONEncoder.writeMapFormat( those, 0 ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONException.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.PineRuntimeException; public class JSONException extends PineRuntimeException { private static final long serialVersionUID = 0L; private Throwable cause; public JSONException ( String what ) { super( what ); } public JSONException( Throwable cause ) { super( cause.getMessage() ); this.cause = cause; } @Override public Throwable getCause() { return this.cause; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONMaptron.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.util.json.homotype.BeanColonist; import java.io.IOException; import java.io.Serializable; import java.io.Writer; import java.lang.reflect.Field; import java.util.Map; import java.util.LinkedHashMap; import java.util.HashMap; import java.util.TreeMap; import java.util.Locale; import java.util.ResourceBundle; import java.util.Enumeration; import java.util.Set; import java.util.Collection; import java.util.Iterator; public class JSONMaptron extends ArchJSONObject implements JSONObject, Serializable { private Map<String, Object> mMap; public JSONMaptron() { this( true ); } public JSONMaptron( boolean bLinked ){ this.mMap = bLinked ? new LinkedHashMap<>() : new HashMap<>(); } public JSONMaptron( int nInitialCapacity, boolean bLinked ){ if ( bLinked ) { this.mMap = new LinkedHashMap<>( nInitialCapacity ); } else { this.mMap = new HashMap<>( nInitialCapacity ); } } public JSONMaptron( ArchCursorParser x ) throws JSONException { this(); this.jsonDecode0( x ); } public JSONMaptron( Map map, String[] names ) { this(); for( int i = 0; i < names.length; ++i ) { try { this.putOnce( names[i], map.get( names[i] ) ); } catch ( Exception e ) { this.putOnce( names[i], JSON.NULL ); } } } public JSONMaptron( Map map ) { this( map,false ); } public JSONMaptron( Map map, boolean bAssimilateMode ) { if( bAssimilateMode ){ this.mMap = map; } else { this.mMap = new LinkedHashMap<>(); if (map != null) { for ( Object o : map.entrySet() ) { Entry e = (Entry) o; Object value = e.getValue(); if (value != null) { this.mMap.put( (String) e.getKey(), JSONUtils.wrapValue(value) ); } } } } } public JSONMaptron( Object bean ) { this(); this.populateMap( bean ); } public JSONMaptron( Object object, String[] names ) { this(); Class c = object.getClass(); for( int i = 0; i < names.length; ++i ) { String name = names[i]; try { this.putOpt( name, c.getField(name).get(object) ); } catch ( Exception ignore ) { //Do nothing.
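// Missing or inaccessible bean fields are intentionally skipped rather than reported.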
} } } public JSONMaptron( String source ) throws JSONException { this(new JSONCursorParser(source)); } public JSONMaptron( String baseName, Locale locale ) throws JSONException { this(); ResourceBundle bundle = ResourceBundle.getBundle(baseName, locale, Thread.currentThread().getContextClassLoader()); Enumeration keys = bundle.getKeys(); while( true ) { Object key; do { if ( !keys.hasMoreElements() ) { return; } key = keys.nextElement(); } while(!(key instanceof String)); String[] path = ((String)key).split("\\."); int last = path.length - 1; JSONObject target = this; for( int i = 0; i < last; ++i ) { String segment = path[i]; JSONObject nextTarget = target.optJSONObject(segment); if ( nextTarget == null ) { nextTarget = new JSONMaptron(); target.put(segment, (Object)nextTarget); } target = nextTarget; } target.put(path[last], (Object)bundle.getString((String)key)); } } @Override protected void jsonDecode0( ArchCursorParser x ) throws JSONException { JSONObjectDecoder.INNER_JSON_OBJECT_DECODER.decode( this, null, null, x, null ); } @Override public JSONMaptron jsonDecode( ArchCursorParser x ) throws JSONException { this.clear(); this.jsonDecode0( x ); return this; } @Override public JSONMaptron jsonDecode( String source ) throws JSONException { return this.jsonDecode( new JSONCursorParser(source) ); } @Override public JSONMaptron assimilate( Map that ){ this.mMap = that; return this; } @Override public Map getMap(){ return this.mMap; } /** Basic Map **/ @Override public int size() { return this.mMap.size(); } @Override public boolean isEmpty() { return this.mMap.isEmpty(); } @Override protected boolean innerMapContainsKey( Object key ) { return this.mMap.containsKey( key ); } @Override public boolean containsValue( Object value ) { return this.mMap.containsValue(value); } @Override public void putAll( Map m ) { this.mMap.putAll(m); } @Override public void clear() { this.mMap.clear(); } @Override public Object remove( Object key ) { return this.mMap.remove(key); } @Override public Set keySet() { return this.mMap.keySet(); } @Override public Collection values() { return this.mMap.values(); } @Override public Set<Map.Entry<String, Object>> entrySet() { return this.mMap.entrySet(); } @Override protected Object innerMapGet( Object key ) { return this.mMap.get( key ); } protected void populateMap( Object bean ) { BeanColonist.WrappedColonist.populate( bean, this ); } @Override protected Object innerMapPut( String key, Object value ){ return this.mMap.put( key, value ); } @Override public JSONMaptron put( String key, Map value ) throws JSONException { this.put( key, (Object)( new JSONMaptron(value) ) ); return this; } @Override protected Object innerMapRemove( String key ) { return this.mMap.remove( key ); } @Override public Map.Entry<String, Object> front() { return this.mMap.entrySet().iterator().next(); } @Override public Map.Entry<String, Object> back() { try{ if( this.mMap instanceof LinkedHashMap ){ Field tail = this.mMap.getClass().getDeclaredField("tail" ); tail.setAccessible( true ); Map.Entry<String, Object> kv = (Map.Entry<String, Object>)tail.get( this.mMap ); return kv; } else if( this.mMap instanceof LinkedTreeMap ){ return ( (LinkedTreeMap)this.mMap ).getLast(); } else if( this.mMap instanceof TreeMap ){ return ( (TreeMap)this.mMap ).lastEntry(); } else { throw new IllegalStateException(); } } catch ( NoSuchFieldException | IllegalAccessException | IllegalStateException e ) { // Fallback: no direct tail accessor is available, so walk to the last entry.
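// The linear scan costs O(n), but it works for any Map implementation.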
Iterator<Map.Entry<String, Object>> iterator = this.mMap.entrySet().iterator(); Map.Entry<String, Object> tail = null; while ( iterator.hasNext() ) { tail = iterator.next(); } return tail; } } @Override public JSONMaptron clone() { JSONMaptron that = (JSONMaptron) super.clone(); that.mMap = new LinkedHashMap<>(); for ( Entry<String, Object> e : this.mMap.entrySet() ) { Object value = e.getValue(); that.mMap.put( e.getKey(), JSONUtils.cloneElement( value ) ); } return that; } @Override public Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { return JSONEncoder.BASIC_JSON_ENCODER.write( this.mMap, writer, nIndentFactor, nIndentBlankNum ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONMarshal.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.util.json.handler.EncodeHandlerRegistry; import com.pinecone.framework.util.json.handler.JSONObjectEncodeHandler; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; public interface JSONMarshal extends JSONEncoder { void setMode( long mode ); long getMode(); void setBeanEncoder( BeanJSONEncoder encoder ); BeanJSONEncoder getBeanEncoder(); void setEncodeHandlerRegistry( EncodeHandlerRegistry registry ); EncodeHandlerRegistry getEncodeHandlerRegistry(); void registerEncodeHandler( Class type, JSONObjectEncodeHandler handler ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONMarshalMode.java ================================================ package com.pinecone.framework.util.json; public final class JSONMarshalMode { private JSONMarshalMode() {} public static final long MODE_ANNOTATED_FIELD = 1L; public static final long MODE_ANY_FIELD = 1L << 1; public static final long MODE_PUBLIC_FIELD = 1L << 2; public static final long MODE_BEAN_GETTER = 1L << 3; public static final long MODE_DEFAULT = MODE_ANNOTATED_FIELD; public static final long MODE_COMMON = MODE_ANNOTATED_FIELD | MODE_PUBLIC_FIELD | MODE_BEAN_GETTER; public static final long MODE_ALL = MODE_ANNOTATED_FIELD | MODE_PUBLIC_FIELD | MODE_BEAN_GETTER | MODE_ANY_FIELD; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONObject.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.PineUnit; import com.pinecone.framework.system.prototype.TypeIndex; import java.io.IOException; import java.io.Serializable; import java.io.Writer; import java.util.*; public interface JSONObject extends PineUnit, Map, JSONDictium, Cloneable, Serializable { JSONObject jsonDecode( ArchCursorParser x ) throws JSONException ; JSONObject jsonDecode( String source ) throws JSONException ; JSONObject assimilate( Map that ); JSONObject shareFrom( JSONObject that, String szKey ) ; JSONObject shareFrom( JSONObject that, String[] szKeys ) ; JSONObject subJson ( String szKey ) ; JSONObject subJson ( String[] szKeys ) ; JSONObject detachSub ( String szKey ) ; JSONObject detachSub ( String[] szKeys ) ; JSONObject moveSubFrom ( JSONObject that, String szKey ) ; JSONObject moveSubFrom ( JSONObject that, String[] szKeys ) ; /** * 20240625 * Eliminates all keys except the survivor key. * @param szSurvivorKey The `key` that can survive.
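* Note: the surviving value is re-inserted by reference; it is not cloned.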
* @return this */ default JSONObject eliminateExcepts( String szSurvivorKey ) { Object sub = this.opt( szSurvivorKey ); this.clear(); this.embed( szSurvivorKey, sub ); return this; } /** * 20240625 * Eliminates all keys except survivor keys. * @param szSurvivorKeys The batch of `keys` that can survive. * @return this */ default JSONObject eliminateExcepts( String[] szSurvivorKeys ) { JSONObject sub = this.subJson( szSurvivorKeys ); this.clear(); this.assimilate( sub.getMap() ); return this; } Map getMap(); /** Basic Map **/ @Override int size() ; @Override boolean isEmpty() ; @Override boolean containsKey( Object key ) ; @Override boolean containsValue( Object value ) ; @Override void putAll( Map m ) ; JSONObject xPutAll( Map m ) ; @Override void clear() ; JSONObject xClear() ; @Override Object remove( Object key ) ; @Override Object erase( Object key ) ; JSONObject xRemove(Object key) ; @Override Set keySet() ; @Override Collection values() ; @Override Set<Map.Entry<String, Object>> entrySet() ; JSONObject accumulate( String key, Object value ) throws JSONException ; JSONObject append( String key, Object value ) throws JSONException ; @Override Object get( Object key ) ; Object get( String key ) throws JSONException ; boolean getBoolean( String key ) throws JSONException ; double getDouble( String key ) throws JSONException ; int getInt( String key ) throws JSONException ; JSONArray getJSONArray ( String key ) throws JSONException ; JSONObject getJSONObject( String key ) throws JSONException ; long getLong( String key ) throws JSONException ; String getString( String key ) throws JSONException ; byte[] getBytes( String key ) throws JSONException ; JSONArray affirmArray( String key ) ; JSONObject affirmObject( String key ) ; Object affirm( String key ) ; Object opt( String key ) ; boolean optBoolean( String key ) ; boolean optBoolean( String key, boolean defaultValue ) ; double optDouble( String key ) ; double optDouble( String key, double defaultValue ) ; int optInt( String key ) ; int optInt( String key, int defaultValue ) ; JSONArray optJSONArray( String key) ; JSONObject optJSONObject( String key) ; long optLong( String key ) ; long optLong( String key, long defaultValue ) ; String optString( String key ) ; String optString( String key, String defaultValue ) ; byte[] optBytes( String key ) ; byte[] optBytes( String key, byte[] defaultValue ) ; @Override Object opt( Object key ) ; @Override boolean optBoolean( Object key ) ; @Override double optDouble( Object key ) ; @Override int optInt( Object key ) ; @Override JSONArray optJSONArray( Object key ) ; @Override JSONObject optJSONObject( Object key ) ; @Override long optLong( Object key ) ; @Override String optString( Object key ) ; @Override byte[] optBytes( Object key ) ; JSONObject increment( String key ) throws JSONException ; boolean isNull( String key ) ; Iterator keys() ; JSONArray names() ; String[] getOwnPropertyNames () ; @Override JSONObject insert( Object key, Object value ) ; JSONObject put( String key, boolean value ) throws JSONException ; JSONObject put( String key, Collection value ) throws JSONException ; JSONObject put( String key, double value ) throws JSONException ; JSONObject put( String key, int value ) throws JSONException ; JSONObject put( String key, long value ) throws JSONException ; JSONObject put( String key, Map value ) throws JSONException ; JSONObject put( String key, JSONArray value ) throws JSONException ; JSONObject put( String key, JSONObject value ) throws JSONException ; @Override JSONObject put( String key, Object
value ) throws JSONException ; JSONObject embed( String key, Object value ) throws JSONException ; JSONObject putOnce( String key, Object value ) throws JSONException ; JSONObject putOpt( String key, Object value ) throws JSONException ; Object remove( String key ) ; JSONObject removeAll( Collection keys ); JSONObject removeAll( String[] keys ); JSONArray toJSONArray( JSONArray names ) throws JSONException ; @Override JSONArray toJSONArray() ; @Override JSONObject toJSONObject() ; Map.Entry front() ; Map.Entry back() ; @Override String toJSONString() ; String toJSONStringI( int nIndentFactor ) ; String toJSONString( int nIndentFactor ) throws IOException ; @Override TypeIndex prototype() ; @Override String prototypeName() ; @Override boolean isPrototypeOf ( TypeIndex that ) ; @Override boolean hasOwnProperty ( Object key ) ; @Override Map toMap(); @Override List toList(); JSONObject clone() ; Writer write(Writer writer ) throws IOException ; Writer write( Writer writer, int nIndentFactor ) throws IOException ; Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONObjectDecoder.java ================================================ package com.pinecone.framework.util.json; import java.lang.reflect.Type; public abstract class JSONObjectDecoder implements JSONDecoder { protected abstract void set( Object self, String key, Object val ); @Override public void decode( Object self, Object parent, Object indexKey, ArchCursorParser x, Type genericTypes ) { if ( x.nextClean() != '{' ) { throw x.syntaxError("A JSONObject text must begin with '{'"); } else { while( true ) { char c = x.nextClean(); switch(c) { case '\u0000': { throw x.syntaxError( "A JSONObject text must end with '}'" ); } case '}': { return; } default: { x.back(); String key = null; Object val = null; try { key = x.nextValue( null, self, null ).toString(); c = x.nextClean(); if ( c != ':' && c != '=' ) { throw x.syntaxError( "Expected a ':', '=' after a key" ); } Object[] args = null; if( genericTypes != null ) { args = new Object[]{ genericTypes }; } val = x.nextValue( key, self, args ); this.set( self, key, val ); } catch ( JSONParserRedirectException e ) { e.setContext( new Object[]{ key, val } ); x.handleRedirectException( e ); } switch ( x.nextClean() ) { case ',': case ';': { if ( x.nextClean() == '}' ) { return; } x.back(); break; } case '}':{ return; } default: { throw x.syntaxError( "Expected a ',' or '}'" ); } } } } } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONParseException.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.ParseException; public class JSONParseException extends ParseException { private static final long serialVersionUID = 0L; private Throwable cause; public JSONParseException ( String what ) { super( what ); } public JSONParseException ( String what, int errorOffset ) { super( what, errorOffset ); } public JSONParseException ( String message, int errorOffset, Throwable cause ) { super( message, errorOffset ); this.cause = cause; } public JSONParseException ( Throwable cause, int errorOffset ) { super( cause.getMessage(), errorOffset ); this.cause = cause; } public JSONParseException ( Throwable cause ) { super( cause.getMessage() ); this.cause = cause; } @Override public 
Throwable getCause() { return this.cause; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONParserRedirectException.java ================================================ package com.pinecone.framework.util.json; public class JSONParserRedirectException extends JSONException { Object context; int type ; public JSONParserRedirectException ( int type ) { super( "" ); this.type = type; } public int getType() { return this.type; } public void setContext( Object context ) { this.context = context; } public Object getContext() { return this.context; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONString.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.system.prototype.Pinenut; public interface JSONString extends Pinenut { String toJSONString(); static JSONString wrapRaw( String that ) { return new JSONString() { @Override public String toJSONString() { return that; } @Override public String toString() { return that; } }; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONUtils.java ================================================ package com.pinecone.framework.util.json; import com.pinecone.framework.util.StringUtils; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Collection; import java.util.Iterator; import java.util.Map; public abstract class JSONUtils { public static void prospectNumberQualify(Object o) throws JSONException { if (o != null) { if (o instanceof Double) { if (((Double)o).isInfinite() || ((Double)o).isNaN()) { throw new JSONException("JSON does not allow non-finite numbers."); } } else if (o instanceof Float && (((Float)o).isInfinite() || ((Float)o).isNaN())) { throw new JSONException("JSON does not allow non-finite numbers."); } } } private static String numberLikeStringTransfer( String string ){ if ( string.indexOf(46) > 0 && string.indexOf(101) < 0 && string.indexOf(69) < 0 ) { while(string.endsWith("0")) { string = string.substring(0, string.length() - 1); } if (string.endsWith(".")) { string = string.substring(0, string.length() - 1); } } return string; } public static String numberToString( Number number ) throws JSONException { if (number == null) { throw new JSONException("Null pointer"); } else { JSONUtils.prospectNumberQualify(number); String string = number.toString(); return JSONUtils.numberLikeStringTransfer( string ); } } public static String doubleToString( double d ) { if (!Double.isInfinite(d) && !Double.isNaN(d)) { String string = Double.toString(d); return JSONUtils.numberLikeStringTransfer( string ); } else { return "null"; } } public static String valueToString( Object value ) throws JSONException { if ( value != null && !value.equals((Object)null) ) { if ( value instanceof JSONString ) { String object; try { object = ((JSONString)value).toJSONString(); } catch ( Exception e ) { throw new JSONException(e); } return object; } else if ( value instanceof Number ) { return JSONUtils.numberToString((Number)value); } else if (!(value instanceof Boolean) && !(value instanceof JSONObject) && !(value instanceof JSONArray)) { if (value instanceof Map) { return (new JSONMaptron((Map)value)).toString(); } else if (value instanceof Collection) { return (new JSONArraytron((Collection)value)).toString(); } else { return 
value.getClass().isArray() ? (new JSONArraytron(value)).toString() : StringUtils.jsonQuote(value.toString()); } } else { return value.toString(); } } else { return "null"; } } public static String noneStartZeroInteger( String szNum ) { if( szNum.startsWith( "0" ) ) { int i; for ( i = 0; i < szNum.length(); i++ ) { if( i == szNum.length() - 1 && szNum.charAt(i) == '0' ){ // 0000001, 0nX break; } if( szNum.charAt(i) != '0' ) { break; } } return szNum.substring( i ); } return szNum; } public static Object stringToValue( String string ) { if ( string.equals("") ) { return string; } else if (string.equalsIgnoreCase("true")) { return Boolean.TRUE; } else if (string.equalsIgnoreCase("false")) { return Boolean.FALSE; } else if ( string.equalsIgnoreCase("null") || string.equalsIgnoreCase("undefined") ) { return JSON.NULL; } else { char b = string.charAt(0); if ( b >= '0' && b <= '9' || b == '-' ) { try { if ( string.indexOf( '.' ) <= -1 && string.indexOf( 'e' ) <= -1 && string.indexOf( 'E' ) <= -1 ) { String szToken = JSONUtils.noneStartZeroInteger( string ); if( szToken.length() < 18 ) { Long n = Long.parseLong( szToken ); if ( szToken.equals( n.toString() ) ) { if ( n == (long)n.intValue() ) { return n.intValue(); } return n; } } else { return new BigInteger( szToken ); } } else { if( string.length() < 18 ) { Double d = Double.valueOf( string ); if ( !d.isInfinite() && !d.isNaN() ) { return d; } } else { return new BigDecimal( string ); } } } catch ( Exception e ) { //e.printStackTrace(); } } return string; } } public static Object wrapValue( Object value, boolean bWrapBean ) { try { if ( value == null ) { return JSON.NULL; } else if (!(value instanceof JSONObject) && !(value instanceof JSONArray) && !JSON.NULL.equals(value) && !(value instanceof JSONString) && !(value instanceof Byte) && !(value instanceof Character) && !(value instanceof Short) && !(value instanceof Integer) && !(value instanceof Long) && !(value instanceof Boolean) && !(value instanceof Float) && !(value instanceof Double) && !(value instanceof String)) { if (value instanceof Collection) { return new JSONArraytron((Collection)value); } else if ( value.getClass().isArray() ) { return new JSONArraytron(value); } else if ( value instanceof Map ) { return new JSONMaptron((Map)value); } else { if( bWrapBean ) { Package objectPackage = value.getClass().getPackage(); String objectPackageName = objectPackage != null ? objectPackage.getName() : ""; return !objectPackageName.startsWith("java.") && !objectPackageName.startsWith("javax.") && value.getClass().getClassLoader() != null ? 
new JSONMaptron(value) : value.toString(); } else { return null; } } } else { return value; } } catch ( Exception e ) { return null; } } public static Object wrapValue( Object value ) { return JSONUtils.wrapValue( value, true ); } public static String[] getOwnPropertyNames ( JSONObject that ) { int nSize = that.size(); if ( nSize == 0 ) { return null; } else { Iterator iterator = that.keys(); String[] names = new String[nSize]; for( int i = 0; iterator.hasNext(); ++i ) { names[i] = (String)iterator.next(); } return names; } } public static Object cloneElement ( Object that ) { if( that instanceof JSONArray ) { return ( ( JSONArray ) that ).clone(); } else if( that instanceof JSONObject ) { return ( ( JSONObject ) that ).clone(); } return that; } public static int asInt32Key( Object key ) { if( key instanceof Integer ) { return (int) key; } else if( key instanceof Float || key instanceof Double || key instanceof BigDecimal ) { throw new JSONException( "Array does not allow a float as key." ); } else if( key instanceof Number ) { return ((Number) key).intValue(); } else if( key instanceof String ) { return Integer.parseInt( (String) key ); } throw new JSONException( "Key of Array should be an integer or an integer-formatted string." ); } public static int toInt32Key( Object key ) { if( key instanceof Integer ) { return (int) key; } else if( key instanceof Number ) { return ((Number) key).intValue(); } else if( key instanceof String ) { return Integer.parseInt( (String) key ); } return Integer.parseInt( key.toString() ); } public static String asStringKey( Object key ) { if( key instanceof String ) { return (String) key; } return key.toString(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ObjectJSONCursorUnmarshal.java ================================================ package com.pinecone.framework.util.json; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; import java.lang.reflect.Array; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.prototype.ObjectiveEvaluator; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.UnitHelper; public class ObjectJSONCursorUnmarshal extends ArchCursorParser { public static JSONDecoder INNER_OBJECT_DECODER = new JSONObjectDecoder() { @Override protected void set( Object self, String key, Object val ) { ObjectiveEvaluator.MapStructures.set( self, key, val ); } }; public static JSONDecoder INNER_ARRAY_DECODER = new JSONArrayDecoder() { @Override @SuppressWarnings( "unchecked" ) protected void add( Object self, Object parent, Object indexKey, Object val ) { if( self.getClass().isArray() ) { Object[] ref = (Object[])self; // Java has no out-parameters; a one-element array is used as a mutable reference.
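// UnitHelper.append presumably returns a new array with val appended, since Java arrays cannot grow in place.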
ref[ 0 ] = UnitHelper.append( (Object[])ref[ 0 ], val ); } else if( self instanceof Collection ) { ((Collection) self).add( val ); } } }; protected Class mClassType ; protected Type mRootType ; protected final TypeContext mTypeContext = new TypeContext(); public ObjectJSONCursorUnmarshal( Reader reader, Class classType, Type rootType ) throws JSONParseException { super(reader); this.mRootType = rootType; this.mClassType = classType; } public ObjectJSONCursorUnmarshal( Reader reader, Type rootType ) throws JSONParseException { this( reader, ObjectiveEvaluator.resolveRawClass( rootType ), rootType ); } public ObjectJSONCursorUnmarshal( Reader reader, Class classType ) throws JSONParseException { this( reader, (Type) classType ); } public ObjectJSONCursorUnmarshal( InputStream inputStream, Class classType ) throws JSONParseException { this((Reader)( new InputStreamReader(inputStream)), classType ); } public ObjectJSONCursorUnmarshal( InputStream inputStream, Type rootType ) throws JSONParseException { this((Reader)( new InputStreamReader(inputStream)), ObjectiveEvaluator.resolveRawClass( rootType ), rootType ); } public ObjectJSONCursorUnmarshal( String s, Class classType ) throws JSONParseException { this((Reader)( new StringReader(s)), classType ); } public ObjectJSONCursorUnmarshal( String s, Type rootType ) throws JSONParseException { this((Reader)( new StringReader(s)), ObjectiveEvaluator.resolveRawClass( rootType ), rootType ); } protected GenericTypeContext nextGenericTypeContext( Object parent, Object indexKey ) throws JSONParseException { Class thisType ; Type elemGenericType ; if ( parent == null ) { if ( this.mTypeContext.isEmpty() ) { this.mTypeContext.push( this.mRootType, ObjectiveEvaluator.extractGenericElementType( this.mRootType ) ); } TypeContext.Frame frame = this.mTypeContext.peek(); thisType = ObjectiveEvaluator.resolveRawClass( frame.mContainerType ); elemGenericType = frame.mElementType; } else { TypeContext.Frame frame = this.mTypeContext.peek(); if ( frame != null && frame.mElementType != null ) { elemGenericType = frame.mElementType; thisType = ObjectiveEvaluator.resolveRawClass( frame.mElementType ); } else { elemGenericType = ObjectiveEvaluator.MapStructures.getElementGenericType( parent, indexKey.toString() ); thisType = ObjectiveEvaluator.MapStructures.getType( parent, indexKey ); } } GenericTypeContext context = new GenericTypeContext(); context.thisType = thisType; context.elemGenericType = elemGenericType; return context; } @Override protected Object newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) { try { GenericTypeContext context = this.nextGenericTypeContext( parent, indexKey ); Class thisType = context.thisType; Type elemGenericType = context.elemGenericType; // if ( parent != null ){ // elemGenericType = ObjectiveEvaluator.MapStructures.getElementGenericType( parent, indexKey.toString() ); // thisType = ObjectiveEvaluator.MapStructures.getType( parent, indexKey ); // } // else { // thisType = ObjectiveEvaluator.resolveRawClass( this.mRootType ); // elemGenericType = ObjectiveEvaluator.extractGenericElementType( this.mRootType ); // } Object self; if ( thisType == null ) { self = new Object(); // Dummy ObjectJSONCursorUnmarshal.INNER_ARRAY_DECODER.decode( self, parent, indexKey,this, elemGenericType ); return self; } if ( thisType.equals( List.class ) || thisType.equals( Void.class ) || thisType.equals( Object.class ) ) { thisType = JSONArraytron.class; if( elemGenericType != null ) { String genericTypeName = 
elemGenericType.getTypeName(); if( !genericTypeName.equals( "?" ) && !genericTypeName.equals( Object.class.getSimpleName() ) ) { thisType = ArrayList.class; } } } else if ( thisType.equals( Set.class ) ) { thisType = LinkedHashSet.class; } if ( thisType.isArray() ) { Class innerType = thisType.getComponentType(); if ( innerType.equals( Object.class ) ) { self = new Object[]{ new Object[ 0 ] }; // Object[]*, ptr -> Object[] } else { elemGenericType = innerType; self = new Object[]{ Array.newInstance( innerType, 0 ) }; // Object[]*, ptr -> Object[] } } else { self = thisType.getConstructor().newInstance(); } ObjectJSONCursorUnmarshal.INNER_ARRAY_DECODER.decode( self, parent, indexKey,this, elemGenericType ); if ( self.getClass().isArray() ) { return Array.get( self, 0 ); } return self; } catch ( NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { return null; } catch ( InstantiationException e1 ) { throw new JSONParseException( e1 ); } } protected Class findDirectJSONObjectAssignableType( Class thisType ) { if( thisType == null || thisType.equals( Map.class ) || thisType.equals( Void.class ) || thisType.equals( Object.class ) ) { thisType = JSONMaptron.class; } else if( thisType.isInterface() && Map.class.isAssignableFrom( thisType ) ) { thisType = JSONMaptron.class; } else if( thisType.isInterface() && JSONObject.class.isAssignableFrom( thisType ) ) { thisType = JSONMaptron.class; } return thisType; } @Override protected Object newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) { try { GenericTypeContext context = this.nextGenericTypeContext( parent, indexKey ); Class thisType = context.thisType; Type elemGenericType = context.elemGenericType; // if( parent == null ){ // thisType = ObjectiveEvaluator.resolveRawClass( this.mRootType ); // elemGenericType = ObjectiveEvaluator.extractGenericElementType( this.mRootType ); // } // else { // thisType = ObjectiveEvaluator.MapStructures.getType( parent, indexKey ); // elemGenericType = ObjectiveEvaluator.MapStructures.getFieldGenericType( parent, indexKey.toString() ); // } thisType = this.findDirectJSONObjectAssignableType( thisType ); if( elemGenericType != null ) { String genericTypeName = elemGenericType.getTypeName(); if( genericTypeName.contains( "<" ) && genericTypeName.contains( ">" ) ) { thisType = LinkedHashMap.class; } } Object self; if( thisType == null ) { self = new Object(); // Dummy ObjectJSONCursorUnmarshal.INNER_OBJECT_DECODER.decode( self, parent, indexKey,this, elemGenericType ); return self; } if( args != null && args.length > 0 ) { Object dyType = args[ 0 ]; Type eleType = (Type) dyType; if( eleType != null ) { if( parent != null && parent.getClass().isArray() ) { if( !dyType.equals( Object[].class ) && !dyType.equals( Object.class ) && !dyType.equals( Map.class ) ) { thisType = (Class) eleType; } } else { String[] genericTypeNames = ReflectionUtils.extractGenericClassNames( eleType.getTypeName() ); if( genericTypeNames != null && genericTypeNames.length > 0 ) { String genericTypeName; if( genericTypeNames.length > 1 ) { genericTypeName = genericTypeNames[ 1 ]; // Map value. } else { genericTypeName = genericTypeNames[ 0 ]; // Collection value. } if( !genericTypeName.equals( "?" 
) && !genericTypeName.equals( Object.class.getSimpleName() ) ) { try{ thisType = this.getClass().getClassLoader().loadClass( genericTypeName ); thisType = this.findDirectJSONObjectAssignableType( thisType ); } catch ( ClassNotFoundException e ) { thisType = JSONMaptron.class; } } } } } } self = thisType.getConstructor().newInstance(); ObjectJSONCursorUnmarshal.INNER_OBJECT_DECODER.decode( self, parent, indexKey, this, elemGenericType ); return self; } catch ( NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { return null; } catch ( InstantiationException e1 ) { throw new JSONParseException( e1 ); } } public static class GenericTypeContext { protected Class thisType ; protected Type elemGenericType ; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/TypeContext.java ================================================ package com.pinecone.framework.util.json; import java.lang.reflect.Type; import java.util.ArrayDeque; import java.util.Deque; public final class TypeContext { static final class Frame { final Type mContainerType; final Type mElementType; Frame( Type containerType, Type elementType ) { this.mContainerType = containerType; this.mElementType = elementType; } } private final Deque<Frame> mStack = new ArrayDeque<>(); public void push( Type containerType, Type elementType ) { this.mStack.push( new Frame( containerType, elementType ) ); } public void pop() { if ( !this.mStack.isEmpty() ) { this.mStack.pop(); } } public Frame peek() { return this.mStack.peek(); } public boolean isEmpty() { return this.mStack.isEmpty(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/TypeReference.java ================================================ package com.pinecone.framework.util.json; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; public abstract class TypeReference<T> implements Comparable<TypeReference<T>> { protected final Type _type; protected TypeReference() { Type superClass = this.getClass().getGenericSuperclass(); if (superClass instanceof Class) { throw new IllegalArgumentException("Internal error: TypeReference constructed without actual type information"); } else { this._type = ((ParameterizedType)superClass).getActualTypeArguments()[0]; } } public Type getType() { return this._type; } @Override public int compareTo(TypeReference<T> o) { return 0; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/binary/BsonTraits.java ================================================ package com.pinecone.framework.util.json.binary; import com.pinecone.framework.util.ReflectionUtils; import java.io.OutputStream; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; public final class BsonTraits { public static String FUN_TO_BSON_BYTES_NAME = "toBsonBytes"; public static String FUN_BSON_SERIALIZE_NAME = "bsonSerialize"; public static byte[] invokeToBsonBytes ( Object that ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { Method fnToBsonBytes = that.getClass().getMethod( BsonTraits.FUN_TO_BSON_BYTES_NAME ); ReflectionUtils.makeAccessible( fnToBsonBytes ); return (byte[]) fnToBsonBytes.invoke( that ); } public static void invokeBsonSerialize ( Object that, OutputStream os ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { Method
fnBsonSerialize = that.getClass().getMethod( BsonTraits.FUN_BSON_SERIALIZE_NAME, OutputStream.class ); ReflectionUtils.makeAccessible( fnBsonSerialize ); fnBsonSerialize.invoke( that, os ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/binary/Bsonut.java ================================================ package com.pinecone.framework.util.json.binary; import com.pinecone.framework.system.prototype.Pinenut; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; public interface Bsonut extends Pinenut { default byte[] toBsonBytes() { ByteArrayOutputStream os = new ByteArrayOutputStream(); try{ this.bsonSerialize( os ); os.flush(); return os.toByteArray(); } catch ( IOException e ) { return null; } } void bsonSerialize( OutputStream os ) throws IOException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/handler/EncodeHandlerRegistry.java ================================================ package com.pinecone.framework.util.json.handler; import com.pinecone.framework.system.prototype.Pinenut; public interface EncodeHandlerRegistry extends Pinenut { <T> void register( Class<T> type, JSONObjectEncodeHandler<T> serializer ); <T> JSONObjectEncodeHandler<T> get( Class<T> type ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/handler/GenericEncodeHandlerRegistry.java ================================================ package com.pinecone.framework.util.json.handler; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; public class GenericEncodeHandlerRegistry implements EncodeHandlerRegistry { protected final Map<Class<?>, JSONObjectEncodeHandler<?>> mSerializers; public GenericEncodeHandlerRegistry() { this.mSerializers = new ConcurrentHashMap<>(); } public <T> void register( Class<T> type, JSONObjectEncodeHandler<T> serializer ) { this.mSerializers.put( type, serializer ); } @SuppressWarnings("unchecked") public <T> JSONObjectEncodeHandler<T> get( Class<T> type ) { JSONObjectEncodeHandler<?> exact = this.mSerializers.get( type ); if ( exact != null ) { return (JSONObjectEncodeHandler<T>) exact; } for ( Map.Entry<Class<?>, JSONObjectEncodeHandler<?>> e : this.mSerializers.entrySet() ) { if ( e.getKey().isAssignableFrom( type ) ) { return (JSONObjectEncodeHandler<T>) e.getValue(); } } return null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/handler/JSONObjectEncodeHandler.java ================================================ package com.pinecone.framework.util.json.handler; import java.io.IOException; import java.io.Writer; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.GenericJSONEncoder; public interface JSONObjectEncodeHandler<T> extends Pinenut { void serialize( T object, Writer writer, int nIndentFactor, int nIndentBlankNum, GenericJSONEncoder encoder ) throws IOException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/AnnotatedJSONInjector.java ================================================ package com.pinecone.framework.util.json.homotype; import java.lang.annotation.Annotation; import java.lang.reflect.Field; public class AnnotatedJSONInjector extends JSONInjector { public AnnotatedJSONInjector() { super(); } @Override protected String getFieldName( Field field ){ String szKey
= AnnotatedJSONInjector.getAnnotatedKey( field ); if( szKey == null ) { return null; } else if( szKey.isEmpty() ) { return field.getName(); } return szKey; } public static String getAnnotatedKey( Field field ) { String szKey = null; Annotation[] annotations = field.getAnnotations(); for ( Annotation a : annotations ) { if( a instanceof JSONGet ) { szKey = ( (JSONGet) a ).value(); break; } else if( a instanceof MapStructure ) { szKey = ( (MapStructure) a ).value(); break; } } return szKey; } public static AnnotatedJSONInjector instance() { return new AnnotatedJSONInjector(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/AnnotatedObjectInjector.java ================================================ package com.pinecone.framework.util.json.homotype; import com.pinecone.framework.util.ReflectionUtils; import java.lang.reflect.Field; import java.util.List; import java.util.Map; public class AnnotatedObjectInjector extends ObjectInjector { public AnnotatedObjectInjector( Class type ) { super( type ); } protected String getAnnotatedKey( Field field ) { return AnnotatedJSONInjector.getAnnotatedKey( field ); } @Override public Object inject ( Map that, Class type, Object instance ) { Field[] fields = type.getDeclaredFields(); for ( Field field : fields ) { ReflectionUtils.makeAccessible( field ); try { String szKey = this.getAnnotatedKey( field ); if( szKey == null ) { continue; } else if( szKey.isEmpty() ) { szKey = field.getName(); } Object val = that.get( this.getFieldName( szKey ) ); if( val == null ){ val = that.get( szKey ); } if( val == null && szKey.contains( "." ) ){ val = this.getValueFromMapRecursively( that, szKey ); } try { Object j = this.inject( val , field.getType() ); field.set( instance, j ); } catch ( IllegalArgumentException e ){ //e.printStackTrace(); field = null; } } catch ( IllegalAccessException e ){ throw new IllegalStateException(e); // This should never happen. } } return instance; } protected Object getValueFromMapRecursively( Map map, String key ) { String[] keys = key.split("\\.|\\/"); Object value = map; for ( String k : keys ) { if ( value instanceof Map ) { value = ((Map) value).get(k); } else if ( value instanceof List ) { try{ value = ((List) value).get( Integer.parseInt( k ) ); } catch ( NumberFormatException e ) { return null; } } else { return null; } } return value; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/ArchBeanColonist.java ================================================ package com.pinecone.framework.util.json.homotype; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Map; import com.pinecone.framework.system.stereotype.JavaBeans; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.JSONUtils; public abstract class ArchBeanColonist implements BeanColonist { public ArchBeanColonist() { } @Override public void populate( Object bean, JSONObject target, boolean bRecursive ) { Class klass = bean.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Method[] methods = includeSuperClass ?
klass.getMethods() : klass.getDeclaredMethods(); Map targetMap = target.getMap(); for( int i = 0; i < methods.length; ++i ) { try { Method method = methods[i]; if ( Modifier.isPublic( method.getModifiers() ) ) { String key = JavaBeans.getGetterMethodKeyName( method ); if( key == null ) { continue; } if ( key.length() > 0 && Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) { key = JavaBeans.methodKeyNameLowerCaseNormalize( key ); method.setAccessible( true ); Object result = method.invoke( bean, (Object[])null ); this.putValue( targetMap, key, result, bRecursive ); } } } catch ( InvocationTargetException | IllegalAccessException e ) { e.printStackTrace(); // Do nothing. } } } protected void putValue( Map targetMap, String key, Object result, boolean bRecursive ) { if ( result != null ) { Object v = JSONUtils.wrapValue( result, bRecursive ); if( v == null ) { v = result; } targetMap.put( key, v ); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/BeanColonist.java ================================================ package com.pinecone.framework.util.json.homotype; import java.util.Set; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; public interface BeanColonist extends Pinenut { BeanColonist WrappedColonist = new WrappedBeanColonist(); BeanColonist DirectColonist = new DirectBeanColonist(); void populate( Object bean, JSONObject target, boolean bRecursive ) ; default void populate( Object bean, JSONObject target ) { this.populate( bean, target, true ); } default JSONObject populate( Object bean, boolean bRecursive ) { JSONObject jo = new JSONMaptron(); this.populate( bean, jo, bRecursive ); return jo; } default JSONObject populate( Object bean ) { return this.populate( bean, true ); } default void populate( Object bean, JSONObject target, boolean bRecursive, Set exceptedKeys ) { this.populate( bean, target, bRecursive ); target.removeAll( exceptedKeys ); } default void populate( Object bean, JSONObject target, Set exceptedKeys ) { this.populate( bean, target, true, exceptedKeys ); } default JSONObject populate( Object bean, boolean bRecursive, Set exceptedKeys ) { JSONObject jo = new JSONMaptron(); this.populate( bean, jo, bRecursive, exceptedKeys ); return jo; } default JSONObject populate( Object bean, Set exceptedKeys ) { return this.populate( bean, true, exceptedKeys ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/BeanJSONEncoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.io.IOException; import java.io.Writer; import java.util.Set; import com.pinecone.framework.system.prototype.Pinenut; public interface BeanJSONEncoder extends Pinenut { BeanJSONEncoder BasicEncoder = new GenericBeanJSONEncoder(); String encode( Object bean ); String encode( Object bean, Set exceptedKeys ); void encode( Object bean, Writer writer, int nIndentFactor ) throws IOException; void encode( Object bean, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException; default void encode( Object bean, Writer writer ) throws IOException { this.encode( bean, writer, 0 ); } void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException; String valueJsonify( Object val ); } 
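A minimal usage sketch (illustrative only, not a repository source file; the `Point` bean is hypothetical and the printed shapes are indicative, not guaranteed):

// Assumes imports from com.pinecone.framework.util.json and
// com.pinecone.framework.util.json.homotype.
public class Point {
    private int x = 1;
    private int y = 2;
    public int getX() { return this.x; }
    public int getY() { return this.y; }

    public static void main( String[] args ) {
        // BeanJSONEncoder.BasicEncoder reflects over public no-arg getters
        // and emits one "key":value pair per property.
        String json = BeanJSONEncoder.BasicEncoder.encode( new Point() );
        System.out.println( json ); // e.g. {"x":1,"y":2}
        // BeanColonist.WrappedColonist performs the same traversal but
        // materializes the result as a JSONObject.
        JSONObject jo = BeanColonist.WrappedColonist.populate( new Point() );
        System.out.println( jo.size() ); // e.g. 2
    }
}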
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/BeanMapDecoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.prototype.Pinenut; public interface BeanMapDecoder extends Pinenut { BeanMapDecoder BasicDecoder = new GenericBeanMapDecoder(); static boolean trialHomogeneity( Object that ) { return JSONInjector.trialHomogeneity( that ) || that instanceof Map; } Object decode( Object bean, Map jo, Set exceptedKeys, boolean bRecursive ); Object decode( Object bean, Map jo, boolean bRecursive ) ; Object decode( Object bean, Map jo, Set exceptedKeys ); Object decode( Object bean, Map jo ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/DirectBeanColonist.java ================================================ package com.pinecone.framework.util.json.homotype; import java.util.Map; public class DirectBeanColonist extends ArchBeanColonist { public DirectBeanColonist() { super(); } @Override protected void putValue( Map targetMap, String key, Object result, boolean bRecursive ) { targetMap.put( key, result ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/DirectJSONInjector.java ================================================ package com.pinecone.framework.util.json.homotype; public class DirectJSONInjector extends JSONInjector { public DirectJSONInjector() { super(); } public static JSONInjector instance() { return new DirectJSONInjector(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/DirectObjectInjector.java ================================================ package com.pinecone.framework.util.json.homotype; import com.pinecone.framework.system.functions.Function; public class DirectObjectInjector extends ObjectInjector { protected String mszFieldNS = "" ; protected boolean mbUsingHungary = false ; protected Function mfnGetFieldName = null ; public DirectObjectInjector( String szFieldNS, Class stereotype ){ super( stereotype ); this.mszFieldNS = szFieldNS; } public DirectObjectInjector( boolean bUsingHungary, Class stereotype ) { super( stereotype ); this.mbUsingHungary = bUsingHungary; if( this.mbUsingHungary ){ this.mszFieldNS = "m"; } } public DirectObjectInjector( Function fnGetFieldName, Class stereotype ) { super( stereotype ); this.mfnGetFieldName = fnGetFieldName; } public DirectObjectInjector( Class stereotype ) { this( "", stereotype ); } @Override protected String getFieldName( String szKey ){ if( this.mfnGetFieldName != null ) { try { szKey = (String) this.mfnGetFieldName.invoke( szKey ); } catch ( Exception e ) { return szKey; } } else { if( !this.mszFieldNS.isEmpty() ){ StringBuilder sb = new StringBuilder(); sb.append( szKey ); if( this.mbUsingHungary ){ sb.setCharAt( 0, Character.toUpperCase( sb.charAt(0) ) ); } szKey = this.mszFieldNS + sb.toString(); } } return szKey; } public String getFieldNamespace() { return this.mszFieldNS; } public void setFieldNamespace( String ns ) { this.mszFieldNS = ns; } public static DirectObjectInjector instance( boolean bUsingHungary, Class stereotype ) { return new DirectObjectInjector( bUsingHungary, stereotype ); } public static DirectObjectInjector instance( String szFieldNS, 
Class stereotype ) { return new DirectObjectInjector( szFieldNS, stereotype ); } public static DirectObjectInjector instance( Function fnGetFieldName, Class stereotype ) { return new DirectObjectInjector( fnGetFieldName, stereotype ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericBeanJSONEncoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.io.IOException; import java.io.Writer; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Set; import com.pinecone.framework.system.stereotype.JavaBeans; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.GenericJSONEncoder; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONEncoder; public class GenericBeanJSONEncoder implements BeanJSONEncoder { public GenericBeanJSONEncoder() { } @Override public String valueJsonify( Object val ) { return JSON.stringify( val ); } @Override public void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { JSONEncoder.BASIC_JSON_ENCODER.write( val, writer, nIndentFactor, nIndentBlankNum ); } @Override public String encode( Object bean, Set exceptedKeys ) { Class klass = bean.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Method[] methods = includeSuperClass ? klass.getMethods() : klass.getDeclaredMethods(); StringBuilder sb = new StringBuilder( "{" ); for ( int i = 0; i < methods.length; ++i ) { try { Method method = methods[i]; if ( Modifier.isPublic( method.getModifiers() ) ) { String key = JavaBeans.getGetterMethodKeyName( method ); if ( !StringUtils.isEmpty( key ) ) { if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) { key = JavaBeans.methodKeyNameLowerCaseNormalize( key ); if( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } Object val; try { val = method.invoke( bean ); sb.append( '\"' ).append( key ).append( "\":" ); } catch ( IllegalAccessException | InvocationTargetException e ) { continue; } sb.append( this.valueJsonify( val ) ); sb.append( ',' ); } } } } catch ( Exception e ) { e.printStackTrace(); // Do nothing. } } if( sb.charAt( sb.length() - 1 ) == ',' ) { sb.deleteCharAt( sb.length() - 1 ); } sb.append( '}' ); return sb.toString(); // StringWriter w = new StringWriter(); // try { // synchronized( w.getBuffer() ) { // this.encode( bean, w ); // return w.toString(); // } // } // catch ( IOException e ){ // return null; // } } @Override public String encode( Object bean ) { return this.encode( bean, (Set) null ); } @Override public void encode( Object bean, Writer writer, int nIndentFactor ) throws IOException { this.encode( bean, writer, nIndentFactor, 0 ); } @Override public void encode( Object bean, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { Class klass = bean.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Method[] methods = includeSuperClass ? 
klass.getMethods() : klass.getDeclaredMethods(); writer.write( "{" ); boolean isFirst = true; for ( int i = 0; i < methods.length; ++i ) { try { Method method = methods[i]; if ( Modifier.isPublic( method.getModifiers() ) ) { String key = JavaBeans.getGetterMethodKeyName( method ); if ( !StringUtils.isEmpty( key ) ) { if ( Character.isUpperCase( key.charAt( 0 ) ) && method.getParameterTypes().length == 0 ) { key = JavaBeans.methodKeyNameLowerCaseNormalize( key ); if ( !isFirst ) { writer.write( "," ); } int nNewIndent = nIndentBlankNum + nIndentFactor; if ( nNewIndent > 0 ) { writer.write('\n'); } GenericJSONEncoder.indentBlank( writer, nNewIndent ); Object val; try { val = method.invoke( bean ); writer.write( "\"" + key + "\":" ); } catch ( IllegalAccessException | InvocationTargetException e ) { continue; } this.valueJsonify( val, writer, nIndentFactor, nNewIndent ); isFirst = false; GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } } } } catch ( Exception e ) { e.printStackTrace(); // Do nothing. } } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } writer.write( "}" ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericBeanMapDecoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.stereotype.JavaBeans; import com.pinecone.framework.util.StringUtils; public class GenericBeanMapDecoder implements BeanMapDecoder { @SuppressWarnings( "unchecked" ) protected Object decode0( Object bean, Map jo, Set exceptedKeys, boolean bRecursive ) { return this.decode( bean, (Map)jo, exceptedKeys, bRecursive ); } @Override public Object decode( Object bean, Map jo, Set exceptedKeys, boolean bRecursive ) { if( jo == null ) { return bean; } Class klass = bean.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Method[] methods = includeSuperClass ? klass.getMethods() : klass.getDeclaredMethods(); for( int i = 0; i < methods.length; ++i ) { try { Method method = methods[i]; if ( Modifier.isPublic( method.getModifiers() ) ) { String legKey = JavaBeans.getSetterMethodKeyName( method ); String key = legKey; if( !StringUtils.isEmpty( key ) ) { if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 1 ) { key = JavaBeans.methodKeyNameLowerCaseNormalize( key ); Object desiredVal = jo.get( key ); if( desiredVal == null ) { continue; } else if( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } try { Object recursiveBean = null; if( bRecursive ) { String szGetterMethod = JavaBeans.MethodMajorKeyGet + legKey; Method curGetter = bean.getClass().getMethod( szGetterMethod ); if( curGetter != null ) { recursiveBean = curGetter.invoke( bean ); if( !BeanMapDecoder.trialHomogeneity( recursiveBean ) ) { recursiveBean = null; // Not a bean. } } } if( recursiveBean == null ) { method.invoke( bean, desiredVal ); } else { if( desiredVal instanceof Map ) { this.decode0( recursiveBean, (Map)desiredVal, exceptedKeys, bRecursive ); } } } catch ( IllegalAccessException | InvocationTargetException ignore ) { ignore.printStackTrace(); // Do nothing. } } } } } catch ( Exception ignore ) { ignore.printStackTrace(); // Do nothing. 
} } return bean; } @Override public Object decode( Object bean, Map jo, boolean bRecursive ) { return this.decode( bean, jo, (Set) null, bRecursive ); } @Override public Object decode( Object bean, Map jo, Set exceptedKeys ) { return this.decode( bean, jo, exceptedKeys, false ); } @Override public Object decode( Object bean, Map jo ) { return this.decode( bean, jo, false ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericStructJSONDecoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.lang.reflect.Field; import java.util.Map; import java.util.Set; import com.pinecone.framework.util.StringUtils; public class GenericStructJSONDecoder implements StructJSONDecoder { @SuppressWarnings( "unchecked" ) protected Object decode0( Object struct, Map jo, Set exceptedKeys, boolean bRecursive ) { return this.decode( struct, (Map)jo, exceptedKeys, bRecursive ); } @Override public Object decode( Object struct, Map jo, Set exceptedKeys, boolean bRecursive ) { if( jo == null ) { return struct; } Class klass = struct.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Field[] fields = includeSuperClass ? klass.getFields() : klass.getDeclaredFields(); for( int i = 0; i < fields.length; ++i ) { try { Field field = fields[i]; field.setAccessible( true ); String key = field.getName(); if( !StringUtils.isEmpty( key ) ) { Object desiredVal = jo.get( key ); if( desiredVal == null ) { continue; } else if( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } try { Object recursiveBean = null; if( bRecursive ) { Field curField = struct.getClass().getField( key ); if( curField != null ) { recursiveBean = curField.get( struct ); if( !BeanMapDecoder.trialHomogeneity( recursiveBean ) ) { recursiveBean = null; // Not a struct. } } } if( recursiveBean == null ) { field.set( struct, desiredVal ); } else { if( desiredVal instanceof Map ) { this.decode0( recursiveBean, (Map)desiredVal, exceptedKeys, bRecursive ); } } } catch ( IllegalAccessException | IllegalArgumentException ignore ) { ignore.printStackTrace(); // Do nothing. } } } catch ( Exception ignore ) { ignore.printStackTrace(); // Do nothing. 
} } return struct; } @Override public Object decode( Object struct, Map jo, boolean bRecursive ) { return this.decode( struct, jo, (Set) null, bRecursive ); } @Override public Object decode( Object struct, Map jo, Set exceptedKeys ) { return this.decode( struct, jo, exceptedKeys, false ); } @Override public Object decode( Object struct, Map jo ) { return this.decode( struct, jo, false ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericStructJSONEncoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.io.IOException; import java.io.Writer; import java.lang.reflect.Field; import java.util.Set; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.GenericJSONEncoder; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONEncoder; public class GenericStructJSONEncoder implements StructJSONEncoder { public GenericStructJSONEncoder() { } @Override public String valueJsonify( Object val ) { return JSON.stringify( val ); } @Override public void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { JSONEncoder.BASIC_JSON_ENCODER.write( val, writer, nIndentFactor, nIndentBlankNum ); } @Override public String encode( Object struct, Set exceptedKeys, boolean bAllFields ) { Class klass = struct.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Field[] fields; if( bAllFields ) { fields = klass.getDeclaredFields(); } else { fields = includeSuperClass ? klass.getFields() : klass.getDeclaredFields(); } StringBuilder sb = new StringBuilder( "{" ); for( int i = 0; i < fields.length; ++i ) { try { Field field = fields[i]; field.setAccessible( true ); String key = field.getName(); if( !StringUtils.isEmpty( key ) ) { if( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } Object val; try { val = field.get( struct ); sb.append( '\"' ).append( key ).append( "\":" ); } catch ( IllegalAccessException | IllegalArgumentException e ) { continue; } sb.append( this.valueJsonify( val ) ); sb.append( ',' ); } } catch ( Exception e ) { e.printStackTrace(); // Do nothing. } } if( sb.charAt( sb.length() - 1 ) == ',' ) { sb.deleteCharAt( sb.length() - 1 ); } return sb.append( '}' ).toString(); } @Override public String encode( Object struct ) { return this.encode( struct, (Set) null ); } @Override public void encode( Object struct, Writer writer, int nIndentFactor ) throws IOException { this.encode0( struct, writer, nIndentFactor, 0 ); } protected void encode0( Object struct, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException { Class klass = struct.getClass(); boolean includeSuperClass = klass.getClassLoader() != null; Field[] fields = includeSuperClass ? 
klass.getFields() : klass.getDeclaredFields(); writer.write( "{" ); boolean isFirst = true; for( int i = 0; i < fields.length; ++i ) { try { Field field = fields[i]; field.setAccessible( true ); String key = field.getName(); if( !StringUtils.isEmpty( key ) ) { int nNewIndent = nIndentBlankNum + nIndentFactor; if ( !isFirst ) { writer.write( "," ); } if ( nNewIndent > 0 ) { writer.write('\n'); } GenericJSONEncoder.indentBlank( writer, nNewIndent ); Object val; try { val = field.get( struct ); writer.write( "\"" + key + "\":" ); } catch ( IllegalAccessException | IllegalArgumentException e ) { continue; } this.valueJsonify( val, writer, nIndentFactor, nNewIndent ); isFirst = false; GenericJSONEncoder.indentBlank( writer, nIndentBlankNum ); } } catch ( Exception e ) { e.printStackTrace(); // Do nothing. } } if ( nIndentFactor > 0 ) { writer.write( '\n' ); } writer.write( "}" ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/JSONGet.java ================================================ package com.pinecone.framework.util.json.homotype; import java.lang.annotation.*; @Target({ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface JSONGet { String value() default ""; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/JSONInjector.java ================================================ package com.pinecone.framework.util.json.homotype; import com.pinecone.framework.system.functions.Executable; import com.pinecone.framework.system.functions.Executor; import com.pinecone.framework.system.functions.Function; import com.pinecone.framework.system.homotype.HomoInjector; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.json.*; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; public abstract class JSONInjector implements HomoInjector { public JSONInjector(){ } @Override public boolean isHomogeneity( Object that ) { return JSONInjector.trialHomogeneity( that ); } protected String getFieldName( Field field ){ return field.getName(); } @Override public Object inject( Object data ) throws IllegalArgumentException { return this.inject( data, true ); } @Override public Object inject ( Object that, Object instance ) throws Exception { return this.inject( that, that.getClass(), instance ); } @Override public Object inject ( Object that, Class stereotype, Object instance ) throws Exception { return this.inject( true, true, instance ); } public Object inject ( Object data, boolean bRecursive ) throws IllegalArgumentException { return this.inject( data, bRecursive, null ); } public Object inject ( Object data, boolean bRecursive, Object instance ) throws IllegalArgumentException { if ( data != null ) { if ( JSONInjector.trialHomogeneity( data ) ) { return data; } else if ( data instanceof Map) { return new JSONMaptron((Map)data); } else if ( data instanceof List) { return new JSONArraytron((List) data); } else if ( data instanceof Collection ) { return new JSONArraytron((Collection)data); } else if ( data instanceof JSONString ) { return ( (JSONString) data ).toJSONString(); } else { return this.javaObjectInject( data, bRecursive, instance ); } } return JSON.NULL; } public Object injectArray ( Object data, boolean bRecursive, 
JSONArray instance ) throws IllegalArgumentException { if( data.getClass().getComponentType().isPrimitive() ){ for ( int i = 0; i < Array.getLength(data); i++ ) { instance.put( this.inject( Array.get( data, i ), bRecursive ) ); } } else { for ( Object row : (Object[]) data ) { instance.put( this.inject( row, bRecursive ) ); } } return instance; } public Object injectObject ( Object data, boolean bRecursive, JSONObject instance ) throws IllegalArgumentException { Field[] fields = data.getClass().getDeclaredFields(); for ( Field field : fields ) { ReflectionUtils.makeAccessible( field ); String szKey = this.getFieldName( field ); if( szKey == null ) { continue; } Object value; try{ value = field.get( data ); } catch ( IllegalAccessException e ){ value = null; } instance.embed( this.getFieldName( field ), this.inject( value, bRecursive ) ); } return instance; } public Object javaObjectInject ( Object data, boolean bRecursive, Object instance ) throws IllegalArgumentException { if( data != null ){ if( data.getClass().isArray() ){ JSONArray array = (JSONArray) instance; if( instance == null ) { array = new JSONArraytron(); } return this.injectArray( data, bRecursive, array ); } else if( data.getClass().isEnum() ) { return data.toString(); } else if( data instanceof Method ){ throw new IllegalArgumentException( "Method cannot survive without its mother." ); } else if( data instanceof Runnable ){ return new Executor() { Runnable proto = (Runnable) data; public Runnable reveal(){ return this.proto; } @Override public void execute() throws Exception { this.proto.run(); } }; } else if( data instanceof Callable ){ return new Function() { Callable proto = (Callable) data; public Callable reveal(){ return this.proto; } @Override public Object invoke( Object... obj ) throws Exception { return this.proto.call(); } }; } else { JSONObject object = (JSONObject) instance; if( instance == null ) { object = new JSONMaptron(); } return this.injectObject( data, bRecursive, object ); } } return JSON.NULL; } public static Number inject( Number data ){ return data; } public static Boolean inject( Boolean data ){ return data; } public static String inject( String data ){ return data; } public static Executable inject( Executable data ){ return data; } public static JSONObject inject( JSONObject data ){ return data; } public static JSONArray inject( JSONArray data ){ return data; } public static boolean trialHomogeneity( Object that ) { return that instanceof Number || that instanceof Boolean || that instanceof String || that == JSON.NULL || that instanceof JSONObject || that instanceof JSONArray || that instanceof Executable; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/MapStructure.java ================================================ package com.pinecone.framework.util.json.homotype; import java.lang.annotation.*; @Target({ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface MapStructure { String value() default ""; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/ObjectInjector.java ================================================ package com.pinecone.framework.util.json.homotype; import com.pinecone.framework.system.functions.Executable; import com.pinecone.framework.system.homotype.HomoInjector; import com.pinecone.framework.system.homotype.StereotypicInjector; import 
com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONArray; import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Set; import java.util.Map; import java.util.List; import java.util.Collection; import java.util.LinkedHashMap; import java.util.HashSet; import java.util.Queue; import java.util.LinkedList; import java.util.ArrayList; import java.util.concurrent.Callable; public abstract class ObjectInjector implements HomoInjector, StereotypicInjector { protected Class mType; protected String getFieldName( String szKey ){ return szKey; } public ObjectInjector( Class type ) { this.mType = type; } @SuppressWarnings( "unchecked" ) protected Object inject ( Object that, Class type ) { if ( ObjectInjector.trialHomogeneity( that ) && !type.isEnum() ){ return that; } else if( type == Object.class ){ return that; } else if( that instanceof Executable ){ return this.inject( (Executable) that ); } else if ( that instanceof Collection ){ return this.inject( (Collection) that, type ); } else if ( that instanceof Map ){ return this.inject( (Map) that, type ); } else if ( type.isEnum() ){ try{ return Enum.valueOf( (Class) type, that.toString() ); } catch ( RuntimeException e ) { return that; } } return that; } public static Collection newDefaultCollection( Class type ) { if( type == null || List.class.isAssignableFrom( type ) ){ return new ArrayList(); } else if( Set.class.isAssignableFrom( type ) ){ return new HashSet(); } else if( Queue.class.isAssignableFrom( type ) ){ return new LinkedList(); } else { return new ArrayList(); } } public Object injectArray ( Collection that, Class type, Object instance ) { Class innerType = type.getComponentType(); if( innerType.isPrimitive() ){ int i = 0; for ( Object o : that ) { Array.set( instance, i, this.inject( o, innerType ) ); ++i; } return instance; } else { Object[] objects = (Object[]) instance; int i = 0; for ( Object o : that ) { objects[ i ] = this.inject( o, innerType ); ++i; } return objects; } } public Object injectCollection ( Collection that, Class type, Collection instance ) { for( Object row : that ){ instance.add( this.inject( row, row.getClass() ) ); } return instance; } protected Object inject ( Collection that, Class type ) { if( type != null && type.isAssignableFrom( that.getClass() ) ){ return that; } else if( type == null || Collection.class.isAssignableFrom( type ) ){ Collection instance; if( type == null || type.isInterface() || Prototype.isAbstract( type ) ){ // Interface or abstract type: cannot be instantiated directly, fall back to a default collection. instance = ObjectInjector.newDefaultCollection( type ); } else { try{ instance = (Collection)type.getDeclaredConstructor().newInstance(); } catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) { instance = ObjectInjector.newDefaultCollection( type ); } } return this.injectCollection( that, type, instance ); } else if( type.isArray() ){ Class innerType = type.getComponentType(); Object instance = Array.newInstance( innerType, that.size() ); return this.injectArray( that, type, instance ); } return null; // Unsupported target type; nothing sensible to return.
} public Object inject ( Collection that, Class type, Object instance ) { if( type == JSONArray.class || type == Object.class ){ return that; } else if( type == null || Collection.class.isAssignableFrom( type ) ){ return this.injectCollection( that, type, (Collection) instance ); } else if( type.isArray() ){ return this.injectArray( that, type, instance ); } return null; // Unsupported target type; nothing sensible to return. } protected Object inject ( Map that, Class type ) { if( type != null && type.isAssignableFrom( that.getClass() ) ){ return that; } else if( type == null || Map.class.isAssignableFrom( type ) ){ Map map; if( type == null || type.isInterface() || Prototype.isAbstract( type ) ){ // Interface or abstract type: fall back to LinkedHashMap. map = new LinkedHashMap(); } else { try{ map = (Map)type.getDeclaredConstructor().newInstance(); } catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) { map = new LinkedHashMap(); } } for( Object row : that.entrySet() ){ Map.Entry kv = (Map.Entry) row; map.put( kv.getKey(), this.inject( kv.getValue(), type ) ); } return map; } else { Object obj; try{ Constructor constructor = type.getConstructor(); ReflectionUtils.makeAccessible( constructor ); obj = constructor.newInstance(); } catch ( NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e ) { return null; } return this.inject( that, obj ); } } public Object inject ( Map that, Class type, Object instance ) { for( Object row : that.entrySet() ){ Map.Entry kv = (Map.Entry) row; Field field; try{ field = type.getDeclaredField( this.getFieldName( kv.getKey().toString() ) ); } catch ( NoSuchFieldException e ){ try { field = type.getDeclaredField( kv.getKey().toString() ); } catch ( NoSuchFieldException e1 ){ field = null; } } if( field != null ){ ReflectionUtils.makeAccessible( field ); try { try { Object j = this.inject( kv.getValue() , field.getType() ); field.set( instance, j ); } catch ( IllegalArgumentException e ){ field = null; } } catch ( IllegalAccessException e ){ throw new IllegalStateException(e); // This should never happen. } } } /* Field[] fields = type.getClass().getDeclaredFields(); for ( Field field : fields ) { ReflectionUtils.makeAccessible( field ); try { Object val = that.opt( this.getFieldName( field.getName() ) ); if( val == null ){ val = that.opt( field.getName() ); } try { Object j = this.inject( val , field.getType() ); field.set( type, j ); } catch ( IllegalArgumentException e ){ e.printStackTrace(); } } catch ( IllegalAccessException e ){ throw new IllegalStateException(e); // This should never happen. } }*/ return instance; } public Object inject ( Map that, Object instance ) { if( this.mType != null ) { return this.typeInject( that, instance ); } return this.inject( that, instance.getClass(), instance ); } public Object typeInject ( Map that, Object instance ) { return this.inject( that, this.mType, instance ); } public Object inject ( Collection that ){ return this.inject( that, this.mType ); } public Object inject ( Map that ){ return this.inject( that, this.mType ); } public Object inject ( Executable data ) { Method fn; try { fn = data.getClass().getMethod( "reveal" ); try { return fn.invoke( data ); } catch ( Exception e ){ throw new IllegalArgumentException( "Executable `reveal` function should never be modified.", e ); // The `reveal` method is part of the wrapper contract and must stay intact.
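// Note: `reveal` is an unwrapping convention of this framework, not a JDK API.
// JSONInjector.javaObjectInject wraps a Runnable into an Executor and a Callable
// into a Function, each exposing the original object through a public reveal()
// method; this injector restores the original by invoking that method reflectively,
// and falls through to the data itself when no such method exists.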
} } catch ( NoSuchMethodException e ){ return data; } } public Number inject ( Number data ){ return data; } public Boolean inject ( Boolean data ){ return data; } public String inject ( String data ){ return data; } public Runnable inject ( Runnable data ){ return data; } public Callable inject ( Callable data ){ return data; } public Method inject ( Method data ){ return data; } @Override public Object inject ( Object that ){ return this.inject( that, this.mType ); } @Override public Object inject ( Object that, Object instance ) throws Exception { if( this.mType != null ) { return this.inject( that, this.mType, instance ); } return this.inject( that, instance.getClass(), instance ); } @Override @SuppressWarnings( "unchecked" ) public Object inject ( Object that, Class type, Object instance ) throws Exception { if ( ObjectInjector.trialHomogeneity( that ) && !type.isEnum() ){ return that; } else if( type == Object.class ){ return that; } else if( that instanceof Executable){ return this.inject( (Executable) that ); } else if ( that instanceof Collection ){ return this.inject( (Collection) that, type, instance ); } else if ( that instanceof Map ){ return this.inject( (Map) that, type, instance ); } else if ( type.isEnum() ){ try{ return Enum.valueOf( (Class) type, that.toString() ); } catch ( RuntimeException e ) { return that; } } return that; } @Override public boolean isHomogeneity ( Object that ){ return ObjectInjector.trialHomogeneity( that ); } @Override public Class getStereotype() { return this.mType; } @Override public void setStereotype( Class type ) { this.mType = type; } public static boolean trialHomogeneity( Object that ) { return that instanceof Number || that instanceof Boolean || that instanceof String || that == JSON.NULL || that instanceof Callable || that instanceof Runnable || that instanceof Method; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/StructJSONDecoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.util.Map; import java.util.Set; import com.pinecone.framework.system.prototype.Pinenut; public interface StructJSONDecoder extends Pinenut { StructJSONDecoder BasicDecoder = new GenericStructJSONDecoder(); static boolean trialHomogeneity( Object that ) { return JSONInjector.trialHomogeneity( that ) || that instanceof Map; } Object decode( Object struct, Map jo, Set exceptedKeys, boolean bRecursive ); Object decode( Object struct, Map jo, boolean bRecursive ) ; Object decode( Object struct, Map jo, Set exceptedKeys ); Object decode( Object struct, Map jo ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/StructJSONEncoder.java ================================================ package com.pinecone.framework.util.json.homotype; import java.io.IOException; import java.io.Writer; import java.util.Set; import com.pinecone.framework.system.prototype.Pinenut; public interface StructJSONEncoder extends Pinenut { StructJSONEncoder BasicEncoder = new GenericStructJSONEncoder(); String encode( Object struct ); String encode( Object struct, Set exceptedKeys, boolean bAllFields ); default String encode( Object struct, Set exceptedKeys ) { return this.encode( struct, exceptedKeys, false ); } default String encode( Object struct, boolean bAllFields ) { return this.encode( struct, null, bAllFields ); } void encode( Object struct, Writer 
writer, int nIndentFactor ) throws IOException; default void encode( Object struct, Writer writer ) throws IOException { this.encode( struct, writer, 0 ); } void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException; String valueJsonify( Object val ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/WrappedBeanColonist.java ================================================ package com.pinecone.framework.util.json.homotype; public class WrappedBeanColonist extends ArchBeanColonist { public WrappedBeanColonist() { super(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchClassScopeLoader.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.util.name.MultiNamespace; import com.pinecone.framework.util.name.Name; import java.util.ArrayList; import java.util.List; public abstract class ArchClassScopeLoader implements MultiClassScopeLoader { protected ClassScope mClassScope; protected ClassLoader mClassLoader; protected List<ClassFilter> mIncludeFilters; protected List<ClassFilter> mExcludeFilters; protected ArchClassScopeLoader( ClassScope classScope, ClassLoader classLoader ) { this.mClassScope = classScope; this.mClassLoader = classLoader; this.mIncludeFilters = new ArrayList<>(); this.mExcludeFilters = new ArrayList<>(); } @Override public void addIncludeFilter( ClassFilter includeFilter ) { this.mIncludeFilters.add(includeFilter); } @Override public void addExcludeFilter( ClassFilter excludeFilter ) { this.mExcludeFilters.add(0, excludeFilter); } @Override public void resetFilters( boolean useDefaultFilters ) { this.mIncludeFilters.clear(); this.mExcludeFilters.clear(); if ( useDefaultFilters ) { this.registerDefaultFilters(); } } protected void registerDefaultFilters() { } protected boolean filter( Class clazz ) { for( ClassFilter filter : this.mIncludeFilters ) { if( !filter.match( clazz, this ) ){ return true; } } for( ClassFilter filter : this.mExcludeFilters ) { if( filter.match( clazz, this ) ){ return true; } } return false; } @Override public Class<?> load( Name name ) throws ClassNotFoundException { return (Class<?>) this.loads0( name, true ); } @Override public List<Class<?>> loads( Name name ) { try{ return (List<Class<?>>) this.loads0( name, false ); } catch ( ClassNotFoundException e ) { return null; // This should never happen.
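// Note: loads0 throws ClassNotFoundException only on the bOnlyFirst path used by
// load(Name); when collecting a batch for loads(Name) it never throws, so this
// catch clause is effectively unreachable.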
} } protected List<String> expandNamespace( Name name ) { if( name instanceof MultiNamespace) { return ((MultiNamespace) name).getFullNames(); } return List.of( name.getFullName() ) ; } protected abstract Class<?> loadSingleByFullClassName( String szFullClassName ); protected Object loads0( Name name, boolean bOnlyFirst ) throws ClassNotFoundException { List<Class<?>> batch = null; if( !bOnlyFirst ) { batch = new ArrayList<>(); } List<String> ns = this.expandNamespace( name ); for ( ScopedPackage scope : this.mClassScope.getAllScopes() ) { String className = scope.packageName() + NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR; for( String szNS : ns ) { String szCN = className + szNS; Class<?> ste = this.loadSingleByFullClassName( szCN ); if( bOnlyFirst ) { if( ste == null ) { throw new ClassNotFoundException( "Servgram class not found: " + szCN ); } return ste; } else { if( ste != null ) { batch.add( ste ); } } } } return batch; } protected void handleIgnoreException( Exception e ) throws ProvokeHandleException { // Just ignore them. } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchClassScopeSet.java ================================================ package com.pinecone.framework.util.lang; import java.util.ArrayList; import java.util.List; import java.util.Set; public abstract class ArchClassScopeSet implements ClassScope { protected Set<ScopedPackage> mScopes; protected ClassLoader mClassLoader; protected ArchClassScopeSet( Set<ScopedPackage> scope, ClassLoader classLoader ) { this.mScopes = scope; this.mClassLoader = classLoader; } @Override public void addScope( String szPackageName ) { ScopedPackage pkg = ScopedPackage.defaultInstance( szPackageName, this.mClassLoader ); this.mScopes.add( pkg ); } @Override public void addScope( ScopedPackage scope ) { this.mScopes.add( scope ); } @Override public void removeScope( String szPackageName ) { ScopedPackage that = this.getPackageByName( szPackageName ); if( that != null ) { this.mScopes.remove( that ); } } @Override public void removeScope( ScopedPackage scope ) { this.mScopes.remove( scope ); } @Override public boolean containsScope( String szPackageName ) { return this.getPackageByName( szPackageName ) != null; } @Override public boolean containsScope( ScopedPackage scope ) { return this.mScopes.contains( scope ); } @Override public ScopedPackage getPackageByName( String szPackageName ) { for( ScopedPackage pkg : this.mScopes ) { if( pkg.packageName().equals( szPackageName ) ){ return pkg; } } return null; } @Override public List<ScopedPackage> getAllScopes() { return new ArrayList<>( this.mScopes ); } @Override public List<String> getAllNameScopes() { List<String> list = new ArrayList<>(); for( ScopedPackage pkg : this.mScopes ) { list.add( pkg.packageName() ); } return list; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchDynamicFactory.java ================================================ package com.pinecone.framework.util.lang; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.HashMap; import java.util.Map; public abstract class ArchDynamicFactory implements DynamicFactory { protected static final Map<Class<?>, Class<?>> PrimitiveToWrapper = new HashMap<>(); static { ArchDynamicFactory.PrimitiveToWrapper.put( boolean.class, Boolean.class ); ArchDynamicFactory.PrimitiveToWrapper.put( byte.class, Byte.class ); ArchDynamicFactory.PrimitiveToWrapper.put( char.class, Character.class );
ArchDynamicFactory.PrimitiveToWrapper.put( double.class, Double.class ); ArchDynamicFactory.PrimitiveToWrapper.put( float.class, Float.class ); ArchDynamicFactory.PrimitiveToWrapper.put( int.class, Integer.class ); ArchDynamicFactory.PrimitiveToWrapper.put( long.class, Long.class ); ArchDynamicFactory.PrimitiveToWrapper.put( short.class, Short.class ); ArchDynamicFactory.PrimitiveToWrapper.put( void.class, Void.class ); } protected ClassLoader mClassLoader ; protected ClassScope mClassScope ; protected ArchDynamicFactory( ClassLoader classLoader, ClassScope classScope ) { this.mClassLoader = classLoader ; this.mClassScope = classScope ; } @Override public ClassLoader getClassLoader() { return this.mClassLoader; } @Override public ClassScope getClassScope() { return this.mClassScope; } protected Object beforeInstantiate( Class that, Class[] stereotypes, Object[] args ) { return null; } @Override public Object newInstance ( Class that, Class[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { Object s = this.beforeInstantiate( that, stereotypes, args ); if ( s != null ) { return s; } Constructor[] constructors = that.getConstructors(); boolean bUsingSetAccess = false; if( constructors.length == 0 ) { constructors = that.getDeclaredConstructors(); bUsingSetAccess = true; } for ( Constructor constructor : constructors ) { Class[] paramTypes = constructor.getParameterTypes(); int nArgsLength = 0; if( args != null ) { nArgsLength = args.length; } if ( paramTypes.length == nArgsLength ) { boolean matches = true; for ( int i = 0; i < paramTypes.length; ++i ) { if( stereotypes != null ) { if ( !paramTypes[i].isAssignableFrom( stereotypes[i] ) ) { matches = false; break; } } else { Class paramType = paramTypes[i]; if ( !paramType.isInstance( args[i] ) ) { if( paramType.isPrimitive() ) { Class wrapperType = ArchDynamicFactory.PrimitiveToWrapper.get( paramType ); if ( wrapperType != null && wrapperType.isInstance( args[i] ) ) { continue; } } matches = false; break; } } } if ( matches ) { if( bUsingSetAccess ) { try{ return constructor.newInstance( args ); } catch ( IllegalAccessException e ) { constructor.setAccessible( true ); Object ins = constructor.newInstance( args ); constructor.setAccessible( false ); return ins; } } else { return constructor.newInstance( args ); } } } } return null; } @Override public Object loadInstance( String szClassFullName, Class[] stereotypes, Object[] args ) throws ClassNotFoundException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { Class clazz = this.mClassLoader.loadClass( szClassFullName ); return this.newInstance( clazz, stereotypes, args ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchMultiProtocolNamespaceFetcher.java ================================================ package com.pinecone.framework.util.lang; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.List; public abstract class ArchMultiProtocolNamespaceFetcher implements NamespaceCollector { protected List mCollectors; protected ClassLoader mClassLoader; protected PathNamespaceCollectum mFileAdapter; protected PathNamespaceCollectum mJarAdapter; public ArchMultiProtocolNamespaceFetcher ( List collectors, ClassLoader classLoader ) { this.mCollectors = collectors; this.mClassLoader = classLoader; for( PathNamespaceCollectum collectum : 
collectors ) {
            if( collectum.matched( NamespaceCollector.KEY_FILE_PROTOCOL ) ) {
                this.mFileAdapter = collectum;
            }
            else if( collectum.matched( NamespaceCollector.KEY_JAR_PROTOCOL ) ) {
                this.mJarAdapter = collectum;
            }
        }
    }

    public ArchMultiProtocolNamespaceFetcher ( List<PathNamespaceCollectum> collectors ) {
        this( collectors, Thread.currentThread().getContextClassLoader() );
    }

    public ArchMultiProtocolNamespaceFetcher ( PathNamespaceCollectum fileAdapter, PathNamespaceCollectum jarAdapter, ClassLoader classLoader ) {
        this( new ArrayList<>(), classLoader );
        this.mFileAdapter = fileAdapter;
        this.mJarAdapter  = jarAdapter;
    }

    @Override
    public ClassLoader getClassLoader() {
        return this.mClassLoader;
    }

    @Override
    public void fetch ( String szNSName, List collections, boolean bCollectChildPackage ) {
        this.fetch0( szNSName, collections, bCollectChildPackage );
    }

    @Override
    public String fetchFirst( String szNSName ) {
        return this.fetch0( szNSName, null, false );
    }

    public String fetch0 ( String szNSName, List collections, boolean bCollectChildPackage ) {
        String packagePath = szNSName.replace ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, NamespaceCollector.RESOURCE_NAME_SEPARATOR );
        URL url = this.mClassLoader.getResource ( packagePath );
        if ( url == null ) {
            if( this.mClassLoader instanceof URLClassLoader ) {
                String first = this.fetchByURLs( ((URLClassLoader) this.mClassLoader).getURLs(), szNSName, packagePath, collections, bCollectChildPackage );
                if( collections == null ) {
                    return first;
                }
                // The loader's URLs have already been walked above; with no resource URL
                // left to visit, falling through would dereference a null url.
                return null;
            }
            else {
                return null;
            }
        }
        if( collections != null ) {
            this.fetch( url, szNSName, collections, bCollectChildPackage );
        }
        else {
            return this.fetchFirst( url, szNSName );
        }
        return null;
    }

    public String fetchByURLs( URL[] urls, String szNSName, String szPackagePath, List collections, boolean bCollectChildPackage ) {
        if ( urls != null ) {
            for ( int i = 0; i < urls.length; i++ ) {
                URL url = urls[i];
                String urlPath = url.getPath();
                if ( urlPath.endsWith( "classes/" ) ) {
                    continue;
                }
                String jarPath = urlPath + "!/" + szPackagePath;
                //List subList = UnitUtils.spawnExtendParent( collections );
                if( collections != null ) {
                    this.mJarAdapter.collect( jarPath, szNSName, collections, bCollectChildPackage );
                }
                else {
                    return this.mJarAdapter.collectFirst( jarPath, szNSName );
                }
                //classNames.addAll( subList );
            }
        }
        return null;
    }

    @Override
    public void fetch( URL url, String szNSName, List collections, boolean bCollectChildPackage ) {
        String protocol = url.getProtocol ();
        if ( protocol.equals ( NamespaceCollector.KEY_FILE_PROTOCOL ) ) {
            this.mFileAdapter.collect ( url.getPath (), szNSName, collections, bCollectChildPackage );
        }
        else if ( protocol.equals ( NamespaceCollector.KEY_JAR_PROTOCOL ) ) {
            this.mJarAdapter.collect ( url.getPath (), szNSName, collections, bCollectChildPackage );
        }
        else {
            for( PathNamespaceCollectum collectum : this.mCollectors ) {
                if( collectum.matched( protocol ) ) {
                    collectum.collect( url.getPath (), szNSName, collections, bCollectChildPackage );
                }
            }
        }
    }

    @Override
    public String fetchFirst( URL url, String szNSName ) {
        String protocol = url.getProtocol ();
        if ( protocol.equals ( NamespaceCollector.KEY_FILE_PROTOCOL ) ) {
            return this.mFileAdapter.collectFirst ( url.getPath (), szNSName );
        }
        else if ( protocol.equals ( NamespaceCollector.KEY_JAR_PROTOCOL ) ) {
            return this.mJarAdapter.collectFirst ( url.getPath (), szNSName );
        }
        else {
            for( PathNamespaceCollectum collectum : this.mCollectors ) {
                if( collectum.matched( protocol ) ) {
                    return collectum.collectFirst( url.getPath (), szNSName );
                }
            }
        }
        return null;
    }
}
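A brief usage sketch for the fetcher family above. ClassNameFetcher and PackageNameFetcher (both built on ArchMultiProtocolNamespaceFetcher) resolve a package name against the class loader's file: and jar: resources. This is illustrative only: the package name com.example.app is a placeholder, and the List<String> element type is inferred from how the adapters collect name strings.

import java.util.List;

import com.pinecone.framework.util.lang.ClassNameFetcher;
import com.pinecone.framework.util.lang.NamespaceCollector;
import com.pinecone.framework.util.lang.PackageNameFetcher;

public class FetcherSketch {
    public static void main( String[] args ) {
        // Collect fully qualified class names under a placeholder package,
        // using the current thread's context class loader.
        NamespaceCollector classFetcher = new ClassNameFetcher();
        List<String> classNames = classFetcher.fetch( "com.example.app", true ); // true: include child packages

        // Probe for the first resolvable class name only.
        String first = classFetcher.fetchFirst( "com.example.app" );

        // Enumerate immediate child packages.
        NamespaceCollector packageFetcher = new PackageNameFetcher();
        List<String> childPackages = packageFetcher.fetch( "com.example.app", false );

        System.out.println( classNames );
        System.out.println( first );
        System.out.println( childPackages );
    }
}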
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassCandidateScanner.java ================================================ package com.pinecone.framework.util.lang; public class ClassCandidateScanner extends ObjectCandidateScanner { public ClassCandidateScanner ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory ) { super( searchScope, classLoader, iteratorsFactory ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassFilter.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; public interface ClassFilter extends Pinenut { boolean match( Class clazz, ClassScopeLoader loader ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassNameFetcher.java ================================================ package com.pinecone.framework.util.lang; import java.util.List; public class ClassNameFetcher extends ArchMultiProtocolNamespaceFetcher implements NamespaceCollector { protected PathNamespaceCollectum mFileAdapter; protected PathNamespaceCollectum mJarAdapter; public ClassNameFetcher ( List collectors, ClassLoader classLoader ) { super( collectors, classLoader ); } public ClassNameFetcher ( List collectors ) { super( collectors ); } public ClassNameFetcher ( PathNamespaceCollectum fileAdapter, PathNamespaceCollectum jarAdapter, ClassLoader classLoader ) { super( fileAdapter, jarAdapter, classLoader ); } public ClassNameFetcher ( ClassLoader classLoader ) { this( new FileClassCollectorAdapter(), new JarClassCollectorAdapter(), classLoader ); } public ClassNameFetcher () { this( Thread.currentThread().getContextClassLoader() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScanner.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.util.lang.iterator.NamespaceIterator; import java.io.IOException; import java.util.List; public interface ClassScanner extends ObjectScanner { void addIncludeFilter ( TypeFilter filter ); void addExcludeFilter ( TypeFilter filter ); void addIterator ( NamespaceIterator classIter, NamespaceIterator packageIter ) ; void scan( String szNSName, boolean bCollectChildPackage, List candidates ) throws IOException ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScope.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; import java.util.List; public interface ClassScope extends Pinenut { void addScope ( String szPackageName ); void addScope ( ScopedPackage scope ); void removeScope ( String szPackageName ); void removeScope ( ScopedPackage scope ); boolean containsScope ( String szPackageName ); boolean containsScope ( ScopedPackage scope ); ScopedPackage getPackageByName ( String szPackageName ); List getAllScopes (); List getAllNameScopes (); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScopeLoader.java ================================================ package com.pinecone.framework.util.lang; 
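// Contract: resolve a class by its simple Name against the registered package scopes
// (compare the scope-searching loads0(...) logic above) and load the first match;
// clearCache() discards whatever resolution results an implementation has memoized.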
import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.name.Name; public interface ClassScopeLoader extends Pinenut { Class load ( Name simpleName ) throws ClassNotFoundException ; void clearCache(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScopeNSProtocolIteratorsFactory.java ================================================ package com.pinecone.framework.util.lang; public class ClassScopeNSProtocolIteratorsFactory extends GenericScopeNSProtocolIteratorsFactory { public ClassScopeNSProtocolIteratorsFactory( ClassLoader classLoader, ClassScope searchScope ) { super( classLoader, searchScope, ".class" ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/DynamicFactory.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Factory; import com.pinecone.framework.util.name.Name; import java.lang.reflect.InvocationTargetException; import java.util.List; public interface DynamicFactory extends Factory { DynamicFactory DefaultFactory = new GenericDynamicFactory(); @Override ClassLoader getClassLoader(); ClassScope getClassScope(); Object loadInstance ( String szClassFullName, Class[] stereotypes, Object[] args ) throws ClassNotFoundException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException; Object newInstance ( Class that, Class[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException; // No exception, but with null. default Object optNewInstance ( Class that, Class[] stereotypes, Object[] args ) { try{ return this.newInstance( that, stereotypes, args ); } catch ( InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { return null; } } // No exception, but with null. default Object optLoadInstance ( String szClassFullName, Class[] stereotypes, Object[] args ) { try{ return this.loadInstance( szClassFullName, stereotypes, args ); } catch ( ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { return null; } } // No exception, but with null. default Object optLoadInstance ( Name classFullName, Class[] stereotypes, Object[] args ) { return this.optLoadInstance( classFullName.getFullName(), stereotypes, args ); } // No exception, but with null. default Object optNewInstance ( Class that, Object[] args ) { return this.optNewInstance( that, null, args ); } // No exception, but with null. default Object optLoadInstance ( String szClassFullName, Object[] args ) { return this.optLoadInstance( szClassFullName, null, args ); } // No exception, but with null. default Object optLoadInstance ( Name classFullName, Object[] args ) { return this.optLoadInstance( classFullName.getFullName(), args ); } // No exception, but with null. default Object optLoadInstanceFromScope ( String szClassSimpleName, Class[] stereotypes, Object[] args ) { ClassScope scope = this.getClassScope(); List nss = scope.getAllNameScopes(); for( String ns : nss ) { if( !ns.endsWith( "." 
) ) {
                ns = ns + ".";
            }
            Object neo = this.optLoadInstance( ns + szClassSimpleName, stereotypes, args );
            if( neo != null ){
                return neo;
            }
        }
        return null;
    }

    default Object optLoadInstanceFromScope ( String szClassSimpleName, Object[] args ) {
        // Delegate to the scope-aware overload above; delegating to optLoadInstance here
        // would silently skip the class-scope search this method is named for.
        return this.optLoadInstanceFromScope( szClassSimpleName, null, args );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/FileClassCollectorAdapter.java
================================================
package com.pinecone.framework.util.lang;

import java.io.File;
import java.util.List;

public class FileClassCollectorAdapter implements PathNamespaceCollectum {
    @Override
    public boolean matched ( String szProtocol ) {
        return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_FILE_PROTOCOL );
    }

    @Override
    public void collect ( String szResourcePath, String szNSName, List classNames, boolean bCollectChildren ) {
        this.collect0( szResourcePath, szNSName, classNames, bCollectChildren );
    }

    @Override
    public String collectFirst ( String szResourcePath, String szNSName ) {
        return this.collect0( szResourcePath, szNSName, null, false );
    }

    protected String collect0 ( String szResourcePath, String szNSName, List classNames, boolean bCollectChildren ) {
        File file = new File( szResourcePath );
        File[] childFiles = file.listFiles ();
        if( childFiles != null ) {
            String szPackageNamePathFmt = szNSName.replace( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, File.separator );
            for ( File childFile : childFiles ) {
                if ( childFile.isDirectory () ) {
                    if ( bCollectChildren && classNames != null ) {
                        this.collect ( childFile.getPath (), szNSName, classNames, bCollectChildren );
                    }
                }
                else {
                    String childFilePath = childFile.getPath ();
                    if ( childFilePath.endsWith ( ".class" ) ) {
                        String szPackageSegment = childFilePath.substring( childFilePath.indexOf( szPackageNamePathFmt ) );
                        String szChildPackage = szPackageSegment.replace( File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).replace( ".class", "" );
                        if( classNames == null ) {
                            return szChildPackage;
                        }
                        else {
                            classNames.add ( szChildPackage );
                        }
                    }
                }
            }
        }
        return null;
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/FilePackageCollectorAdapter.java
================================================
package com.pinecone.framework.util.lang;

import java.io.File;
import java.util.List;

public class FilePackageCollectorAdapter implements PathNamespaceCollectum {
    @Override
    public boolean matched( String szProtocol ) {
        return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_FILE_PROTOCOL );
    }

    @Override
    public void collect ( String szResourcePath, String szNSName, List packageNames, boolean bCollectChildren ) {
        this.collect0( szResourcePath, szNSName, packageNames, bCollectChildren );
    }

    @Override
    public String collectFirst ( String szResourcePath, String szNSName ) {
        return this.collect0( szResourcePath, szNSName, null, false );
    }

    protected String collect0 ( String szResourcePath, String szNSName, List packageNames, boolean bCollectChildren ) {
        File file = new File ( szResourcePath );
        File[] childFiles = file.listFiles ();
        if( childFiles != null ) {
            String szPackageNamePathFmt = szNSName.replace( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, File.separator );
            for ( File childFile : childFiles ) {
                if ( childFile.isDirectory () ) {
                    String szPackageSegment = childFile.getPath ().substring( childFile.getPath ().indexOf( szPackageNamePathFmt ) );
                    String szChildPackage = szPackageSegment.replace(
File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ); if( packageNames == null ) { return szChildPackage; } else { packageNames.add ( szChildPackage ); } if ( bCollectChildren ) { this.collect ( childFile.getPath (), szPackageSegment, packageNames, bCollectChildren ); } } } } return null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/GenericClassScopeSet.java ================================================ package com.pinecone.framework.util.lang; import java.util.LinkedHashSet; import java.util.Set; public class GenericClassScopeSet extends ArchClassScopeSet { public GenericClassScopeSet( Set scope, ClassLoader classLoader ) { super( scope, classLoader ); } public GenericClassScopeSet( ClassLoader classLoader ) { this( new LinkedHashSet<>(), classLoader ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/GenericDynamicFactory.java ================================================ package com.pinecone.framework.util.lang; public class GenericDynamicFactory extends ArchDynamicFactory { public GenericDynamicFactory( ClassLoader classLoader, ClassScope classScope ) { super( classLoader, classScope ); } public GenericDynamicFactory( ClassLoader classLoader ) { this( classLoader, new GenericClassScopeSet( classLoader ) ); } public GenericDynamicFactory() { this( Thread.currentThread().getContextClassLoader() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/GenericScopeNSProtocolIteratorsFactory.java ================================================ package com.pinecone.framework.util.lang; import java.io.IOException; import java.net.URL; import java.util.Enumeration; import java.util.List; import com.pinecone.framework.util.lang.iterator.DirectoryFileIterator; import com.pinecone.framework.util.lang.iterator.DirectoryPackageIterator; import com.pinecone.framework.util.lang.iterator.JarFileIterator; import com.pinecone.framework.util.lang.iterator.JarPackageIterator; public class GenericScopeNSProtocolIteratorsFactory implements NSProtocolIteratorsFactoryAdapter { protected ClassLoader mClassLoader ; protected ClassScope mSearchScope ; protected String mszSuffix ; public GenericScopeNSProtocolIteratorsFactory( ClassLoader classLoader, ClassScope searchScope, String szSuffix ) { this.mClassLoader = classLoader; this.mSearchScope = searchScope; this.mszSuffix = szSuffix; } protected NamespaceIteratorPair newIteratorPair ( URL url, String szNSName ) throws IOException { String protocol = url.getProtocol (); if ( protocol.equals ( NamespaceCollector.KEY_FILE_PROTOCOL ) ) { return new NamespaceIteratorPair( new DirectoryFileIterator( url.getPath (), szNSName, this.mszSuffix ), new DirectoryPackageIterator( url.getPath (), szNSName, this.mszSuffix ) ); } else if ( protocol.equals ( NamespaceCollector.KEY_JAR_PROTOCOL ) ) { return new NamespaceIteratorPair( new JarFileIterator( url.getPath (), this.mszSuffix ), new JarPackageIterator( url.getPath (), this.mszSuffix ) ); } return null; } @Override public void prepareScopeIterators ( String szNSName, List pairs ) throws IOException { List scope = null; if( this.mSearchScope != null ) { scope = this.mSearchScope.getAllScopes(); for( ScopedPackage pkg : scope ) { this.prepareIterators( pkg.packageName() + NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR + szNSName, pairs ); } } if( this.mSearchScope == null || 
scope.isEmpty() ) { this.prepareIterators( szNSName, pairs ); } } @Override public void prepareIterators ( String szNSName, List pairs ) throws IOException { String packagePath = szNSName.replace ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, NamespaceCollector.RESOURCE_NAME_SEPARATOR ); Enumeration resources = this.mClassLoader.getResources( packagePath ); if ( !resources.hasMoreElements() ) { return; } while ( resources.hasMoreElements() ) { URL url = resources.nextElement(); NamespaceIteratorPair pair = this.newIteratorPair( url, szNSName ); if( pair != null ) { pairs.add( pair ); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/InnerMetadataReader.java ================================================ package com.pinecone.framework.util.lang; public class InnerMetadataReader implements MetadataReader { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/JarClassCollectorAdapter.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.lang.iterator.JarEntryIterator; import java.io.IOException; import java.util.Enumeration; import java.util.List; import java.util.jar.JarEntry; public class JarClassCollectorAdapter implements PathNamespaceCollectum { @Override public boolean matched( String szProtocol ) { return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_JAR_PROTOCOL ); } @Override public void collect ( String szResourcePath, String szPackageName, List classNames, boolean bCollectChildren ) { this.collect0( szResourcePath, szPackageName, classNames, bCollectChildren ); } @Override public String collectFirst ( String szResourcePath, String szPackageName ) { return this.collect0( szResourcePath, szPackageName, null, false ); } public String collect0 ( String szResourcePath, String szPackageName, List classNames, boolean bCollectChildren ) { try { JarEntryIterator iterator = new JarEntryIterator( szResourcePath, ".class" ); Enumeration entries = iterator.entries (); String packagePath = iterator.getPackagePath(); String classesScopePath = iterator.getClassesScopePath(); while ( entries.hasMoreElements () ) { JarEntry jarEntry = entries.nextElement (); String entryName = jarEntry.getName (); if ( entryName.endsWith ( ".class" ) ) { if ( bCollectChildren && classNames != null ) { // [@Harald Notice] No need for recursion, for JAR files, this flag is usually processed in a tiled manner if ( entryName.startsWith ( packagePath ) ) { entryName = entryName.replace ( NamespaceCollector.RESOURCE_NAME_SEPARATOR, "." ).substring ( 0, entryName.lastIndexOf ( "." 
) ); classNames.add ( entryName ); } } else { int index = entryName.lastIndexOf ( NamespaceCollector.RESOURCE_NAME_SEPARATOR ); String myPackagePath; if ( index != -1 ) { myPackagePath = entryName.substring ( 0, index ); } else { myPackagePath = entryName; } boolean bQualified = false; if( classesScopePath == null ) { if( myPackagePath.equals( packagePath ) ) { bQualified = true; } } else { if ( myPackagePath.startsWith( classesScopePath ) && myPackagePath.endsWith( packagePath ) ) { bQualified = true; } } if ( bQualified ) { entryName = JarUtils.normalizeJarClassName( entryName, classesScopePath ); if( classNames == null ) { return entryName; } else { classNames.add ( entryName ); } } } } } } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } return null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/JarPackageCollectorAdapter.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.lang.iterator.JarEntryIterator; import java.io.IOException; import java.util.Enumeration; import java.util.List; import java.util.jar.JarEntry; public class JarPackageCollectorAdapter implements PathNamespaceCollectum { @Override public boolean matched( String szProtocol ) { return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_JAR_PROTOCOL ); } @Override public void collect ( String szResourcePath, String szPackageName, List packageNames, boolean bCollectChildren ) { this.collect0( szResourcePath, szPackageName, packageNames, bCollectChildren ); } @Override public String collectFirst ( String szResourcePath, String szNSName ) { return this.collect0( szResourcePath, szNSName, null, false ); } public String collect0 ( String szResourcePath, String szNSName, List packageNames, boolean bCollectChildren ) { try { JarEntryIterator iterator = new JarEntryIterator( szResourcePath, ".class" ); Enumeration entries = iterator.entries (); String packagePath = iterator.getPackagePath(); String classesScopePath = iterator.getClassesScopePath(); while ( entries.hasMoreElements () ) { JarEntry jarEntry = entries.nextElement (); String entryName = jarEntry.getName (); if( jarEntry.isDirectory() ) { if( classesScopePath != null && entryName.startsWith( classesScopePath ) ) { entryName = entryName.replace( classesScopePath, "" ); } if ( bCollectChildren && packageNames != null ) { // [@Harald Notice] No need for recursion, for JAR files, this flag is usually processed in a tiled manner if ( entryName.startsWith ( packagePath ) && !entryName.equals( packagePath + NamespaceCollector.RESOURCE_NAME_SEPARATOR ) ) { entryName = entryName.replace ( NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ); entryName = entryName.substring ( 0, entryName.lastIndexOf ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ) ); packageNames.add ( entryName ); } } else { //Debug.trace( jarEntry.getName(),packagePath ); if ( entryName.startsWith ( packagePath ) ) { String childSegment = entryName.substring ( packagePath.length() ); if( StringUtils.countOccurrencesOf( childSegment, NamespaceCollector.RESOURCE_NAME_SEPARATOR, 3 ) > 2 ) { continue; } if( entryName.equals( packagePath + NamespaceCollector.RESOURCE_NAME_SEPARATOR ) ) { // Self path continue; } entryName = entryName.replace ( 
NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ); entryName = entryName.substring ( 0, entryName.lastIndexOf ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ) ); if( packageNames == null ) { return entryName; } else { packageNames.add ( entryName ); } } } } } } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } return null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/JarUtils.java ================================================ package com.pinecone.framework.util.lang; import java.io.IOException; import java.util.Enumeration; import java.util.Iterator; import java.util.LinkedList; import java.util.jar.JarEntry; import java.util.jar.JarInputStream; public final class JarUtils { public static Enumeration fetchEnumeration( JarInputStream jarInputStream ) throws IOException { LinkedList buf = new LinkedList<>(); JarEntry jarEntry; while ( ( jarEntry = jarInputStream.getNextJarEntry() ) != null ) { buf.add( jarEntry ); jarInputStream.closeEntry(); } return new Enumeration() { private Iterator iterator = buf.iterator(); @Override public boolean hasMoreElements() { return this.iterator.hasNext(); } @Override public JarEntry nextElement() { return this.iterator.next(); } }; } public static String normalizeJarClassName( String entryName, String classesScopePath ) { if( classesScopePath != null && entryName.startsWith( classesScopePath ) ) { entryName = entryName.replace( classesScopePath, "" ); } return entryName.replace( NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).substring( 0, entryName.lastIndexOf( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR_C ) ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/LazyScopedPackage.java ================================================ package com.pinecone.framework.util.lang; import java.util.ArrayList; import java.util.List; public class LazyScopedPackage implements ScopedPackage { protected String mszPackageName; protected ClassLoader mClassLoader; protected NamespaceCollector mPackageCollector; protected NamespaceCollector mClassCollector; protected ScopedPackage mParent; protected List mChildren; protected Package mPackage; public LazyScopedPackage( String packageName, ScopedPackage parent, ClassLoader classLoader, NamespaceCollector packageCollector, NamespaceCollector classCollector ) { this.mParent = parent; this.mszPackageName = packageName; this.mClassLoader = classLoader; this.mPackageCollector = packageCollector; this.mClassCollector = classCollector; } public LazyScopedPackage( String packageName, ScopedPackage parent, ClassLoader classLoader ) { this( packageName, parent, classLoader, null, null ); this.mPackageCollector = new PackageNameFetcher( this.mClassLoader ); } public LazyScopedPackage( String packageName, ScopedPackage parent ) { this( packageName, parent, Thread.currentThread().getContextClassLoader() ); } public LazyScopedPackage( String packageName, ClassLoader classLoader ) { this( packageName, null, classLoader ); } public LazyScopedPackage( String packageName ) { this( packageName, (ScopedPackage) null ); } @Override public String parentName() { int lastDotIndex = mszPackageName.lastIndexOf('.'); if ( lastDotIndex == -1 ) { return null; } return mszPackageName.substring( 0, lastDotIndex ); } @Override public ScopedPackage parent() { if( this.mParent == null ) { String 
parentName = this.parentName(); if ( parentName == null ) { return null; } this.mParent = new LazyScopedPackage( parentName, null, this.mClassLoader, this.mPackageCollector, this.mClassCollector ); } return this.mParent; } @Override public List children() { if( this.mChildren == null ) { this.mChildren = new ArrayList<>(); List namesList = this.getPackageCollector().fetch( this.packageName(), false ); for( String name : namesList ) { this.mChildren.add( new LazyScopedPackage( name, this, this.mClassLoader, this.mPackageCollector, this.mClassCollector ) ); } } return this.mChildren; } @Override public List fetchChildrenNames() { List children = this.mChildren; if( children == null ) { children = this.children(); } List namesList = new ArrayList<>(); for( ScopedPackage scopedPackage : children ) { namesList.add( scopedPackage.packageName() ); } return namesList; } @Override public List fetchChildrenClassNames() { if( this.mClassCollector == null ) { this.mClassCollector = new ClassNameFetcher( this.getClassLoader() ); } return this.mClassCollector.fetch( this.packageName(), false ); } @Override public String fetchFirstClassName() { if( this.mClassCollector == null ) { this.mClassCollector = new ClassNameFetcher( this.getClassLoader() ); } return this.mClassCollector.fetchFirst( this.packageName() ); } @Override public String packageName() { return mszPackageName; } @Override public ClassLoader getClassLoader() { return this.mClassLoader; } @Override public NamespaceCollector getPackageCollector() { return this.mPackageCollector; } @Override public boolean hasLoaded() { if( this.mPackage != null ) { return true; } Package pkg = this.getClassLoader().getDefinedPackage( this.packageName() ); if ( pkg != null ) { this.mPackage = pkg; return true; } return false; } @Override public Package tryLoad() { if( this.mPackage != null ) { return this.mPackage; } String szFirstClass = this.fetchFirstClassName(); if( szFirstClass != null ) { try{ Class cls = this.mClassLoader.loadClass( szFirstClass ); if( cls == null ) { return null; } this.mPackage = this.getPackage(); } catch ( Exception e ) { this.mPackage = null; } } return this.mPackage; } @Override public Package getPackage() { if( this.mPackage == null ) { this.mPackage = this.getClassLoader().getDefinedPackage( this.packageName() ); } return this.mPackage; } @Override public String toString() { return this.mszPackageName; } @Override public String toJSONString() { return "\"" + this.toString() + "\""; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/MetadataReader.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; public interface MetadataReader extends Pinenut { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/MultiClassScopeLoader.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.util.name.Name; import java.util.List; public interface MultiClassScopeLoader extends ClassScopeLoader { List loads( Name name ) ; void addIncludeFilter( ClassFilter includeFilter ) ; void addExcludeFilter( ClassFilter excludeFilter ) ; void resetFilters ( boolean useDefaultFilters ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/NSProtocolIteratorsFactoryAdapter.java 
================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; import java.io.IOException; import java.util.List; public interface NSProtocolIteratorsFactoryAdapter extends Pinenut { void prepareScopeIterators ( String szNSName, List pairs ) throws IOException; void prepareIterators ( String szNSName, List pairs ) throws IOException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/NamespaceCollector.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; import java.net.URL; import java.util.ArrayList; import java.util.List; public interface NamespaceCollector extends Pinenut { String KEY_FILE_PROTOCOL = "file"; String KEY_JAR_PROTOCOL = "jar"; String RESOURCE_NAME_SEPARATOR = "/"; String JAVA_PKG_CLASS_SEPARATOR = "."; char JAVA_PKG_CLASS_SEPARATOR_C = '.'; default List fetch ( String szNSName ) { return this.fetch( szNSName, true ); } default List fetch ( String szNSName, boolean bCollectChildPackage ) { List list = new ArrayList<>(); this.fetch( szNSName, list,bCollectChildPackage ); return list; } default void fetch ( String szNSName, List collections ) { this.fetch( szNSName, collections, true ); } void fetch ( String szNSName, List collections, boolean bCollectChildPackage ) ; String fetchFirst ( String szNSName ) ; default List fetch ( URL url, String szNSName ) { return this.fetch( url, szNSName, true ); } default List fetch ( URL url, String szNSName, boolean bCollectChildPackage ) { List list = new ArrayList<>(); this.fetch( url, szNSName, list,bCollectChildPackage ); return list; } default void fetch ( URL url, String szNSName, List collections ) { this.fetch( url, szNSName, collections, true ); } void fetch ( URL url, String szNSName, List collections, boolean bCollectChildPackage ) ; String fetchFirst ( URL url, String szNSName ) ; ClassLoader getClassLoader(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/NamespaceIteratorPair.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.lang.iterator.NamespaceIterator; public class NamespaceIteratorPair implements Pinenut { public NamespaceIterator classIter; public NamespaceIterator packageIter; NamespaceIteratorPair( NamespaceIterator classIter, NamespaceIterator packageIter ) { this.classIter = classIter; this.packageIter = packageIter; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ObjectCandidateScanner.java ================================================ package com.pinecone.framework.util.lang; import java.io.IOException; import java.util.ArrayList; import java.util.List; import com.pinecone.framework.util.lang.iterator.NamespaceIterator; public class ObjectCandidateScanner implements ClassScanner { protected ClassLoader mClassLoader ; protected ClassScope mSearchScope ; protected List mIncludeFilters ; protected List mExcludeFilters ; protected List mIterators ; protected NSProtocolIteratorsFactoryAdapter mIteratorsFactory ; public ObjectCandidateScanner ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory ) { this.mSearchScope = searchScope ; 
this.mClassLoader = classLoader ;
        this.mIncludeFilters   = new ArrayList<>() ;
        this.mExcludeFilters   = new ArrayList<>() ;
        this.mIterators        = new ArrayList<>() ;
        this.mIteratorsFactory = iteratorsFactory ;
    }

    public ObjectCandidateScanner ( ClassScope searchScope, ClassLoader classLoader ) {
        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ) );
    }

    @Override
    public void addIncludeFilter ( TypeFilter filter ) {
        this.mIncludeFilters.add( filter );
    }

    @Override
    public void addExcludeFilter ( TypeFilter filter ) {
        this.mExcludeFilters.add( filter );
    }

    @Override
    public void addIterator ( NamespaceIterator classIter, NamespaceIterator packageIter ) {
        this.addIterator( new NamespaceIteratorPair( classIter, packageIter ) );
    }

    protected void addIterator ( NamespaceIteratorPair iteratorPair ) {
        this.mIterators.add( iteratorPair );
    }

    @Override
    public void scan( String szNSName, boolean bCollectChildPackage, List candidates ) throws IOException {
        if ( this.mIterators.isEmpty() ) {
            this.mIteratorsFactory.prepareScopeIterators( szNSName, this.mIterators );
        }
        this.scan0( this.mIterators, bCollectChildPackage, candidates );
    }

    protected void scan0( List<NamespaceIteratorPair> pairs, boolean bCollectChildPackage, List<String> candidates ) throws IOException {
        for ( NamespaceIteratorPair pair : pairs ) {
            NamespaceIterator classIter = pair.classIter;
            NamespaceIterator pkgIter   = pair.packageIter;
            while ( classIter.hasNext() ) {
                String szClassName = classIter.next();
                if ( !this.filter( szClassName ) ) {
                    candidates.add( szClassName );
                }
            }
            if ( bCollectChildPackage ) {
                while ( pkgIter.hasNext() ) {
                    String szPackageName = pkgIter.next();
                    List<NamespaceIteratorPair> children = new ArrayList<>();
                    this.mIteratorsFactory.prepareIterators( szPackageName, children );
                    if ( !children.isEmpty() ) {
                        this.scan0( children, bCollectChildPackage, candidates );
                    }
                }
            }
        }
        // Clear the iterator cache, to prevent an unexpected next iteration.
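        // The iterator pairs are single-pass: once a NamespaceIterator is drained it cannot
        // be rewound, so leaving consumed pairs cached would make a later scan() silently
        // iterate stale, exhausted entries.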
pairs.clear(); } protected boolean filter( String szClassName ) { try { for ( TypeFilter filter : this.mIncludeFilters ) { if ( filter.match( szClassName, null ) ) { return false; } } for ( TypeFilter filter : this.mExcludeFilters ) { if ( filter.match( szClassName, null ) ) { return true; } } } catch ( IOException e ) { return true; } return false; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ObjectScanner.java ================================================ package com.pinecone.framework.util.lang; import java.io.IOException; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.lang.iterator.NamespaceIterator; public interface ObjectScanner extends Pinenut { void addIncludeFilter ( TypeFilter filter ); void addExcludeFilter ( TypeFilter filter ); void addIterator ( NamespaceIterator classIter, NamespaceIterator packageIter ) ; void scan( String szNSName, boolean bCollectChildPackage, List candidates ) throws IOException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/PackageNameFetcher.java ================================================ package com.pinecone.framework.util.lang; import java.util.List; public class PackageNameFetcher extends ArchMultiProtocolNamespaceFetcher implements NamespaceCollector { protected PathNamespaceCollectum mFileAdapter; protected PathNamespaceCollectum mJarAdapter; public PackageNameFetcher ( List collectors, ClassLoader classLoader ) { super( collectors, classLoader ); } public PackageNameFetcher ( List collectors ) { super( collectors ); } public PackageNameFetcher ( PathNamespaceCollectum fileAdapter, PathNamespaceCollectum jarAdapter, ClassLoader classLoader ) { super( fileAdapter, jarAdapter, classLoader ); } public PackageNameFetcher ( ClassLoader classLoader ) { this( new FilePackageCollectorAdapter(), new JarPackageCollectorAdapter(), classLoader ); } public PackageNameFetcher () { this( Thread.currentThread().getContextClassLoader() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/PathNamespaceCollectum.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; import java.util.List; public interface PathNamespaceCollectum extends Pinenut { boolean matched ( String szProtocol ); void collect ( String szResourcePath, String szPackageName, List classNames, boolean bCollectChildren ); String collectFirst ( String szResourcePath, String szPackageName ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ScopedPackage.java ================================================ package com.pinecone.framework.util.lang; import com.pinecone.framework.system.prototype.Pinenut; import java.util.List; public interface ScopedPackage extends Pinenut, Comparable { String parentName() ; ScopedPackage parent() ; List children() ; List fetchChildrenNames() ; String packageName() ; ClassLoader getClassLoader() ; NamespaceCollector getPackageCollector() ; List fetchChildrenClassNames(); String fetchFirstClassName(); Package getPackage(); boolean hasLoaded(); Package tryLoad(); static ScopedPackage defaultInstance( String packageName, ClassLoader classLoader ) { return new LazyScopedPackage( packageName, classLoader ); 
}

    static ScopedPackage defaultInstance( String packageName ) {
        return new LazyScopedPackage( packageName );
    }

    @Override
    default int compareTo( ScopedPackage o ){
        return this.packageName().compareTo( o.packageName() );
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/TypeFilter.java
================================================
package com.pinecone.framework.util.lang;

import com.pinecone.framework.system.prototype.Pinenut;

import java.io.IOException;

public interface TypeFilter extends Pinenut {
    boolean match( String szClassName, Object pool ) throws IOException;
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/ArchJarEntryIterator.java
================================================
package com.pinecone.framework.util.lang.iterator;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.util.lang.JarUtils;
import com.pinecone.framework.util.lang.NamespaceCollector;

import java.io.IOException;
import java.util.LinkedList;
import java.util.Deque;
import java.util.Enumeration;
import java.util.NoSuchElementException;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;

public abstract class ArchJarEntryIterator implements Pinenut {
    protected JarFile                mJarFile;
    protected Enumeration<JarEntry>  mEntries;
    protected String                 mPackagePath;
    protected String                 mClassesScopePath = null; // e.g. BOOT-INF/lib/
    protected JarEntry               mCurrentEntry;
    protected String                 mszSuffix ;

    public ArchJarEntryIterator( String szResourcePath, String szSuffix ) throws IOException {
        this.mszSuffix = szSuffix;
        String[] jarInfo = szResourcePath.split ( "!" );
        String jarFilePath = jarInfo[0].substring ( jarInfo[0].indexOf ( NamespaceCollector.RESOURCE_NAME_SEPARATOR ) );
        this.mJarFile = new JarFile( jarFilePath );

        boolean bUsingFile = true;
        String packagePath = szResourcePath;
        if( jarInfo.length > 1 ) {
            String szCurrentFragName = jarInfo[1].substring(1);
            if( jarInfo.length > 2 && szCurrentFragName.endsWith(".jar") ) {
                ZipEntry zipEntry = this.mJarFile.getEntry( szCurrentFragName );
                if( zipEntry == null ) {
                    throw new IOException( "Illegal resource path: " + szResourcePath );
                }
                Deque<JarInputStream> streamStack = new LinkedList<>();
                JarInputStream jarInputStream = new JarInputStream( this.mJarFile.getInputStream( zipEntry ) );
                streamStack.addFirst( jarInputStream );
                try{
                    if( jarInfo.length > 3 ) {
                        for ( int i = 2; i < jarInfo.length - 1; ++i ) {
                            if ( !jarInfo[i].toLowerCase().endsWith(".jar") ) {
                                break;
                            }
                            szCurrentFragName = jarInfo[i].substring(1);
                            JarEntry jarEntry;
                            while ( ( jarEntry = jarInputStream.getNextJarEntry() ) != null ) {
                                String szJarEntryName = jarEntry.getName();
                                if ( !jarEntry.isDirectory() && szJarEntryName.equals( szCurrentFragName ) ) {
                                    // The stream is positioned at the matched nested-jar entry, so descend by
                                    // wrapping the current stream; the outer JarFile cannot look up an entry
                                    // that belongs to an inner jar.
                                    jarInputStream = new JarInputStream( jarInputStream );
                                    streamStack.addFirst( jarInputStream );
                                    break;
                                }
                                jarInputStream.closeEntry();
                            }
                        }
                    }
                    bUsingFile = false;
                    // [@Harald Notice] Using a temporary enumeration to prevent unexpected resource leaks.
                    this.mEntries = JarUtils.fetchEnumeration( jarInputStream );
                } finally {
                    JarInputStream t;
                    // [@Harald Notice] All `JarInputStream`s should be closed and released in the nested scenario.
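                    // Streams were stacked innermost-first via addFirst(), so the peek()/pop()
                    // loop below closes the deepest nested-jar stream before the streams that wrap it.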
                    while ( ( t = streamStack.peek() ) != null ) {
                        t.close();
                        streamStack.pop();
                    }
                }
            }
            else {
                if( jarInfo.length != 2 ) {
                    this.mClassesScopePath = szCurrentFragName + "/";
                }
            }
            packagePath = jarInfo[ jarInfo.length - 1 ].substring ( 1 );
        }
        this.mPackagePath = packagePath;
        if( bUsingFile ) {
            this.mEntries = this.mJarFile.entries();
        }
        this.skipEntries();
    }

    public boolean hasNext() {
        return this.mCurrentEntry != null;
    }

    public Object next() {
        if ( !this.hasNext() ) {
            throw new NoSuchElementException();
        }
        return this.mCurrentEntry;
    }

    public String getClassesScopePath() {
        return this.mClassesScopePath;
    }

    public String getPackagePath() {
        return this.mPackagePath;
    }

    public Enumeration<JarEntry> entries() {
        return this.mEntries;
    }

    protected abstract void skipEntries() ;
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/DirectoryFileIterator.java
================================================
package com.pinecone.framework.util.lang.iterator;

import com.pinecone.framework.util.lang.NamespaceCollector;

import java.io.File;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.function.Consumer;

public class DirectoryFileIterator implements NamespaceIterator {
    protected File     mFile ;
    protected File[]   mChildFiles ;
    protected String   mNSNamePathFmt ;
    protected String   mszSuffix ;
    protected int      mCursor  = 0;
    protected int      mLastRet = -1;

    public DirectoryFileIterator( String szResourcePath, String szNSName, String szSuffix ) {
        this.mszSuffix      = szSuffix;
        this.mFile          = new File( szResourcePath );
        this.mChildFiles    = this.mFile.listFiles();
        this.mNSNamePathFmt = szNSName.replace( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, File.separator );
        if( this.mChildFiles == null ) {
            this.mChildFiles = new File[0];
        }
        this.skipEntities();
    }

    @Override
    public boolean hasNext() {
        return this.mCursor < this.mChildFiles.length;
    }

    protected String replacePathName( String sz ) {
        String szPackageSegment = sz.substring( sz.indexOf( this.mNSNamePathFmt ) );
        return szPackageSegment.replace( File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).replace( this.mszSuffix, "" );
    }

    public String next() {
        if ( !this.hasNext() ) {
            throw new NoSuchElementException();
        }
        this.mLastRet = this.mCursor;
        String childFilePath = this.mChildFiles[ this.mCursor++ ].getPath();
        this.skipEntities();
        return this.replacePathName( childFilePath );
    }

    @Override
    public void forEachRemaining( Consumer action ) {
        Objects.requireNonNull(action);
        final int size = this.mChildFiles.length;
        int i = this.mCursor;
        if ( i < size ) {
            for ( ; i < size ; i++ ) {
                if( this.sift( this.mChildFiles[i] ) ) {
                    continue;
                }
                // Yield the normalized namespace name, consistent with next(), not the raw path.
                action.accept( this.replacePathName( this.mChildFiles[i].getPath() ) );
            }
            this.mCursor  = i;
            this.mLastRet = i - 1;
        }
    }

    protected boolean sift( File file ) {
        // Skip directories and any regular file that lacks the expected suffix;
        // only `*.class`-style files in this directory should be yielded.
        return file.isDirectory() || !file.getPath().endsWith ( this.mszSuffix );
    }

    protected void skipEntities() {
        while ( this.mCursor < this.mChildFiles.length && this.sift( this.mChildFiles[ this.mCursor ] ) ) {
            ++this.mCursor;
        }
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/DirectoryPackageIterator.java
================================================
package com.pinecone.framework.util.lang.iterator;

import java.io.File;

import com.pinecone.framework.util.lang.NamespaceCollector;

public class DirectoryPackageIterator extends DirectoryFileIterator {
    public DirectoryPackageIterator( String szResourcePath, String szNSName, String szSuffix ) {
        super( szResourcePath, szNSName, szSuffix );
    }

    @Override
    protected String replacePathName( String sz ) {
        String szPackageSegment = sz.substring( sz.indexOf( this.mNSNamePathFmt ) );
        return szPackageSegment.replace( File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR );
    }

    @Override
    protected boolean sift( File file ) {
        return !file.isDirectory() ;
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/JarEntryIterator.java
================================================
package com.pinecone.framework.util.lang.iterator;

import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.jar.JarEntry;

public class JarEntryIterator extends ArchJarEntryIterator implements Iterator {
    public JarEntryIterator( String szResourcePath, String szSuffix ) throws IOException {
        super( szResourcePath, szSuffix );
    }

    @Override
    public boolean hasNext() {
        return this.mEntries.hasMoreElements();
    }

    @Override
    public JarEntry next() {
        if ( !this.hasNext() ) {
            throw new NoSuchElementException();
        }
        this.mCurrentEntry = this.mEntries.nextElement();
        return this.mCurrentEntry;
    }

    @Override
    public void forEachRemaining( Consumer action ) {
        Objects.requireNonNull( action );
        while ( this.hasNext() ) {
            action.accept( this.next() );
        }
    }

    @Override
    protected void skipEntries() {
    }
}

================================================
FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/JarFileIterator.java
================================================
package com.pinecone.framework.util.lang.iterator;

import com.pinecone.framework.util.lang.JarUtils;
import com.pinecone.framework.util.lang.NamespaceCollector;

import java.io.IOException;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.jar.JarEntry;

public class JarFileIterator extends ArchJarEntryIterator implements NamespaceIterator {
    public JarFileIterator( String szResourcePath, String szSuffix ) throws IOException {
        super( szResourcePath, szSuffix );
    }

    @Override
    public boolean hasNext() {
        return this.mCurrentEntry != null;
    }

    @Override
    public String next() {
        if ( !this.hasNext() ) {
            throw new NoSuchElementException();
        }
        String entryName = this.mCurrentEntry.getName();
        String className = JarUtils.normalizeJarClassName( entryName, this.mClassesScopePath );
        this.skipEntries();
        return className;
    }

    @Override
    public void forEachRemaining( Consumer action ) {
        Objects.requireNonNull( action );
        while ( this.hasNext() ) {
            action.accept( this.next() );
        }
    }

    @Override
    protected void skipEntries() {
        while ( this.mEntries.hasMoreElements() ) {
            JarEntry entry = this.mEntries.nextElement();
            String entryName =
entry.getName(); //Debug.trace( entryName ); if ( entryName.endsWith( this.mszSuffix ) ) { int index = entryName.lastIndexOf( NamespaceCollector.RESOURCE_NAME_SEPARATOR ); String myPackagePath; if ( index == -1 ) { myPackagePath = entryName; } else { myPackagePath = entryName.substring( 0, index ); } if( this.mClassesScopePath == null ) { if ( myPackagePath.equals( this.mPackagePath ) ) { this.mCurrentEntry = entry; return; } } else { if ( myPackagePath.startsWith( this.mClassesScopePath ) && myPackagePath.endsWith( this.mPackagePath ) ) { this.mCurrentEntry = entry; return; } } } } this.mCurrentEntry = null; // No more valid entries } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/JarPackageIterator.java ================================================ package com.pinecone.framework.util.lang.iterator; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.lang.NamespaceCollector; import java.io.IOException; import java.util.NoSuchElementException; import java.util.jar.JarEntry; public class JarPackageIterator extends JarFileIterator { public JarPackageIterator( String szResourcePath, String szSuffix ) throws IOException { super( szResourcePath, szSuffix ); } @Override protected void skipEntries() { while ( this.mEntries.hasMoreElements() ) { JarEntry entry = this.mEntries.nextElement(); String entryName = entry.getName(); if ( entry.isDirectory() ) { if( this.mClassesScopePath != null && entryName.startsWith( this.mClassesScopePath ) ) { entryName = entryName.replace( this.mClassesScopePath, "" ); } if ( entryName.startsWith( this.mPackagePath ) ) { String childSegment = entryName.substring( this.mPackagePath.length() ); if( StringUtils.countOccurrencesOf( childSegment, NamespaceCollector.RESOURCE_NAME_SEPARATOR, 3 ) > 2 ) { continue; } if( entryName.equals( this.mPackagePath + NamespaceCollector.RESOURCE_NAME_SEPARATOR ) ) { // Self path continue; } this.mCurrentEntry = entry; return; } } } this.mCurrentEntry = null; // No more valid entries } @Override public String next() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } String entryName = this.mCurrentEntry.getName(); if( this.mClassesScopePath != null ) { if( entryName.startsWith( this.mClassesScopePath ) ) { entryName = entryName.replace( this.mClassesScopePath, "" ); } } String packageName = entryName.replace( NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).substring( 0, entryName.length() - 1 ); this.skipEntries(); return packageName; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/NamespaceIterator.java ================================================ package com.pinecone.framework.util.lang.iterator; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Iterator; public interface NamespaceIterator extends Iterator, Pinenut { boolean hasNext(); String next(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lock/ReentrantReadWriteSpinLock.java ================================================ package com.pinecone.framework.util.lock; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; public class ReentrantReadWriteSpinLock 
implements ReadWriteLock { private final AtomicInteger mutexSignal = new AtomicInteger( 0 ); private static final int WRITE_LOCK_MASK = 1 << 16; private Thread writingThread = null; private int writeReentrantCount = 0; private final ThreadLocal readReentrantCount = ThreadLocal.withInitial(() -> 0); private final Lock readLock = new ReadLock(); private final Lock writeLock = new WriteLock(); @Override public Lock readLock() { return this.readLock; } @Override public Lock writeLock() { return this.writeLock; } private class ReadLock implements Lock { @Override public void lock() { while ( !this.tryLock() ) { } } @Override public boolean tryLock() { int currentState = mutexSignal.get(); if ( ( currentState & WRITE_LOCK_MASK ) != 0 && writingThread != Thread.currentThread() ) { return false; } if ( mutexSignal.compareAndSet( currentState, currentState + 1 ) ) { readReentrantCount.set( readReentrantCount.get() + 1 ); return true; } return false; } @Override public void lockInterruptibly() throws InterruptedException { while ( !this.tryLock() ) { if ( Thread.interrupted() ) { throw new InterruptedException( "Thread was interrupted while attempting to acquire read lock." ); } } } @Override public boolean tryLock(long time, TimeUnit unit) throws InterruptedException { long deadline = System.nanoTime() + unit.toNanos(time); while ( !this.tryLock() ) { if ( Thread.interrupted() ) { throw new InterruptedException( "Thread was interrupted while attempting to acquire read lock." ); } if ( System.nanoTime() > deadline ) { return false; } } return true; } @Override public void unlock() { if (readReentrantCount.get() <= 0) { throw new IllegalMonitorStateException( "Read lock not held by current thread." ); } readReentrantCount.set( readReentrantCount.get() - 1 ); mutexSignal.decrementAndGet(); } @Override public Condition newCondition() { throw new UnsupportedOperationException(); } } private class WriteLock implements Lock { @Override public void lock() { Thread currentThread = Thread.currentThread(); if ( writingThread == currentThread ) { ++writeReentrantCount; return; } while ( true ) { int currentState = mutexSignal.get(); if ( currentState == 0 ) { if ( mutexSignal.compareAndSet( 0, WRITE_LOCK_MASK ) ) { writingThread = currentThread; writeReentrantCount = 1; break; } } } } @Override public boolean tryLock() { Thread currentThread = Thread.currentThread(); if ( writingThread == currentThread ) { writeReentrantCount++; return true; } if ( mutexSignal.compareAndSet(0, WRITE_LOCK_MASK) ) { writingThread = currentThread; writeReentrantCount = 1; return true; } return false; } @Override public void lockInterruptibly() throws InterruptedException { while ( !this.tryLock() ) { if ( Thread.interrupted() ) { throw new InterruptedException("Thread was interrupted while attempting to acquire write lock"); } } } @Override public boolean tryLock(long time, TimeUnit unit) throws InterruptedException { long deadline = System.nanoTime() + unit.toNanos(time); while ( !this.tryLock() ) { if ( Thread.interrupted() ) { throw new InterruptedException("Thread was interrupted while attempting to acquire write lock"); } if ( System.nanoTime() > deadline ) { return false; } } return true; } @Override public void unlock() { if ( writingThread != Thread.currentThread() ) { throw new IllegalMonitorStateException("Write lock not held by current thread"); } if ( --writeReentrantCount == 0 ) { writingThread = null; mutexSignal.set(0); } } @Override public Condition newCondition() { throw new UnsupportedOperationException(); } } } 
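A short usage sketch for ReentrantReadWriteSpinLock above; the shared counter and the two threads are illustrative placeholders. Spin locks busy-wait, so they suit very short critical sections; note also that newCondition() is unsupported, and acquiring the read lock while holding the write lock (a downgrade) is not safely supported as written, since the write unlock resets the whole state word.

import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;

import com.pinecone.framework.util.lock.ReentrantReadWriteSpinLock;

public class SpinLockSketch {
    private static final ReadWriteLock rwLock = new ReentrantReadWriteSpinLock();
    private static int sharedValue = 0;

    public static void main( String[] args ) throws InterruptedException {
        Thread writer = new Thread( () -> {
            Lock w = rwLock.writeLock();
            w.lock();                        // spins until no readers or writers remain
            try {
                sharedValue++;
            } finally {
                w.unlock();                  // always release in finally
            }
        } );

        Thread reader = new Thread( () -> {
            Lock r = rwLock.readLock();
            r.lock();                        // multiple readers may hold this concurrently
            try {
                System.out.println( sharedValue );
            } finally {
                r.unlock();
            }
        } );

        writer.start(); reader.start();
        writer.join();  reader.join();
    }
}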
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lock/ReentrantSpinLock.java ================================================ package com.pinecone.framework.util.lock; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; public class ReentrantSpinLock implements Lock { private final AtomicBoolean mMutexSignal = new AtomicBoolean( false ); private Thread mOwningThread = null; private int mnReentrantCount = 0; @Override public void lock() { Thread currentThread = Thread.currentThread(); if ( currentThread == this.mOwningThread ) { ++this.mnReentrantCount; return; } while ( !this.mMutexSignal.compareAndSet( false, true ) ) { } this.mOwningThread = currentThread; this.mnReentrantCount = 1; } @Override public void unlock() { Thread currentThread = Thread.currentThread(); if ( currentThread != this.mOwningThread ) { return; //throw new IllegalMonitorStateException( "Calling thread has not locked this lock" ); } --this.mnReentrantCount; if ( this.mnReentrantCount == 0 ) { this.mOwningThread = null; this.mMutexSignal.set( false ); } } @Override public boolean tryLock() { Thread currentThread = Thread.currentThread(); if ( currentThread == this.mOwningThread ) { ++this.mnReentrantCount; return true; } if ( this.mMutexSignal.compareAndSet( false, true ) ) { this.mOwningThread = currentThread; this.mnReentrantCount = 1; return true; } return false; } @Override public void lockInterruptibly() throws InterruptedException { Thread currentThread = Thread.currentThread(); if ( currentThread == this.mOwningThread ) { ++this.mnReentrantCount; return; } while ( !this.mMutexSignal.compareAndSet( false, true ) ) { if ( Thread.interrupted() ) { throw new InterruptedException(); } } this.mOwningThread = currentThread; this.mnReentrantCount = 1; } @Override public boolean tryLock(long time, java.util.concurrent.TimeUnit unit) throws InterruptedException { long endTime = System.nanoTime() + unit.toNanos(time); Thread currentThread = Thread.currentThread(); if ( currentThread == this.mOwningThread ) { this.mnReentrantCount++; return true; } while ( !this.mMutexSignal.compareAndSet( false, true ) ) { if ( System.nanoTime() > endTime ) { return false; } if ( Thread.interrupted() ) { throw new InterruptedException(); } } this.mOwningThread = currentThread; this.mnReentrantCount = 1; return true; } @Override public Condition newCondition() { throw new UnsupportedOperationException( "ReentrantSpinLock does not support conditions." ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lock/SpinLock.java ================================================ package com.pinecone.framework.util.lock; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; public class SpinLock implements Lock { private final AtomicBoolean mMutexSignal = new AtomicBoolean( false ); @Override public void lock() { while ( !this.mMutexSignal.compareAndSet(false, true) ) { } } @Override public void unlock() { this.mMutexSignal.set(false); } @Override public boolean tryLock() { return this.mMutexSignal.compareAndSet( false, true ); } @Override public Condition newCondition() { throw new UnsupportedOperationException( "SpinLock does not support conditions." 
); } @Override public void lockInterruptibly() throws InterruptedException { while ( !this.mMutexSignal.compareAndSet( false, true ) ) { if (Thread.currentThread().isInterrupted()) { throw new InterruptedException(); } } } @Override public boolean tryLock( long time, java.util.concurrent.TimeUnit unit ) throws InterruptedException { long endTime = System.nanoTime() + unit.toNanos( time ); while ( !this.mMutexSignal.compareAndSet(false, true) ) { if ( System.nanoTime() > endTime ) { return false; } if ( Thread.currentThread().isInterrupted() ) { throw new InterruptedException(); } } return true; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/BigNumberMath.java ================================================ package com.pinecone.framework.util.math; import com.pinecone.Pinecone; import java.math.BigDecimal; public abstract class BigNumberMath { private PrecisionHolder precision; protected int mp = 0; private int lastPrecision = 0; private int stp = 0; protected BigNumberMath(PrecisionHolder precision) { this.precision = precision; } protected int precision() { if (this.mp != 0) { return this.lastPrecision + this.mp; } else { this.lastPrecision = this.precision.getPrecision(); this.stp = this.lastPrecision / 5; return this.lastPrecision; } } protected void applyForCachePrecision() { this.mp += this.stp; } protected void clearCachePrecision() { this.mp -= this.stp; if (this.mp < 0) { this.mp = 0; } } public static BigNumberMath getDefaultBigNumberMath(PrecisionHolder precision) { return new BigNumberMathAchieve(precision); } public static BigNumberMath getDefaultBigNumberMath(int precision) { PrecisionHolder precisionHolder = new PrecisionHolder(){ @Override public int getPrecision() { return precision; } }; return new BigNumberMathAchieve(precisionHolder); } public static BigNumberMath getDefaultBigNumberMath() { PrecisionHolder precisionHolder = new PrecisionHolder(){ @Override public int getPrecision() { return Pinecone.FLOAT_ACCURACY; } }; return new BigNumberMathAchieve(precisionHolder); } public abstract BigDecimal sin(BigDecimal decimal); public abstract BigDecimal cos(BigDecimal decimal); public abstract BigDecimal tan(BigDecimal decimal); public abstract BigDecimal asin(BigDecimal decimal); public abstract BigDecimal acos(BigDecimal decimal); public abstract BigDecimal atan(BigDecimal decimal); public abstract BigDecimal pow(BigDecimal decimal, BigDecimal decimal2); public abstract BigDecimal pow(double var1, double var2); public abstract BigDecimal sqrt(BigDecimal decimal); public abstract BigDecimal cbrt(BigDecimal decimal); public abstract BigDecimal root(BigDecimal decimal, BigDecimal decimal2); public abstract BigDecimal log10(BigDecimal decimal); public abstract BigDecimal log(BigDecimal decimal, BigDecimal decimal2); public abstract BigDecimal ln(BigDecimal decimal); public abstract BigDecimal exp(BigDecimal decimal); public abstract BigDecimal sinh(BigDecimal decimal); public abstract BigDecimal cosh(BigDecimal decimal); public abstract BigDecimal tanh(BigDecimal decimal); public abstract BigDecimal asinh(BigDecimal decimal); public abstract BigDecimal acosh(BigDecimal decimal); public abstract BigDecimal atanh(BigDecimal decimal); public abstract BigDecimal deg(BigDecimal decimal); public abstract BigDecimal rad(BigDecimal decimal); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/BigNumberMathAchieve.java 
================================================ package com.pinecone.framework.util.math; import java.math.BigDecimal; import java.math.BigInteger; final class BigNumberMathAchieve extends BigNumberMath { private static BigDecimal E = new BigDecimal("2.7182818284590452353602874713526624977572470936999595749669676277240766"); private static BigDecimal PI = new BigDecimal("3.1415926535897932384626433832795028841971693993751058209749445923078164"); private static BigDecimal PI2; private static BigDecimal bim; private static BigDecimal B180; private static BigDecimal N1; private static BigDecimal B2; static BigDecimal[] PREC_CACHE; static { PI2 = PI.multiply(BigDecimal.valueOf(2L)); bim = BigDecimal.valueOf(2147483647L); B180 = BigDecimal.valueOf(180L); N1 = BigDecimal.valueOf(-1L); B2 = BigDecimal.valueOf(2L); PREC_CACHE = new BigDecimal[101]; BigDecimal current = BigDecimal.ONE; for(int i = 0; i < PREC_CACHE.length; ++i) { PREC_CACHE[i] = current; current = current.divide(BigDecimal.TEN); } } BigNumberMathAchieve(PrecisionHolder precision) { super(precision); } public BigDecimal sin(BigDecimal x) { int precision = super.precision(); x = x.remainder(PI2); BigDecimal P = this.ofPrecision(precision); boolean neg = x.compareTo(BigDecimal.ZERO) == -1; if (neg) { x = x.abs(); } BigDecimal result = BigDecimal.ONE; BigDecimal cons = x.subtract(PI.divide(BigDecimal.valueOf(2L), precision + 2, 6)).pow(2); BigDecimal curf = BigDecimal.valueOf(2L); BigDecimal curr = cons; int i = 3; boolean n = true; while(true) { BigDecimal temp; if (n) { temp = result.subtract(curr.divide(curf, precision + 2, 6)); } else { temp = result.add(curr.divide(curf, precision + 2, 6)); } if (result.subtract(temp).abs().compareTo(P) != 1) { result = temp; if (neg) { result = temp.negate(); } return result.setScale(precision, 6); } result = temp; curr = curr.multiply(cons).setScale(precision + 2, 6); n = !n; curf = curf.multiply(BigDecimal.valueOf((long)(i++ * i++))); } } public BigDecimal cos(BigDecimal x) { int precision = super.precision(); x = x.remainder(PI2); BigDecimal P = this.ofPrecision(precision); x = x.abs(); BigDecimal result = BigDecimal.ONE; BigDecimal cons = x.pow(2); BigDecimal curf = BigDecimal.valueOf(2L); BigDecimal curr = cons; int i = 3; boolean n = true; while(true) { BigDecimal temp; if (n) { temp = result.subtract(curr.divide(curf, precision + 2, 6)); } else { temp = result.add(curr.divide(curf, precision + 2, 6)); } if (result.subtract(temp).abs().compareTo(P) != 1) { return temp.setScale(precision, 6); } result = temp; curr = curr.multiply(cons).setScale(precision + 2, 6); n = !n; curf = curf.multiply(BigDecimal.valueOf((long)(i++ * i++))); } } public BigDecimal tan(BigDecimal x) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal result = this.sin(x).divide(this.cos(x), precision, 6); super.clearCachePrecision(); return result.setScale(precision); } public BigDecimal asin(BigDecimal x) { if (x.abs().compareTo(BigDecimal.ONE) == 1) { try { throw new Exception("Illegal input of asin(x)"); } catch ( Exception e ) { e.printStackTrace(); } } int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); BigDecimal result = BigDecimal.valueOf(Math.asin(x.doubleValue())); while(true) { BigDecimal temp = result.subtract(this.sin(result).subtract(x).divide(this.cos(result), precision + 3, 6)); if (result.subtract(temp).abs().compareTo(P) != 1) { super.clearCachePrecision(); return temp.setScale(precision, 6); } result = temp; } } 
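// Usage sketch (comment only, not repository code): BigNumberMathAchieve is package-private,
// so callers obtain an instance through the BigNumberMath factory methods, e.g.:
//   BigNumberMath math = BigNumberMath.getDefaultBigNumberMath( 50 );
//   BigDecimal s = math.sin( new BigDecimal( "1" ) );   // Taylor series about pi/2, 50 digits (~0.8414709848...)
//   BigDecimal a = math.asin( s );                      // Newton iteration from a double seed, converges back to ~1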
public BigDecimal acos(BigDecimal x) { if (x.abs().compareTo(BigDecimal.ONE) == 1) { try { throw new Exception("Illegal input of acos(x)"); } catch (Exception var6) { var6.printStackTrace(); } } int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); BigDecimal result = BigDecimal.valueOf(Math.acos(x.doubleValue())); while(true) { BigDecimal temp = result.subtract(this.cos(result).subtract(x).divide(this.sin(result).negate(), precision + 3, 6)); if (result.subtract(temp).abs().compareTo(P) != 1) { super.clearCachePrecision(); return temp.setScale(precision, 6); } result = temp; } } public BigDecimal atan(BigDecimal x) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); BigDecimal result = BigDecimal.valueOf(Math.atan(x.doubleValue())); while(true) { BigDecimal temp = result.subtract(this.tan(result).subtract(x).multiply(this.cos(result).pow(2))); if (result.subtract(temp).abs().compareTo(P) != 1) { super.clearCachePrecision(); return temp.setScale(precision, 6); } result = temp.setScale(precision + 3, 6); } } public BigDecimal pow(BigDecimal a, BigDecimal b) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); if (b.abs().compareTo(bim) == 1) { throw new IllegalArgumentException("The absolute value of the exponent is too large while computing a power!"); } else if (b.compareTo(BigDecimal.ZERO) == 0) { if (a.compareTo(BigDecimal.ZERO) == 0) { throw new IllegalArgumentException("Encountered 0 raised to the power of 0 while computing a power"); } else { return BigDecimal.ONE; } } else if (b.compareTo(BigDecimal.ONE) == 0) { return a; } else if (b.compareTo(N1) == 0) { return BigDecimal.ONE.divide(a, precision, 6); } else { boolean below = b.signum() == -1; b = b.abs(); if (b.stripTrailingZeros().scale() <= 0) { return a.pow(b.intValue()); } else { BigDecimal result = a.pow(b.intValue()); BigDecimal constant = BigDecimal.ONE.add(b.multiply(this.ln(a))); if (result.equals(BigDecimal.ZERO)) { result = BigDecimal.ONE; } while(true) { BigDecimal temp = result.multiply(constant.subtract(this.ln(result))); if (temp.subtract(result).compareTo(P) != 1) { if (below) { return BigDecimal.ONE.divide(temp, precision, 6); } else { super.clearCachePrecision(); return temp.setScale(precision, 6); } } result = temp; } } } } public BigDecimal pow(double a, double b) { return this.pow(new BigDecimal(a),new BigDecimal(b)); } public BigDecimal sqrt(BigDecimal x) { int precision = super.precision(); BigDecimal n = BigDecimal.ONE; BigDecimal l = BigDecimal.ZERO; BigDecimal P = this.ofPrecision(precision); BigDecimal B = new BigDecimal(2); while(true) { n = n.subtract(n.pow(2).subtract(x).divide(B.multiply(n), precision + 4, 6)); if (n.subtract(l).abs().compareTo(P) != 1) { return n.setScale(precision, 6); } l = n; } } public BigDecimal cbrt(BigDecimal x) { int precision = super.precision(); BigDecimal n = BigDecimal.ONE; BigDecimal l = BigDecimal.ZERO; BigDecimal P = this.ofPrecision(precision); BigDecimal B = new BigDecimal(3); while(true) { n = n.subtract(n.pow(3).subtract(x).divide(B.multiply(n.pow(2)), precision + 4, 6)); if (n.subtract(l).abs().compareTo(P) != 1) { return n.setScale(precision, 6); } l = n; } } public BigDecimal root(BigDecimal a, BigDecimal b) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal result = this.pow(a, BigDecimal.ONE.divide(b, precision, 6)); super.clearCachePrecision(); return result.setScale(precision, 6); } public BigDecimal log10(BigDecimal x) { return this.log(x,
BigDecimal.TEN); } public BigDecimal log(BigDecimal a, BigDecimal b) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal result = this.ln(a).divide(this.ln(b), precision, 6); super.clearCachePrecision(); return result.setScale(precision, 6); } public BigDecimal ln(BigDecimal x) { if (x.signum() != 1) { throw new IllegalArgumentException("Invalid input of ln(x)"); } else { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal sc = this.ofPrecision(precision); int btl = x.toBigInteger().bitLength(); BigDecimal result = BigDecimal.valueOf((double)btl - Math.ceil((double)(3 * (btl - 3) / 10 + 1))); while(true) { BigDecimal bpk = this.exp(result); BigDecimal tmp = result.subtract(bpk.subtract(x).divide(bpk, precision + 4, 6)); if (tmp.subtract(result).abs().compareTo(sc) != 1) { super.clearCachePrecision(); return tmp.setScale(precision, 6); } result = tmp; } } } public BigDecimal exp(BigDecimal x) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); if (x.abs().compareTo(bim) == 1) { throw new IllegalArgumentException("The absolute value of the exponent is too large while computing a power!"); } else { boolean ng = x.signum() == -1; x = x.abs(); BigDecimal eix = E.setScale(precision + 3, 4).pow(x.setScale(0, 0).intValue()).setScale(precision + 3, 4); BigDecimal p0 = x.setScale(0, 0); BigDecimal cons = x.subtract(p0); BigDecimal curr = BigDecimal.ONE; BigDecimal curr2 = cons; BigDecimal result = eix; int var11 = 2; while(true) { BigDecimal temp = result.add(eix.multiply(curr2).divide(curr, precision + 5, 6)); if (temp.subtract(result).abs().compareTo(P) != 1) { super.clearCachePrecision(); return ng ? BigDecimal.ONE.divide(temp, precision, 6) : temp.setScale(precision, 6); } curr2 = curr2.multiply(cons); curr = curr.multiply(BigDecimal.valueOf((long)(var11++))); result = temp; } } } public BigDecimal sinh(BigDecimal x) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal result = this.exp(x).subtract(this.exp(x.negate())).divide(B2, precision, 6); super.clearCachePrecision(); return result.setScale(precision, 6); } public BigDecimal cosh(BigDecimal x) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal result = this.exp(x).add(this.exp(x.negate())).divide(B2, precision, 6); super.clearCachePrecision(); return result.setScale(precision, 6); } public BigDecimal tanh(BigDecimal x) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal e2 = this.exp(x); BigDecimal eN2 = BigDecimal.ONE.divide(e2, precision + 3, 6); BigDecimal result = e2.subtract(eN2).divide(e2.add(eN2), precision + 3, 6); super.clearCachePrecision(); return result.setScale(precision, 6); } public BigDecimal asinh(BigDecimal x) { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); boolean ng = x.signum() == -1; if (x.signum() == 0) { return BigDecimal.ZERO; } else { x = x.abs(); BigDecimal result = this.ln(x); while(true) { BigDecimal temp = result.subtract(this.sinh(result).subtract(x).divide(this.cosh(result), precision + 3, 6)); if (result.subtract(temp).abs().compareTo(P) != 1) { result = temp; if (ng) { result = temp.negate(); } super.clearCachePrecision(); return result.setScale(precision, 6); } result = temp; } } } public BigDecimal acosh(BigDecimal x) { if (x.compareTo(BigDecimal.ONE) == -1) { try { throw new Exception("Illegal input of acosh(x)"); } catch (Exception var6) { var6.printStackTrace(); } } if (x.compareTo(BigDecimal.ONE)
== 0) { return BigDecimal.ZERO; } else { int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); BigDecimal result = this.ln(x); while(true) { BigDecimal temp = result.subtract(this.cosh(result).subtract(x).divide(this.sinh(result), precision + 3, 6)); if (result.subtract(temp).abs().compareTo(P) != 1) { super.clearCachePrecision(); return temp.setScale(precision, 6); } result = temp; } } } public BigDecimal atanh(BigDecimal x) { if (x.abs().compareTo(BigDecimal.ONE) != -1) { try { throw new Exception("Illegal input of atanh(x)"); } catch (Exception var6) { var6.printStackTrace(); } } int precision = super.precision(); super.applyForCachePrecision(); BigDecimal P = this.ofPrecision(precision); BigDecimal result = x.setScale(precision, 6); while(true) { BigDecimal temp = result.subtract(this.tanh(result).subtract(x).multiply(this.cosh(result).pow(2))); if (result.subtract(temp).abs().compareTo(P) != 1) { super.clearCachePrecision(); return temp.setScale(precision, 6); } result = temp.setScale(precision + 5, 6); } } public BigDecimal deg(BigDecimal x) { return x.multiply(B180).divide(PI, super.precision(), 6); } public BigDecimal rad(BigDecimal x) { return x.multiply(PI).divide(B180, super.precision(), 6); } private BigDecimal ofPrecision(int precision) { if (precision <= 0) { return PREC_CACHE[0]; } else { return precision < PREC_CACHE.length ? PREC_CACHE[precision] : BigDecimal.ONE.divide(new BigDecimal(BigInteger.TEN.pow(precision))); } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/PrecisionHolder.java ================================================ package com.pinecone.framework.util.math; public interface PrecisionHolder { int getPrecision(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/Vectorizer.java ================================================ package com.pinecone.framework.util.math; import java.util.*; public class Vectorizer<T> { private Vector<Vector<T>> tokenArrays; private HashSet<T> unionTokenSet; private Vector<Vector<Double>> tokenVectors; private Vectorizer() { this.unionTokenSet = new HashSet<>(); this.tokenArrays = new Vector<>(); this.tokenVectors = new Vector<>(); } public Vectorizer( Vector<Vector<T>> tokenArrays ){ this(); this.apply( tokenArrays ); } private void apply( Vector<Vector<T>> tokenArrays ) { this.tokenArrays = tokenArrays; for ( int i = 0; i < this.tokenArrays.size(); i++ ) { this.unionTokenSet.addAll( tokenArrays.get(i) ); this.tokenVectors.add( new Vector<>() ); } this.analysis(); } private Vector<Vector<T>> singlify( Vector<T> tokenArrayA, Vector<T> tokenArrayB ) { Vector<Vector<T>> single = new Vector<>(); single.add( tokenArrayA ); single.add( tokenArrayB ); return single; } public Vectorizer( Vector<T> tokenArrayA, Vector<T> tokenArrayB ){ this(); this.apply( this.singlify( tokenArrayA, tokenArrayB ) ); } public Vector<Vector<Double>> getResult(){ return this.tokenVectors; } private void tokenMapify( Vector<T> proto, Map<T, Double> map ) { for ( T item : proto ){ double tempInt = 0.0; if( map.containsKey(item) ){ tempInt = map.get(item); }else { map.put(item,0.0); } map.replace(item,++tempInt); } } private void analysis(){ ArrayList<Map<T, Double>> tokenMaps = new ArrayList<>(); for ( int i = 0; i < this.tokenArrays.size(); i++ ) { tokenMaps.add( new HashMap<>() ); this.tokenMapify( this.tokenArrays.get(i), tokenMaps.get(i) ); } for( T item : this.unionTokenSet ){ for ( int i = 0; i < tokenMaps.size(); i++ ) { if( !tokenMaps.get(i).containsKey(item) ){
tokenMaps.get(i).put( item,0.0 ); } } for ( int j = 0; j < this.tokenVectors.size(); j++ ) { this.tokenVectors.get(j).add( tokenMaps.get(j).get(item) ); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/ArchName.java ================================================ package com.pinecone.framework.util.name; public abstract class ArchName implements Name { protected String mszName; protected ArchName( String szName ) { this.mszName = szName; } @Override public void setName( String szName ){ this.mszName = szName; } @Override public void asStandardizedName( String szStandardizedName ) { this.mszName = szStandardizedName; } @Override public String toJSONString() { return "\"" + this.toString() + "\""; } @Override public boolean equals( Object obj ) { if( obj instanceof Name ) { if( obj instanceof Namespace ) { return this.getFullName().equals( ( (Namespace)obj ).getFullName() ); } return this.getName().equals( ( (Name)obj ).getName() ); } return false; } @Override public int hashCode() { return this.getFullName().hashCode(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/ArchNamespaceNode.java ================================================ package com.pinecone.framework.util.name; public abstract class ArchNamespaceNode extends ArchName implements Namespace { protected String mszSeparator; protected ArchNamespaceNode( String szName, String separator ) { super( szName ); this.mszSeparator = separator; } @Override public String getNodeName() { return this.mszName; } @Override public String getSeparator() { return this.mszSeparator; } @Override public void setSeparator( String separator ) { this.mszSeparator = separator; } @Override public String toString() { return this.getSimpleName(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/FixScopeName.java ================================================ package com.pinecone.framework.util.name; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.name.ArchName; import com.pinecone.framework.util.name.MultiScopeName; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class FixScopeName extends ArchName implements MultiScopeName { protected List<String> mDomains; protected List<String> mSuffixes; public FixScopeName( String szName, String szDomain ) { this( szName ); this.mDomains.add ( szDomain ); } public FixScopeName( String szName, String szDomain, String szSuffix ) { this( szName ); this.mDomains.add ( szDomain ); this.mSuffixes.add ( szSuffix ); } public FixScopeName( String szName, @Nullable String[] domains, @Nullable String[] suffixes ) { this( szName ); if ( domains != null ) { this.mDomains.addAll( Arrays.asList( domains ) ); } if ( suffixes != null ) { this.mSuffixes.addAll( Arrays.asList( suffixes ) ); } } public FixScopeName( String szName, List<String> domains, List<String> suffixes ) { super( szName ); this.mDomains = domains; this.mSuffixes = suffixes; } public FixScopeName( String szName ) { super( szName ); this.mDomains = new ArrayList<>(); this.mSuffixes = new ArrayList<>(); } @Override public String getName() { return this.mszName; } @Override public String getFullName() { if ( !this.mDomains.isEmpty() && !this.mSuffixes.isEmpty() ) { return this.mDomains.get( 0 ) + this.mszName + this.mSuffixes.get( 0 ); } else if ( !this.mDomains.isEmpty() ) { return
this.mDomains.get( 0 ) + this.mszName; } else if ( !this.mSuffixes.isEmpty() ) { return this.mszName + this.mSuffixes.get( 0 ); } else { return this.mszName; } } @Override public String getDomain(){ if ( !this.mDomains.isEmpty() ) { return this.mDomains.get( 0 ); } else { return ""; } } @Override public String toString() { return this.getFullName(); } @Override public List<String> getFullNames() { List<String> fullNames = new ArrayList<>(); if ( this.mDomains.isEmpty() && this.mSuffixes.isEmpty() ) { fullNames.add( this.mszName ); } else if ( this.mDomains.isEmpty() ) { for ( String suffix : this.mSuffixes ) { fullNames.add( this.mszName + suffix ); } } else if ( this.mSuffixes.isEmpty() ) { for ( String domain : this.mDomains ) { fullNames.add( domain + this.mszName ); } } else { for ( String domain : this.mDomains ) { for ( String suffix : this.mSuffixes ) { fullNames.add( domain + this.mszName + suffix ); } } } return fullNames; } public void addDomain( String domain ) { this.mDomains.add( domain ); } public void addSuffix( String suffix ) { this.mSuffixes.add( suffix ); } public List<String> getDomains() { return this.mDomains; } public List<String> getSuffixes() { return this.mSuffixes; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/GenericMultiNamespace.java ================================================ package com.pinecone.framework.util.name; import com.pinecone.framework.unit.LinkedTreeSet; import java.util.*; public class GenericMultiNamespace extends ArchNamespaceNode implements MultiNamespace { protected Set<MultiNamespace> mParents; public GenericMultiNamespace( String name ) { this( name, new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator ), Namespace.DEFAULT_SEPARATOR ); } public GenericMultiNamespace( String name, Set<MultiNamespace> parents ) { this( name, parents, Namespace.DEFAULT_SEPARATOR ); } public GenericMultiNamespace( String name, Set<MultiNamespace> parents, String separator ) { super( name, separator ); this.mParents = parents != null ? parents : new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator ); } public GenericMultiNamespace( String name, MultiNamespace parent ) { this( name, parent, Namespace.DEFAULT_SEPARATOR ); } public GenericMultiNamespace( String name, MultiNamespace parent, String separator ) { super( name, separator ); LinkedTreeSet<MultiNamespace> set = new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator ); if( parent != null ) { set.add( parent ); } this.mParents = set; } public GenericMultiNamespace( String name, String separator ) { this( name, new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator ), separator ); } @Override public Collection<MultiNamespace> getParents() { return this.mParents; } @Override public Namespace parent() { return this.getFirstParent(); } @Override public void setParent( Namespace parent ) { this.mParents.clear(); this.addParent( (MultiNamespace)parent ); } @Override public MultiNamespace getFirstParent() { return this.mParents.isEmpty() ?
null : this.mParents.iterator().next(); } @Override public String getFullName() { Namespace firstParent = this.getFirstParent(); if ( firstParent == null ) { return this.mszName; } return firstParent.getFullName() + this.getSeparator() + this.mszName; } @Override public List<String> getFullNames() { List<String> fullNames = new ArrayList<>(); if( this.mParents.isEmpty() ) { fullNames.add( this.getNodeName() ); } else { for ( MultiNamespace parent : this.mParents ) { this.addFullNames( parent, fullNames ); } } return fullNames; } protected void addFullNames( MultiNamespace namespace, List<String> fullNames ) { List<String> parentFullNames = namespace.getFullNames(); for ( String parentFullName : parentFullNames ) { String fullName = parentFullName + this.getSeparator() + this.getNodeName(); fullNames.add( fullName ); } } @Override public String getFullNameByNS( String szNS ) { for ( Namespace parent : this.mParents ) { if ( parent.getNodeName().equals( szNS ) ) { return parent.getFullName() + this.getSeparator() + this.mszName; } } return null; } @Override public void addParent( MultiNamespace parent ) { this.mParents.add( parent ); } @Override public boolean hasOwnParent( MultiNamespace parent ) { return this.mParents.contains( parent ); } @Override public boolean hasOwnParentNS( String szNS ) { return this.mParents.contains( szNS ); } @Override public MultiNamespace getParentByNS( String szNS ) { for ( MultiNamespace parent : this.mParents ) { if ( parent.getNodeName().equals(szNS) ) { return parent; } } return null; } @Override public void removeParent( MultiNamespace parent ) { this.mParents.remove( parent ); } @Override public int parentsSize() { return this.mParents.size(); } @Override public String getSimpleName() { return getNodeName(); } @Override public boolean equals( Object obj ) { if( obj instanceof MultiNamespace ) { return this.getFullNames().equals( ((MultiNamespace) obj).getFullNames() ); } return false; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/GenericNamespaceParser.java ================================================ package com.pinecone.framework.util.name; import com.pinecone.framework.system.PineRuntimeException; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; public class GenericNamespaceParser implements NamespaceParser { protected Class<? extends Namespace> namespaceClass; public GenericNamespaceParser() { this( UniNamespace.class ); } public GenericNamespaceParser( Class<? extends Namespace> namespaceClass ) { this.namespaceClass = namespaceClass; } public void setNamespaceClass( Class<? extends Namespace> namespaceClass ) { this.namespaceClass = namespaceClass; } @Override public Namespace parse( String szNamespaceStr, Pattern pattern ) { if ( pattern == null ) { throw new IllegalArgumentException( "Pattern cannot be null" ); } List<String> parts = new ArrayList<>(); List<String> usedSeparators = new ArrayList<>(); Matcher matcher = pattern.matcher( szNamespaceStr ); int lastIndex = 0; while ( matcher.find() ) { String part = szNamespaceStr.substring( lastIndex, matcher.start() ); parts.add(part); usedSeparators.add(matcher.group()); lastIndex = matcher.end(); }
// Add the last one.
parts.add( szNamespaceStr.substring( lastIndex ) );
// Create the Namespace tree.
Namespace current = null; String currentSeparator = null; for ( int i = 0; i < parts.size(); ++i ) { String part = parts.get(i); String nextSeparator = i < usedSeparators.size() ?
usedSeparators.get(i) : Namespace.DEFAULT_SEPARATOR; current = this.newNamespaceInstance( part, current, currentSeparator ); currentSeparator = nextSeparator; } return current; } protected Namespace newNamespaceInstance( String name, Namespace parent, String separator ) { try { return this.namespaceClass.getConstructor( String.class, Namespace.class, String.class ).newInstance( name, parent, separator ); } catch ( Exception e ) { try { return this.namespaceClass.getConstructor( String.class, MultiNamespace.class, String.class ).newInstance( name, (MultiNamespace)parent, separator ); } catch ( Exception e2 ) { throw new PineRuntimeException( "Failed to instantiate namespace class", e2 ); } } } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/MultiNamespace.java ================================================ package com.pinecone.framework.util.name; import java.util.Collection; import java.util.Comparator; import java.util.List; public interface MultiNamespace extends Namespace, MultiScopeName { Collection<MultiNamespace> getParents(); MultiNamespace getFirstParent(); String getFullName(); String getFullNameByNS ( String szNS ); void addParent ( MultiNamespace parent ); boolean hasOwnParent ( MultiNamespace parent ); boolean hasOwnParentNS ( String szNS ); MultiNamespace getParentByNS ( String szNS ); void removeParent ( MultiNamespace parent ); int parentsSize(); Comparator<Object> DefaultSetNameComparator = new Comparator<>() { @Override public int compare( Object o1, Object o2 ) { if ( o1 instanceof Namespace && o2 instanceof Namespace ) { return ( (Namespace) o1 ).getFullName().compareTo( ( (Namespace) o2 ).getFullName() ); } else if ( o1 instanceof String && o2 instanceof String ) { return ((String) o1).compareTo( (String) o2 ); } else if ( o1 instanceof Namespace && o2 instanceof String ) { return ( (Namespace) o1 ).getNodeName().compareTo( (String) o2 ); } else if ( o1 instanceof String && o2 instanceof Namespace ) { return ( (String) o1 ).compareTo( ( (Namespace) o2 ).getNodeName() ); } else { throw new IllegalArgumentException( "Objects are not of type Namespace or String" ); } } }; @Override default int compareTo( Namespace o ) { return this.getNodeName().compareTo( o.getNodeName() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/MultiScopeName.java ================================================ package com.pinecone.framework.util.name; import java.util.List; public interface MultiScopeName extends Name { List<String> getFullNames(); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/Name.java ================================================ package com.pinecone.framework.util.name; import com.pinecone.framework.system.prototype.Pinenut; public interface Name extends Pinenut { String getName(); String getFullName(); String getDomain(); void setName( String szName ); void asStandardizedName( String szStandardizedName ); default boolean isStandardizedName() { return this.getName().equals( this.getFullName() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/Namespace.java ================================================ package com.pinecone.framework.util.name; public interface Namespace extends Name, Comparable<Namespace> { String DEFAULT_SEPARATOR = "."; Namespace parent(); void setParent( Namespace parent ); default
Namespace root(){ Namespace p = this; Namespace c = p; while ( p != null ) { c = p; p = p.parent(); } return c; } default String rootName() { return this.root().getNodeName(); } String getSeparator(); String getNodeName(); String getSimpleName(); String getFullName(); void setSeparator ( String separator ); @Override default String getName() { return this.getNodeName(); } @Override default String getDomain() { Namespace p = this.parent(); if( p != null ) { return p.getFullName(); } return ""; } @Override default int compareTo( Namespace o ) { return this.getFullName().compareTo( o.getFullName() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/NamespaceParser.java ================================================ package com.pinecone.framework.util.name; import com.pinecone.framework.system.prototype.Pinenut; import java.util.List; import java.util.regex.Pattern; public interface NamespaceParser extends Pinenut { default Namespace parse( String szNamespaceStr, List<String> separators ) { if ( separators == null || separators.isEmpty() ) { throw new IllegalArgumentException( "Separators list cannot be null or empty" ); } StringBuilder regexBuilder = new StringBuilder(); for ( String sep : separators ) { regexBuilder.append( Pattern.quote(sep) ).append( "|" ); } String regex = regexBuilder.substring( 0, regexBuilder.length() - 1 ); return this.parse( szNamespaceStr, Pattern.compile( regex ) ); } default Namespace parse( String szNamespaceStr, String szSeparatorsRegex ) { if ( szSeparatorsRegex == null || szSeparatorsRegex.isEmpty() ) { throw new IllegalArgumentException( "Regex string cannot be null or empty" ); } return this.parse( szNamespaceStr, Pattern.compile( szSeparatorsRegex ) ); } Namespace parse( String namespaceStr, Pattern separatorsPattern ); } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/ScopeName.java ================================================ package com.pinecone.framework.util.name; public class ScopeName extends ArchName implements Name { protected String mszDomain; public ScopeName( String szName, String szDomain ) { super( szName ); this.mszDomain = szDomain; } public ScopeName( String szName ) { this( szName, "" ); } @Override public String getName() { return this.mszName; } @Override public String getFullName(){ return this.getDomain() + this.getName(); } @Override public String getDomain(){ return this.mszDomain; } @Override public String toString() { return this.getFullName(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/UniNamespace.java ================================================ package com.pinecone.framework.util.name; public class UniNamespace extends ArchNamespaceNode implements Namespace { protected Namespace mParent; public UniNamespace( String name ) { this( name, null, Namespace.DEFAULT_SEPARATOR ); } public UniNamespace( String name, Namespace parent ) { this( name, parent, Namespace.DEFAULT_SEPARATOR ); } public UniNamespace( String name, Namespace parent, String separator ) { super( name, separator ); this.mParent = parent; } public UniNamespace( String name, String separator ) { this( name, null, separator ); } @Override public Namespace parent() { return mParent; } @Override public void setParent( Namespace parent ) { this.mParent = parent; } @Override public String getSimpleName() { return this.getNodeName(); } @Override
public String getFullName() { if ( this.mParent == null ) { return this.mszName; } return this.mParent.getFullName() + this.mszSeparator + this.mszName; } @Override public boolean equals( Object that ) { if( that instanceof Namespace ) { return this.getFullName().equals( ((Namespace) that).getFullName() ); } return false; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/path/BasicPathResolver.java ================================================ package com.pinecone.framework.util.name.path; import java.util.ArrayList; import java.util.List; public class BasicPathResolver implements PathResolver { protected String mszSepRegex; protected String mszSeparator; public BasicPathResolver( String szSeparator, String szSepRegex ) { this.mszSeparator = szSeparator; this.mszSepRegex = szSepRegex; } public BasicPathResolver() { this( "/", "/" ); } @Override public List<String> resolvePath( String[] parts ) { ArrayList<String> resolvedParts = new ArrayList<>(); for (String part : parts) { if ( part.equals(".") || part.isEmpty() ) { continue; } if ( part.equals("..") ) { if ( !resolvedParts.isEmpty() ) { resolvedParts.remove( resolvedParts.size() - 1 ); } } else { resolvedParts.add( part ); } } return resolvedParts; } @Override public String resolvePath( String path ) { String[] parts = this.processPath( path ).split( this.mszSepRegex ); return this.assemblePath( this.resolvePath( parts ) ); } @Override public List<String> resolvePathParts( String path ) { return this.resolvePath( this.segmentPathParts( path ) ); } @Override public String[] segmentPathParts( String path ) { return this.processPath( path ).split( this.mszSepRegex ); } @Override public String assemblePath( List<String> parts ) { if ( parts == null || parts.size() == 0 ) { return ""; } StringBuilder path = new StringBuilder(); for ( int i = 0; i < parts.size(); ++i ) { if ( i > 0 ) { path.append( this.mszSeparator ); } path.append( parts.get( i ) ); } return path.toString(); } protected String processPath( String path ) { return path; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/path/PathResolver.java ================================================ package com.pinecone.framework.util.name.path; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; public interface PathResolver extends Pinenut { List<String> resolvePath ( String[] parts ) ; String resolvePath ( String path ); List<String> resolvePathParts ( String path ) ; String[] segmentPathParts ( String path ) ; String assemblePath ( List<String> parts ) ; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/ArchRDBExecutor.java ================================================ package com.pinecone.framework.util.rdb; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONArraytron; import com.pinecone.framework.util.json.JSONMaptron; public abstract class ArchRDBExecutor implements MappedExecutor { private RDBHost mRDBSQLHost; private MappedSQLSplicer mSimpleSQLSpawner = null ; private void init() { this.mSimpleSQLSpawner = new MappedSQLSplicer(); } @Override public RDBHost getRDBSQLHost() { this.init(); return this.mRDBSQLHost; }
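// Usage sketch (comment only, not repository code): a concrete subclass bound to an
// RDBHost can run the mapped helpers without hand-written SQL. Names here are hypothetical:
//   MappedExecutor executor = new MyRDBExecutor( host );
//   Map<String, Object> row = new LinkedHashMap<>();
//   row.put( "name", "pinecone" );
//   executor.insertWithArray( "t_demo", row );               // INSERT INTO `t_demo` ( `name` ) VALUES ( 'pinecone' )
//   JSONArray rows = executor.fetch( "SELECT * FROM `t_demo`" );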
public ArchRDBExecutor( RDBHost rdbHost ) { this.init(); this.mRDBSQLHost = rdbHost; } protected Statement createStatement() throws SQLException { return this.mRDBSQLHost.createStatement(); } @Override public ResultSession query( String szSQL ) throws SQLException {
//this.affirmCurrentStatement();
Statement statement = this.createStatement(); ResultSet resultSet = statement.executeQuery( szSQL ); return new DirectResultSession( this.mRDBSQLHost, statement, resultSet ); } @Override public long execute( String szSQL, boolean bIgnoreNoAffected ) throws SQLException {
//this.affirmCurrentStatement();
Statement statement = this.createStatement(); statement.execute( szSQL ); if( bIgnoreNoAffected ){ return 1; } long n = statement.getUpdateCount(); statement.close(); return n; } public long execute( String szSQL ) throws SQLException { return this.execute( szSQL, false ); } public int countFromTable( String szSQL ){ try{ ResultSession session = this.query(szSQL); ResultSet resultSet = session.getResultSet(); resultSet.next(); int n = resultSet.getInt("COUNT(*)"); session.close(); return n; } catch ( Exception E ){ return 0; } } public int getSumFromTable( String szTableName ){ return this.countFromTable( "SELECT COUNT(*) FROM `" + szTableName + "`" ); } /** Fetch Function **/ public String[] fetchAllColumn ( String szTable ) throws SQLException { String szSQL = "SHOW COLUMNS FROM `" + szTable + "`"; ResultSession session = this.query( szSQL ); ResultSet resultSet = session.getResultSet(); resultSet.last(); int nRow = resultSet.getRow(); resultSet.beforeFirst(); String[] columns = new String[ nRow ]; int j = 0; while ( resultSet.next() ){ columns[ j++ ] = resultSet.getString( 1 ); } session.close(); return columns; } public static String[] column2Array( ResultSet resultSet )throws SQLException { ResultSetMetaData metaData = resultSet.getMetaData(); int nColumnCount = metaData.getColumnCount(); String[] columns = new String[nColumnCount]; for ( int i = 1, j = 0; i <= nColumnCount; i++ ) { columns[j++] = metaData.getColumnLabel(i); } return columns; } public List<Map<String, Object>> fetchAssoc(String szSQL ) throws SQLException { ResultSession session = this.query( szSQL ); ResultSet resultSet = session.getResultSet(); ResultSetMetaData metaData = resultSet.getMetaData(); int sizeofRowSet = metaData.getColumnCount(); ArrayList<Map<String, Object>> queryResult = new ArrayList<>(); int jc = 0; while ( resultSet.next() ){ queryResult.add( new LinkedHashMap<>() ); for ( int i = 1; i <= sizeofRowSet; i++ ) { queryResult.get(jc).put( metaData.getColumnLabel( i ), resultSet.getObject( i ) ); } jc++; } session.close(); return queryResult; } @Override public JSONArray fetch (String szSQL ) throws SQLException { ResultSession session = this.query( szSQL ); ResultSet resultSet = session.getResultSet(); ResultSetMetaData metaData = resultSet.getMetaData(); int sizeofRowSet = metaData.getColumnCount(); JSONArray queryResult = new JSONArraytron(); int jc = 0; while ( resultSet.next() ){ queryResult.put( new JSONMaptron() ); for ( int i = 1; i <= sizeofRowSet; i++ ) { queryResult.getJSONObject( jc ).put( metaData.getColumnLabel( i ), resultSet.getObject( i ) ); } jc++; } session.close(); return queryResult; } /** * Use a Java class to store the query result if the data is operated on particularly frequently (> 1e6). * According to trial results, once the calculation scale goes beyond 1e6 there is a significant * performance gap between Java native objects and the HashMap based com.pinecone::JSONObject.
* ***************************************************************************************** * Experiment at [Intel(R) Core(TM) i7-9750H CPU @ 2.60GHz (Single Thread)]: * Trial at 1e6 : HashMap [11ms], Java native object [<10ms], the difference is tiny. * Trial at 1e7 : HashMap [~100ms], Java native object [10ms ~ 20ms], the difference is large but still acceptable. * Trial at 1e8 : HashMap [>1000ms], Java native object [80ms ~ 150ms], the difference is large and unacceptable. * ***************************************************************************************** * JSONObject mode is recommended for temporary query objects and normal conditions. * NativeObject mode is recommended when the query result will be manipulated frequently. */
// public Object selectJavaify ( String szSQL, Class antetype ) throws SQLException {
//
// }
/** Insert Function **/ public long insertWithArray ( String szSimpleTable, Map<String, Object> dataMap, boolean bReplace ) throws SQLException { if ( dataMap != null ) { return this.execute( this.mSimpleSQLSpawner.spliceInsertSQL( szSimpleTable, dataMap, bReplace ) ); } return -1; } @Override public long insertWithArray ( String szSimpleTable, Map<String, Object> dataMap ) throws SQLException { return insertWithArray(szSimpleTable,dataMap,false); } /** Update Function **/ @Override public long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap, List<Map.Entry<String, Object>> conditionMap, String szConditionGlue ) throws SQLException { if ( dataMap != null ) { return this.execute( this.mSimpleSQLSpawner.spliceUpdateSQL ( szSimpleTable, dataMap, conditionMap, szConditionGlue ), true ); } return -1; } public long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap, List<Map.Entry<String, Object>> conditionMap ) throws SQLException { return this.updateWithArray( szSimpleTable, dataMap, conditionMap, "AND" ); } @Override public long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap, Map<String, Object> conditionMap, String szConditionGlue ) throws SQLException { if ( dataMap != null ) { return this.execute( this.mSimpleSQLSpawner.spliceUpdateSQL ( szSimpleTable, dataMap, conditionMap, szConditionGlue ), true ); } return -1; } public long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap, Map<String, Object> conditionMap ) throws SQLException { return this.updateWithArray( szSimpleTable, dataMap, conditionMap, "AND" ); } public long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap, String szConditionSQL ) throws SQLException { if ( dataMap != null ) { StringBuilder sqlStream = new StringBuilder(); sqlStream.append( this.mSimpleSQLSpawner.spliceNoConditionUpdateSQL( szSimpleTable,dataMap ) ); if ( szConditionSQL!= null ) { if( !szConditionSQL.toLowerCase().contains("where")){ sqlStream.append(" WHERE "); } sqlStream.append( szConditionSQL ); } return this.execute( sqlStream.toString(), true ); } return -1; } public long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap ) throws SQLException { return updateWithArray( szSimpleTable, dataMap, (Map<String, Object>) null, "AND" ); } /** Delete Function **/ @Override public long deleteWithArray ( String szSimpleTable, List<Map.Entry<String, Object>> conditionMap, String szConditionGlue ) throws SQLException { if ( conditionMap != null ) { return this.execute( this.mSimpleSQLSpawner.spliceDeleteSQL( szSimpleTable, conditionMap, szConditionGlue ) ); } return this.execute("TRUNCATE `" + szSimpleTable + '`'); } @Override public long deleteWithArray ( String szSimpleTable, Map<String, Object> conditionMap, String szConditionGlue ) throws SQLException { if ( conditionMap != null ) { return this.execute( this.mSimpleSQLSpawner.spliceDeleteSQL( szSimpleTable, conditionMap,
szConditionGlue ) ); } return this.execute("TRUNCATE `" + szSimpleTable + '`'); } public long deleteWithArray ( String szSimpleTable, List<Map.Entry<String, Object>> conditionMap ) throws SQLException { return this.deleteWithArray( szSimpleTable,conditionMap,"AND" ); } public long deleteWithArray ( String szSimpleTable, Map<String, Object> conditionMap ) throws SQLException { return this.deleteWithArray( szSimpleTable,conditionMap,"AND" ); } public long deleteWithSQL ( String szSimpleTable, String szConditionSQL ) throws SQLException { StringBuilder sqlStream = new StringBuilder(); sqlStream .append( "DELETE FROM `" ).append( szSimpleTable ).append( "`" ); if ( szConditionSQL!= null ) { if( !szConditionSQL.toLowerCase().contains("where")){ sqlStream.append(" WHERE "); } sqlStream.append( szConditionSQL ); } return this.execute( sqlStream.toString() ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/DirectResultSession.java ================================================ package com.pinecone.framework.util.rdb; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; public class DirectResultSession implements ResultSession { protected RDBHost mHost; protected Statement mStatement; protected ResultSet mResultSet; public DirectResultSession( RDBHost host, Statement statement, ResultSet resultSet ) { this.mHost = host; this.mStatement = statement; this.mResultSet = resultSet; } @Override public RDBHost getHost() { return this.mHost; } @Override public Statement getStatement() { return this.mStatement; } @Override public ResultSet getResultSet() { return this.mResultSet; } @Override public void close() throws SQLException { this.mStatement.close(); this.mResultSet.close(); this.mHost = null; } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/MappedExecutor.java ================================================ package com.pinecone.framework.util.rdb; import com.pinecone.framework.util.json.JSONArray; import java.sql.SQLException; import java.util.Map; import java.util.List; public interface MappedExecutor { RDBHost getRDBSQLHost(); long execute( String szSQL, boolean bIgnoreNoAffected ) throws SQLException; ResultSession query( String szSQL ) throws SQLException; JSONArray fetch ( String szSQL ) throws SQLException ; long insertWithArray ( String szSimpleTable, Map<String, Object> dataMap ) throws SQLException; long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap, List<Map.Entry<String, Object>> conditionMap, String szConditionGlue ) throws SQLException; long updateWithArray ( String szSimpleTable, Map<String, Object> dataMap, Map<String, Object> conditionMap, String szConditionGlue ) throws SQLException; long deleteWithArray (String szSimpleTable, List<Map.Entry<String, Object>> conditionMap, String szConditionGlue ) throws SQLException; long deleteWithArray ( String szSimpleTable, Map<String, Object> conditionMap, String szConditionGlue ) throws SQLException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/MappedSQLSplicer.java ================================================ package com.pinecone.framework.util.rdb; import java.util.Map; import java.util.List; public class MappedSQLSplicer implements SQLSplicer { public String spliceSingleKeyValueSequence( Object szKey, Object szValue, String szGlue ){ return String.format(" `%s` = '%s' %s ",szKey,szValue, szGlue ); } public String spliceSimpleKeyValuesSequence( List<? extends Map.Entry<?, ?>> keyValues, String szGlue ) { if ( keyValues!= null ) { StringBuilder sqlStream = new
StringBuilder(); int i = 0, mapSize = keyValues.size(); for ( Map.Entry<?, ?> item : keyValues ) { sqlStream.append( this.spliceSingleKeyValueSequence( item.getKey(),item.getValue(), (i++ != mapSize - 1) ? szGlue : "" ) ); } return sqlStream.toString(); } return ""; } public String spliceSimpleKeyValuesSequence( Map<?, ?> keyValues, String szGlue ) { if ( keyValues!= null ) { StringBuilder sqlStream = new StringBuilder(); int i = 0, mapSize = keyValues.size(); for ( Map.Entry<?, ?> item : keyValues.entrySet() ) { sqlStream.append(this.spliceSingleKeyValueSequence(item.getKey(), item.getValue(), (i++ != mapSize - 1) ? szGlue : "")); } return sqlStream.toString(); } return ""; } public String spliceInsertSQL ( String szFullNameTable, Map<?, ?> dataMap, boolean bReplace ) { if ( dataMap != null ) { StringBuilder sqlStream = new StringBuilder(); sqlStream.append( String.format( bReplace ? "REPLACE INTO `%s` " : "INSERT INTO `%s` ", szFullNameTable ) ) ; int i = 0, mapSize = dataMap.size(); StringBuilder sql_key = new StringBuilder(); StringBuilder sql_value = new StringBuilder(); for ( Map.Entry<?, ?> item : dataMap.entrySet() ) { sql_key .append( "`" ) .append( item.getKey() ).append( "`" ).append((i != mapSize - 1) ? "," : ""); sql_value .append("'" ) .append( item.getValue() ).append( "'" ).append ((i++ != mapSize - 1) ? "," : ""); } sqlStream .append( " ( " ).append( sql_key.toString() ).append( " ) VALUES ( " ).append( sql_value.toString() ).append( " )" ); return sqlStream.toString(); } return ""; } public String spliceNoConditionUpdateSQL( String szFullNameTable, Map<?, ?> dataMap ) { if ( dataMap != null ) { return String.format( "UPDATE `%s` SET %s ", szFullNameTable, this.spliceSimpleKeyValuesSequence(dataMap, ",") ); } return ""; } public String spliceUpdateSQL ( String szFullNameTable, Map<?, ?> dataMap, List<? extends Map.Entry<?, ?>> conditionKeyValues, String szConditionGlue ) { if ( dataMap != null ) { String szConditionSQL = this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue ); if( !szConditionSQL.isEmpty() ){ return this.spliceNoConditionUpdateSQL( szFullNameTable, dataMap ) + " WHERE " + szConditionSQL; } } return ""; } public String spliceUpdateSQL ( String szFullNameTable, Map<?, ?> dataMap, Map<?, ?> conditionKeyValues, String szConditionGlue ) { if ( dataMap != null ) { String szConditionSQL = this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue ); if( !szConditionSQL.isEmpty() ){ return this.spliceNoConditionUpdateSQL( szFullNameTable, dataMap ) + " WHERE " + szConditionSQL; } } return ""; } public String spliceDeleteSQL ( String szFullNameTable, List<? extends Map.Entry<?, ?>> conditionKeyValues, String szConditionGlue ) { StringBuilder sqlStream = new StringBuilder(); sqlStream .append( String.format( "DELETE FROM `%s` ", szFullNameTable ) ); if ( conditionKeyValues != null ) { sqlStream.append( " WHERE " ); sqlStream.append( this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue ) ); } return sqlStream.toString(); } public String spliceDeleteSQL ( String szFullNameTable, Map<?, ?> conditionKeyValues, String szConditionGlue ) { StringBuilder sqlStream = new StringBuilder(); sqlStream .append( String.format( "DELETE FROM `%s` ", szFullNameTable ) ); if ( conditionKeyValues != null ) { sqlStream.append( " WHERE " ); sqlStream.append( this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue ) ); } return sqlStream.toString(); } } ================================================ FILE:
Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/RDBHost.java ================================================ package com.pinecone.framework.util.rdb; import java.sql.*; public interface RDBHost { Connection getConnection(); boolean isClosed() ; void connect() throws SQLException; void close() throws SQLException; Statement createStatement() throws SQLException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/ResultSession.java ================================================ package com.pinecone.framework.util.rdb; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; public interface ResultSession { RDBHost getHost(); default Connection getConnection() { return this.getHost().getConnection(); } Statement getStatement(); ResultSet getResultSet(); void close() throws SQLException; } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/SQLSplicer.java ================================================ package com.pinecone.framework.util.rdb; public interface SQLSplicer { } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/SQLStrings.java ================================================ package com.pinecone.framework.util.rdb; import com.pinecone.framework.util.StringUtils; import java.math.BigDecimal; import java.math.BigInteger; public final class SQLStrings { public static String format( Object val, boolean bIncludeBool ) { String sz; if ( val == null ) { return "null"; } else if ( val instanceof String ) { return "'" + StringUtils.addSlashes( (String) val ) + "'"; } else if ( val instanceof Number ) { if( val instanceof BigDecimal || val instanceof BigInteger ){ sz = val.toString(); } else { return val.toString(); } } else if ( val instanceof Boolean ) { if( bIncludeBool ){ return val.toString(); } else { return ( (boolean) val ) ? 
"1" : "0"; } } else { sz = val.toString(); } return "'" + StringUtils.addSlashes( sz ) + "'"; } public static String format( Object val ) { return SQLStrings.format( val, false ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/TemplateCursorParser.java ================================================ package com.pinecone.framework.util.template; import com.pinecone.framework.system.ParseException; import com.pinecone.framework.util.CursorParser; public class TemplateCursorParser implements CursorParser { protected TemplateParser mParser; protected TemplateCursorParser( TemplateParser parser ) { this.mParser = parser; } @Override public void back() throws ParseException { this.mParser.back(); } @Override public char next() throws ParseException { return this.mParser.next(); } @Override public String next( int n ) throws ParseException { return this.mParser.next(n); } @Override public Object nextValue() throws ParseException { return this.mParser.eval(); } @Override public Object nextValue( Object indexKey, Object parent, Object[] args ) throws ParseException { return this.nextValue(); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/TemplateParser.java ================================================ package com.pinecone.framework.util.template; import com.pinecone.framework.system.ParseException; import com.pinecone.framework.system.prototype.Objectom; import com.pinecone.framework.system.prototype.PinenutTraits; import com.pinecone.framework.util.CursorParser; import com.pinecone.framework.util.GeneralStrings; import java.io.BufferedReader; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.math.BigDecimal; import java.math.BigInteger; import java.util.List; import java.util.Map; /** * Pinecone For Java TemplateParser [ Bean Nuts(R) Almond Dragon, Unify Template Language ] * Copyright © Bean Nuts Foundation ( Dragon King ) All rights reserved. [Harald.E / JH.W] * ***************************************************************************************** * ${xxx}, ${xxx.xxx}, ${xxx["xxx"]}, ${xxx[xxx]} * ***************************************************************************************** */ public class TemplateParser { protected static final String SYNTAX_ERROR = "Syntax error !"; //private String mszNowAt; // For debug only, which is to indict the current parse-at point. //private String mszRaw; // For debug only protected Reader mReader; protected char mcPrevious; protected long mnCharacter; protected boolean mbUsePrevious; protected int mnParseAt ; protected int mnLineAt; protected TokenType mTokenType; protected StringBuilder mCurrentToken; protected StringBuilder mRendered; protected boolean mbEvalMode; protected Objectom mVariableMap; protected CursorParser mThisCursor; enum TokenType { T_UNDEFINED, T_DELIMITER, T_IDENTIFIER, T_INTEGER, T_FLOAT, T_KEYWORD, T_TEMP, T_STRING, T_BLOCK, T_ENDLINE, T_UTL_TAG, T_PASS } public TemplateParser( Reader reader, Object valMap ) { this.mReader = (Reader)(reader.markSupported() ? 
reader : new BufferedReader(reader)); this.mVariableMap = Objectom.wrap( valMap ); this.mCurrentToken = new StringBuilder(); this.mRendered = new StringBuilder(); this.mThisCursor = new TemplateCursorParser(this); } public TemplateParser( String raw, Object valMap ) { this( new StringReader(raw), valMap ); //this.mszRaw = raw; } protected ParseException parseException( String message ) { return new ParseException( new ParseException( message + " at " + this.mnParseAt + " [character " + this.mnCharacter + " line " + this.mnLineAt + "]", (int)this.mnParseAt ) ); } public void back() throws ParseException { if (!this.mbUsePrevious && this.mnParseAt > 0L) { --this.mnParseAt; --this.mnCharacter; this.mbUsePrevious = true; this.mTokenType = TokenType.T_UNDEFINED; } else { throw new ParseException( "Stepping back two steps is not supported" ); } } public boolean end() { return this.mTokenType == TokenType.T_ENDLINE && !this.mbUsePrevious; } public char next() throws ParseException { int c; if ( this.mbUsePrevious ) { this.mbUsePrevious = false; c = this.mcPrevious; } else { try { c = this.mReader.read(); } catch ( IOException e ) { throw new ParseException( e, this.mnParseAt ); } if ( c <= 0 ) { this.mTokenType = TokenType.T_ENDLINE; c = 0; } } ++this.mnParseAt; if ( this.mcPrevious == '\r' ) { ++this.mnLineAt; this.mnCharacter = (long)(c == 10 ? 0 : 1); } else if ( c == '\n' ) { this.mnCharacter = 0L; ++this.mnLineAt; } else { ++this.mnCharacter; } // if ( c != 0 ) { // this.mszNowAt = this.mszRaw.substring(this.mnParseAt); // } this.mcPrevious = (char)c; return this.mcPrevious; } public String next(int n) throws ParseException { if ( n == 0 ) { return ""; } else { char[] chars = new char[n]; for( int pos = 0; pos < n; ++pos ) { chars[pos] = this.next(); if ( this.end() ) { throw this.parseException( "Error parser template string with substring bounds error." 
);
                }
            }
            return new String(chars);
        }
    }

    protected void back_if_parenthesized() {
        if( "]}".indexOf( this.mcPrevious ) >= 0 ) {
            this.back();
        }
    }

    protected void devourUntilEL( char nextChar ) throws ParseException {
        while ( nextChar != 0 ) {
            if ( nextChar == '$' ) {
                int nextNextChar = this.next();
                if ( nextNextChar == '{' ) {
                    this.mCurrentToken.append((char) nextChar);
                    this.mCurrentToken.append((char) nextNextChar);
                    this.mTokenType = TokenType.T_UTL_TAG;
                    this.mbEvalMode = true;
                    return;
                } else {
                    // A '$' not followed by '{' is literal text; the look-ahead character
                    // is re-examined on the next pass rather than dropped.
                    this.mRendered.append((char) nextChar);
                    nextChar = (char) nextNextChar;
                    continue;
                }
            } else {
                this.mRendered.append((char) nextChar);
            }
            nextChar = this.next();
        }
    }

    public void getNextToken() throws ParseException {
        this.mTokenType = TokenType.T_UNDEFINED;
        StringBuilder temp = this.mCurrentToken;
        temp.setLength(0);
        char nextChar = this.next();
        if ( this.end() ) {
            return;
        }
        if ( !this.mbEvalMode ) {
            this.devourUntilEL(nextChar);
            return;
        }
        while ( nextChar != 0 && Character.isWhitespace(nextChar) ) {
            nextChar = this.next();
        }
        while ( nextChar == '\r' ) {
            nextChar = this.next();
            if (nextChar == '\n') {
                nextChar = this.next();
            }
            while ( nextChar != 0 && Character.isWhitespace(nextChar) ) {
                nextChar = this.next();
            }
        }
        if ( this.mbEvalMode && nextChar == '}' ) {
            this.mCurrentToken.append((char) nextChar);
            this.mTokenType = TokenType.T_DELIMITER;
            this.mbEvalMode = false;
            return;
        }
        if ( nextChar == 0 ) {
            this.mTokenType = TokenType.T_ENDLINE;
            return;
        }
        boolean isDoubleQuote = true;
        if ( nextChar == '"' || nextChar == '\'' ) {
            if ( nextChar == '\'' ) {
                isDoubleQuote = false;
            }
            nextChar = this.next();
            // Scan to the matching quote; a raw end-line or EOF inside the literal is an error.
            while ( ( (isDoubleQuote && nextChar != '"') || (!isDoubleQuote && nextChar != '\'') ) && nextChar != '\r' && nextChar != 0 ) {
                if ( nextChar == '\\' ) {
                    nextChar = this.next();
                    GeneralStrings.transferCharParse( nextChar, this.mThisCursor, temp );
                } else {
                    this.mCurrentToken.append( nextChar );
                }
                nextChar = this.next();
            }
            if ( nextChar == '\r' || nextChar == 0 ) {
                throw this.parseException( "Unexpected End-line" );
            }
            this.mTokenType = TokenType.T_STRING;
            return;
        }
        if ( " .[]{}=$".indexOf(nextChar) >= 0 ) {
            temp.append((char) nextChar);
            if (nextChar == '{') {
                temp.append((char) nextChar);
                this.mTokenType = TokenType.T_UTL_TAG;
            } else {
                this.mTokenType = TokenType.T_DELIMITER;
            }
            return;
        }
        if ( Character.isLetter( nextChar ) || nextChar == '_' ) {
            while (!(" .[]{}=$".indexOf(nextChar) >= 0 || nextChar == '\r' || nextChar == '\t' || nextChar == '\n' || nextChar == 0)) {
                temp.append( nextChar );
                nextChar = this.next();
            }
            if( ".[]{}=$".indexOf(nextChar) >= 0 ){
                this.back();
            }
            this.mTokenType = TokenType.T_TEMP;
        }
        if ( Character.isDigit(nextChar) || nextChar == '-' || nextChar == '+' ) {
            int dotTimes = 0;
            int nScientificFlag = 0;
            while ( Character.isDigit(nextChar) || ".+-eE".indexOf(nextChar) >= 0 ) {
                if( nextChar == '.' ) {
                    ++dotTimes;
                } else if( nextChar == 'E' || nextChar == 'e' ) {
                    ++nScientificFlag;
                }
                temp.append(nextChar);
                nextChar = this.next();
            }
            if ( dotTimes > 0 || nScientificFlag > 0 ) {
                this.mTokenType = TokenType.T_FLOAT;
            } else {
                this.mTokenType = TokenType.T_INTEGER;
            }
            return;
        }
        String szCurrentToken = this.mCurrentToken.toString();
        if ( this.mTokenType == TokenType.T_TEMP ) {
            if ( szCurrentToken.equals("undefined") || szCurrentToken.equals("null") || szCurrentToken.equals("false") || szCurrentToken.equals("true") || szCurrentToken.equals("this") ) {
                this.mTokenType = TokenType.T_KEYWORD;
            } else {
                this.mTokenType = TokenType.T_IDENTIFIER;
            }
        }
        if ( this.mTokenType == TokenType.T_UNDEFINED ) {
            throw this.parseException( "\nIllegal token found ! 
What-> \"" + this.mCurrentToken.toString() + "\"" ); } } protected void setPassToken() { this.mTokenType = TokenType.T_PASS; } public String eval() { do { this.getNextToken(); if ( this.mTokenType == TokenType.T_UTL_TAG ) { Object[] refDummy = new Object[1]; this.eval_anonymous_val(refDummy); Object dummy = refDummy[0]; if( dummy == null ) { this.mRendered.append( "null" ); } else { //this.mRendered.append( PinenutTraits.invokeToJSONString( dummy, "[object Unknown]") ); // Debug test this.mRendered.append( PinenutTraits.invokeToString(dummy, "[object Unknown]") ); } } } while ( this.mTokenType != TokenType.T_ENDLINE ); return mRendered.toString(); } public Object evalValue() { this.mbEvalMode = true; Object dummy; Object[] refDummy = new Object[1]; do { this.getNextToken(); if( this.mTokenType == TokenType.T_ENDLINE ) { break; } this.eval_exp_assign(refDummy); } while ( this.mTokenType != TokenType.T_ENDLINE && this.mTokenType != TokenType.T_DELIMITER ); dummy = refDummy[0]; return dummy; } private void eval_anonymous_val( Object[] jtVar ) { do { this.getNextToken(); this.eval_exp_assign(jtVar); } while ( this.mTokenType != TokenType.T_ENDLINE && !(this.mTokenType == TokenType.T_DELIMITER && ( this.mCurrentToken.length() > 0 && ( this.mCurrentToken.charAt(0) == '}' || this.mCurrentToken.charAt(0) == ']' ) ) ) ); } private void eval_exp_assign( Object[] jtVar ) { this.eval_exp_parenthesized(jtVar); } private void eval_exp_parenthesized( Object[] jtVar ) { if( this.eval_exp_obtain(jtVar) ){ return; } if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) == '(' ) { this.getNextToken(); this.eval_exp_assign(jtVar); if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) != ')' ) { throw this.parseException( "Syntax error Missing ')'." ); } } else if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) == '[' ) { Object[] dummyKey = new Object[1]; this.eval_anonymous_val( dummyKey ); if( dummyKey[0] == null ) { throw this.parseException( "Undefined key." ); } if( jtVar[0] instanceof Map ) { Map m = (Map)jtVar[0]; jtVar[0] = m.get(dummyKey[0].toString()); } else if( jtVar[0] instanceof List ) { List m = (List)jtVar[0]; int id; if( dummyKey[0] instanceof Number ) { id = ( (Number)dummyKey[0] ).intValue(); } else if( dummyKey[0] instanceof String ) { id = Integer.parseInt( (String) dummyKey[0] ); } else { id = Integer.parseInt( dummyKey[0].toString() ); } jtVar[0] = m.get(id); } else { throw this.parseException( "Error variable status, should be Map." ); } if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) != ']' ) { throw this.parseException( "Syntax error Missing ']'." ); } this.setPassToken(); } else { this.variable_obtain(jtVar); } } private boolean eval_exp_obtain( Object[] jtVar ) { if ( this.mTokenType == TokenType.T_DELIMITER ) { if ( this.mCurrentToken.toString().equals(".") ) { this.getNextToken(); if ( this.mTokenType == TokenType.T_IDENTIFIER ) { if( jtVar[0] instanceof Map ) { Map m = (Map)jtVar[0]; jtVar[0] = m.get( this.mCurrentToken.toString() ); } else if( jtVar[0] instanceof Objectom ) { Objectom m = (Objectom)jtVar[0]; jtVar[0] = m.get( this.mCurrentToken.toString() ); } else { throw this.parseException( "Error variable status, should be Map." 
);
                    }
                } else {
                    throw this.parseException( "Illegal template offset" );
                }
                return true;
            }
        }
        return false;
    }

    private void variable_obtain( Object[] jtVar ) {
        String szCurrentToken = this.mCurrentToken.toString();
        switch ( this.mTokenType ) {
            case T_IDENTIFIER : {
                if ( this.mVariableMap.containsKey(szCurrentToken) ) {
                    jtVar[0] = this.mVariableMap.get(szCurrentToken);
                }
                //this.getNextToken();
                break;
            }
            case T_INTEGER: {
                if (szCurrentToken.length() > 18) {
                    jtVar[0] = new BigInteger(szCurrentToken);
                } else {
                    jtVar[0] = Long.parseLong(szCurrentToken);
                }
                //this.getNextToken();
                this.back_if_parenthesized();
                break;
            }
            case T_FLOAT: {
                if ( szCurrentToken.length() > 18 ) {
                    jtVar[0] = new BigDecimal(szCurrentToken);
                } else {
                    double n;
                    if ( szCurrentToken.equals("-INF") || szCurrentToken.equals("-Infinity") ) {
                        n = Double.NEGATIVE_INFINITY;
                    } else if (szCurrentToken.equals("+INF") || szCurrentToken.equals("+Infinity")) {
                        n = Double.POSITIVE_INFINITY;
                    } else {
                        n = Double.parseDouble(szCurrentToken);
                    }
                    jtVar[0] = n;
                }
                //this.getNextToken();
                this.back_if_parenthesized();
                break;
            }
            case T_STRING: {
                jtVar[0] = szCurrentToken;
                //this.getNextToken();
                break;
            }
            case T_DELIMITER: {
                if (".)]}".indexOf(szCurrentToken.charAt(0)) != -1) {
                    break;
                } else {
                    throw parseException( TemplateParser.SYNTAX_ERROR );
                }
            }
            case T_KEYWORD: {
                if ( szCurrentToken.equalsIgnoreCase("true") ) {
                    jtVar[0] = true;
                } else if ( szCurrentToken.equalsIgnoreCase("false") ) {
                    jtVar[0] = false;
                } else if ( szCurrentToken.equalsIgnoreCase("null") || szCurrentToken.equalsIgnoreCase("undefined") ) {
                    jtVar[0] = null;
                } else if ( szCurrentToken.equalsIgnoreCase("this") ) {
                    if ( this.mVariableMap.containsKey("this") ) {
                        jtVar[0] = this.mVariableMap.get("this");
                    } else {
                        jtVar[0] = this.mVariableMap;
                    }
                } else {
                    throw this.parseException( TemplateParser.SYNTAX_ERROR );
                }
                //this.getNextToken();
                break;
            }
            case T_UTL_TAG: {
                this.eval_anonymous_val(jtVar);
                // The evaluated inner value serves as the lookup key into the variable map.
                jtVar[0] = this.mVariableMap.get( jtVar[0] );
                this.mbEvalMode = true;
                this.getNextToken();
                if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) != '}' ) {
                    this.back();
                }
                break;
            }
            default: {
                throw this.parseException( TemplateParser.SYNTAX_ERROR );
            }
        }
    }
}
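A minimal rendering sketch for the grammar documented above, mirroring the TestTemplate.test_UTL case in this repository's test sources; the demo class name and the exact template string are illustrative, and the printed result is the expected substitution, not a recorded output:

import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.template.TemplateParser;

public class UTLUsageSketch {
    public static void main( String[] args ) {
        // Variable map: key.g.c -> 'k', key.k -> '1xxxx2', arr[1].f['k'] -> 'sss'
        Object vals = ( new JSONMaptron( "{ key:{ g:{ c:'k' }, k:'1xxxx2' }, arr:[1,{f:{k:'sss'}},3] }" ) ).getMap();
        TemplateParser parser = new TemplateParser( "head ${arr[1].f['k']} ${ key[ key['g'].c ] } tail", vals );
        // Literal text is copied through; each ${...} expression is evaluated against the map.
        System.out.println( parser.eval() ); // expected: head sss 1xxxx2 tail
    }
}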
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/UTRAlmondProvider.java ================================================
package com.pinecone.framework.util.template;

import java.io.IOException;
import java.io.Writer;

import com.pinecone.framework.system.ProxyProvokeHandleException;
import com.pinecone.framework.system.prototype.Objectom;

public class UTRAlmondProvider implements UniformTemplateRenderer {
    @Override
    public String render( String tpl, Objectom context ){
        TemplateParser parser = new TemplateParser( tpl, context );
        return parser.eval();
    }

    @Override
    public void render( String tpl, Objectom context, Writer writer ) {
        // TODO
        try{
            writer.write( this.render( tpl, context ) );
        }
        catch ( IOException e ) {
            throw new ProxyProvokeHandleException( e );
        }
    }
}

================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/UniformTemplateRenderer.java ================================================
package com.pinecone.framework.util.template;

import java.io.IOException;
import java.io.Writer;

import com.pinecone.framework.system.prototype.Objectom;
import com.pinecone.framework.system.prototype.Pinenut;

public interface UniformTemplateRenderer extends Pinenut {
    UniformTemplateRenderer DefaultRenderer = new UTRAlmondProvider();

    String render( String tpl, Objectom context );

    default String render( String tpl, Object context ) {
        return this.render( tpl, Objectom.wrap( context ) );
    }

    void render( String tpl, Objectom context, Writer writer ) ;

    default void render( String tpl, Object context, Writer writer ) {
        this.render( tpl, Objectom.wrap( context ), writer );
    }
}

================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/GenericUniformObjectLoaderFactory.java ================================================
package com.pinecone.framework.util.uoi;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;

import com.pinecone.framework.system.NoSuchProviderException;
import com.pinecone.framework.util.lang.DynamicFactory;
import com.pinecone.framework.util.lang.GenericDynamicFactory;

public class GenericUniformObjectLoaderFactory implements UniformObjectLoaderFactory {
    protected Map<String, Class> mUOLRegister = new HashMap<>();
    protected ClassLoader mClassLoader;
    protected DynamicFactory mDynamicFactory;

    public GenericUniformObjectLoaderFactory( ClassLoader classLoader, DynamicFactory dynamicFactory ) {
        this.mClassLoader = classLoader;
        this.mDynamicFactory = dynamicFactory;
        this.mUOLRegister.put( UniformObjectLoaderFactory.DefaultJavaClassType, LocalUOIJavaClassProvider.class );
    }

    public GenericUniformObjectLoaderFactory( ClassLoader classLoader ) {
        this( classLoader, new GenericDynamicFactory( classLoader ));
    }

    public GenericUniformObjectLoaderFactory() {
        this( Thread.currentThread().getContextClassLoader() );
    }

    @Override
    public Class getUniformObjectLoader( String loaderName ) {
        return this.mUOLRegister.get( loaderName );
    }

    @Override
    public void register( String loaderName, Class loader ) {
        this.mUOLRegister.put( loaderName, loader );
    }

    @Override
    public void deregister( String loaderName ) {
        this.mUOLRegister.remove( loaderName );
    }

    @Override
    public int size() {
        return this.mUOLRegister.size();
    }

    @Override
    public boolean isEmpty() {
        return this.mUOLRegister.isEmpty();
    }

    @Override
    public UniformObjectLoader newLoader( String loaderName ) throws NoSuchProviderException {
        Class clazz = this.getUniformObjectLoader( loaderName );
        if( clazz == null ) {
            throw new NoSuchProviderException( loaderName );
        }
        try{
            Constructor constructor = clazz.getConstructor( ClassLoader.class, DynamicFactory.class );
            return (UniformObjectLoader) constructor.newInstance( this.mClassLoader, this.mDynamicFactory );
        }
        catch ( NoSuchMethodException | InvocationTargetException | IllegalAccessException | InstantiationException e ) {
            return null;
        }
    }
}

================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/LocalUOIJavaClassProvider.java ================================================
package com.pinecone.framework.util.uoi;

import com.pinecone.framework.util.StringUtils;
import com.pinecone.framework.util.lang.DynamicFactory;
import com.pinecone.framework.util.lang.GenericDynamicFactory;

public class LocalUOIJavaClassProvider implements UniformObjectLoader {
    protected ClassLoader mClassLoader;
    protected DynamicFactory mDynamicFactory;

    public LocalUOIJavaClassProvider( ClassLoader classLoader, DynamicFactory dynamicFactory ) {
        this.mClassLoader = classLoader;
        this.mDynamicFactory = dynamicFactory;
    }

    public LocalUOIJavaClassProvider( ClassLoader classLoader ) {
        this( classLoader, new GenericDynamicFactory( classLoader ));
    }

    public LocalUOIJavaClassProvider() { this( 
Thread.currentThread().getContextClassLoader() ); } @Override public Class toClass( UOI uoi ) throws IllegalArgumentException { if( !StringUtils.isEmpty( uoi.getHost() ) ) { throw new IllegalArgumentException( "Remote host [" + uoi.getHost() + "] is not supported." ); } try{ return this.mClassLoader.loadClass( uoi.getObjectName() ); } catch ( ClassNotFoundException e ) { return null; } } @Override public Object newInstance( UOI uoi, Class[] paramTypes, Object... args ) { return this.mDynamicFactory.optNewInstance( this.toClass(uoi), paramTypes, args ); } @Override public Object newInstance( UOI uoi, Object... args ) { return this.mDynamicFactory.optNewInstance( this.toClass(uoi), null, args ); } } ================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/UOI.java ================================================ package com.pinecone.framework.util.uoi; import java.net.URI; import com.pinecone.framework.system.NoSuchProviderException; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.prototype.Pinenut; public class UOI implements Pinenut { protected URI mResourceIdentifier; protected UniformObjectLoader mUniformObjectLoader; protected UniformObjectLoaderFactory mUniformObjectLoaderFactory; public UOI( URI uri, UniformObjectLoaderFactory factory ) { this.resolve( uri, factory ); } public UOI( String szURI, UniformObjectLoaderFactory factory ){ this.resolve( szURI, factory ); } public UOI( URI uri ) { this( uri, UniformObjectLoaderFactory.DefaultObjectLoaderFactory ); } public UOI( String szURI ) { this( szURI, UniformObjectLoaderFactory.DefaultObjectLoaderFactory ); } public UOI() { } public void resolve( URI uri, UniformObjectLoaderFactory factory ) throws ProxyProvokeHandleException { this.mResourceIdentifier = uri; this.mUniformObjectLoaderFactory = factory; try{ this.mUniformObjectLoader = factory.newLoader( this.getScheme() ); } catch ( NoSuchProviderException e ) { throw new ProxyProvokeHandleException( e ); } } public void resolve( String szURI, UniformObjectLoaderFactory factory ){ this.resolve( URI.create( szURI ), factory ); } public void resolve( URI uri ) { this.resolve( uri, UniformObjectLoaderFactory.DefaultObjectLoaderFactory ); } public void resolve( String str ) { this.resolve( str, UniformObjectLoaderFactory.DefaultObjectLoaderFactory ); } public String getObjectName() { String szPath = this.mResourceIdentifier.getPath(); if( szPath.startsWith( "/" ) ) { return szPath.substring( 1 ); } return szPath; } public Class toClass() { return this.mUniformObjectLoader.toClass( this ); } public Object newInstance( Class[] paramTypes, Object... args ) { return this.mUniformObjectLoader.newInstance( this, paramTypes, args ); } public Object newInstance( Object... 
args ) {
        return this.mUniformObjectLoader.newInstance( this, args );
    }

    public String getScheme() { return this.mResourceIdentifier.getScheme(); }
    public String getSchemeSpecificPart() { return this.mResourceIdentifier.getSchemeSpecificPart(); }
    public String getRawSchemeSpecificPart() { return this.mResourceIdentifier.getRawSchemeSpecificPart(); }
    public String getUserInfo() { return this.mResourceIdentifier.getUserInfo(); }
    public String getRawUserInfo() { return this.mResourceIdentifier.getRawUserInfo(); }
    public String getHost() { return this.mResourceIdentifier.getHost(); }
    public int getPort() { return this.mResourceIdentifier.getPort(); }
    public String getPath() { return this.mResourceIdentifier.getPath(); }
    public String getRawPath() { return this.mResourceIdentifier.getRawPath(); }
    public String getQuery() { return this.mResourceIdentifier.getQuery(); }
    public String getRawQuery() { return this.mResourceIdentifier.getRawQuery(); }
    public String getFragment() { return this.mResourceIdentifier.getFragment(); }
    public String getRawFragment() { return this.mResourceIdentifier.getRawFragment(); }
    public boolean isAbsolute() { return this.mResourceIdentifier.isAbsolute(); }
    public boolean isOpaque() { return this.mResourceIdentifier.isOpaque(); }
    public URI normalize() { return this.mResourceIdentifier.normalize(); }
    public URI relativize(URI uri) { return this.mResourceIdentifier.relativize(uri); }

    @Override
    public boolean equals( Object obj ) {
        // Unwrap UOI arguments so that two UOIs over the same URI compare equal.
        if ( obj instanceof UOI ) {
            return this.mResourceIdentifier.equals( ((UOI) obj).mResourceIdentifier );
        }
        return this.mResourceIdentifier.equals( obj );
    }

    @Override
    public int hashCode() { return this.mResourceIdentifier.hashCode(); }

    @Override
    public String toString() { return this.mResourceIdentifier.toString(); }

    @Override
    public String toJSONString() { return "\"" + this.toString() + "\""; }

    public String toASCIIString() { return this.mResourceIdentifier.toASCIIString(); }

    public static UOI create( String uri ) { return new UOI( uri ); }

    public static UOI create( String uri, UniformObjectLoaderFactory factory ) { return new UOI( uri, factory ); }
}
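A brief usage sketch for UOI, not part of the repository: assuming, from getObjectName() above (which strips one leading '/'), that a "java-class" URI carries the fully qualified class name in its path. The demo class name and sample URI are illustrative, and the no-arg construction behavior of DynamicFactory.optNewInstance is assumed:

import com.pinecone.framework.util.uoi.UOI;

public class UOIUsageSketch {
    public static void main( String[] args ) {
        // "java-class" is the scheme registered as DefaultJavaClassType in the factory
        // interface below; the default factory resolves it to LocalUOIJavaClassProvider.
        UOI uoi = UOI.create( "java-class:///java.lang.StringBuilder" );

        Class clazz = uoi.toClass();      // loaded through the context ClassLoader
        Object inst = uoi.newInstance();  // delegated to DynamicFactory.optNewInstance

        System.out.println( clazz.getName() + " -> " + inst.getClass().getName() );
    }
}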
================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/UniformObjectLoader.java ================================================
package com.pinecone.framework.util.uoi;

import com.pinecone.framework.system.prototype.Pinenut;

public interface UniformObjectLoader extends Pinenut {
    Class toClass( UOI uoi ) ;

    Object newInstance( UOI uoi, Class[] paramTypes, Object... args );

    Object newInstance( UOI uoi, Object... args );
}

================================================ FILE: Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/UniformObjectLoaderFactory.java ================================================
package com.pinecone.framework.util.uoi;

import com.pinecone.framework.system.NoSuchProviderException;
import com.pinecone.framework.system.prototype.Pinenut;

public interface UniformObjectLoaderFactory extends Pinenut {
    String DefaultJavaClassType = "java-class";
    UniformObjectLoaderFactory DefaultObjectLoaderFactory = new GenericUniformObjectLoaderFactory();

    Class getUniformObjectLoader( String loaderName ) ;

    void register( String loaderName, Class loader ) ;

    void deregister( String loaderName );

    int size();

    boolean isEmpty();

    UniformObjectLoader newLoader( String loaderName ) throws NoSuchProviderException;
}

================================================ FILE: Pinecones/Pinecone/src/test/java/com/system/SimpleCascadeComponentManager.java ================================================
package com.system;

import com.pinecone.framework.system.architecture.ArchCascadeComponent;
import com.pinecone.framework.system.architecture.ArchCascadeComponentManager;
import com.pinecone.framework.system.architecture.CascadeComponent;
import com.pinecone.framework.system.architecture.CascadeComponentManager;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.framework.util.name.UniNamespace;

class SimpleCascadeComponent extends ArchCascadeComponent {
    public SimpleCascadeComponent( Namespace name, CascadeComponentManager manager, CascadeComponent parent ) {
        super( name, manager, parent );
    }

    public SimpleCascadeComponent( String name, CascadeComponentManager manager ) {
        super( new UniNamespace( name ), manager, null );
    }
}

public class SimpleCascadeComponentManager extends ArchCascadeComponentManager {
}

================================================ FILE: Pinecones/Pinecone/src/test/java/com/system/TestComponent.java ================================================
package com.system;

import com.pinecone.Pinecone;
import com.pinecone.framework.util.Debug;

public class TestComponent {
    public static void testAdd() throws Exception {
        SimpleCascadeComponentManager manager = new SimpleCascadeComponentManager();
        SimpleCascadeComponent A = new SimpleCascadeComponent( "A", manager );
        SimpleCascadeComponent B = new SimpleCascadeComponent( "B", manager );
        SimpleCascadeComponent C = new SimpleCascadeComponent( "C", manager );
        manager.addComponent( A );
        manager.addComponent( B );
        manager.addComponent( C );
        SimpleCascadeComponent a1 = new SimpleCascadeComponent( "a1", manager );
        SimpleCascadeComponent a2 = new SimpleCascadeComponent( "a2", manager );
        A.addChildComponent( a1 );
        A.addChildComponent( a2 );
        SimpleCascadeComponent b1 = new SimpleCascadeComponent( "b1", manager );
        SimpleCascadeComponent b2 = new SimpleCascadeComponent( "b2", manager );
        B.addChildComponent( b1 );
        B.addChildComponent( b2 );
        SimpleCascadeComponent c1 = new SimpleCascadeComponent( "c1", manager );
        C.addChildComponent( c1 );
        Debug.fmt( 2, manager.getComponents() );
        Debug.fmt( 2, manager.getComponentsRegisterList() );
    }

    public static void testRefer() throws Exception {
        SimpleCascadeComponentManager manager = new SimpleCascadeComponentManager();
        SimpleCascadeComponent A = new SimpleCascadeComponent( "A", manager );
        SimpleCascadeComponent B = new SimpleCascadeComponent( "B", manager );
        SimpleCascadeComponent C = new SimpleCascadeComponent( "C", manager );
        manager.addComponent( A );
        manager.addComponent( B );
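        // testRefer: besides exclusive parent-child links (addChildComponent), a component
        // may be attached to additional parents as a shared reference via referChildComponent
        // (assumed semantics, exercised below).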
manager.addComponent( C ); SimpleCascadeComponent a1 = new SimpleCascadeComponent( "a1", manager ); SimpleCascadeComponent a2 = new SimpleCascadeComponent( "a2", manager ); A.addChildComponent( a1 ); A.addChildComponent( a2 ); SimpleCascadeComponent b1 = new SimpleCascadeComponent( "b1", manager ); SimpleCascadeComponent b2 = new SimpleCascadeComponent( "b2", manager ); B.addChildComponent( b1 ); B.addChildComponent( b2 ); B.referChildComponent( a2 ); SimpleCascadeComponent c1 = new SimpleCascadeComponent( "c1", manager ); C.addChildComponent( c1 ); C.referChildComponent( b2 ); Debug.fmt( 2,manager.getComponents() ); Debug.fmt( 2, manager.getComponentsRegisterList() ); Debug.fmt( 2, C.children() ); } public static void testCascadeRemove() throws Exception { SimpleCascadeComponentManager manager = new SimpleCascadeComponentManager(); SimpleCascadeComponent A = new SimpleCascadeComponent( "A", manager ); SimpleCascadeComponent B = new SimpleCascadeComponent( "B", manager ); SimpleCascadeComponent C = new SimpleCascadeComponent( "C", manager ); manager.addComponent( A ); manager.addComponent( B ); manager.addComponent( C ); SimpleCascadeComponent a1 = new SimpleCascadeComponent( "a1", manager ); SimpleCascadeComponent a2 = new SimpleCascadeComponent( "a2", manager ); A.addChildComponent( a1 ); A.addChildComponent( a2 ); SimpleCascadeComponent b1 = new SimpleCascadeComponent( "b1", manager ); SimpleCascadeComponent b2 = new SimpleCascadeComponent( "b2", manager ); SimpleCascadeComponent b3 = new SimpleCascadeComponent( "b3", manager ); SimpleCascadeComponent b3_1 = new SimpleCascadeComponent( "b3_1", manager ); b3.addChildComponent( b3_1 ); b3.referChildComponent( a1 ); B.addChildComponent( b1 ); B.addChildComponent( b2 ); B.addChildComponent( b3 ); B.referChildComponent( a2 ); SimpleCascadeComponent c1 = new SimpleCascadeComponent( "c1", manager ); C.addChildComponent( c1 ); C.referChildComponent( a1 ); C.referChildComponent( b2 ); Debug.fmt( 2,manager.getComponents() ); Debug.fmt( 2, manager.getComponentsRegisterList() ); Debug.fmt( 2, C.children() ); // Test omega remove child // manager.removeComponent( a1 ); // Debug.fmt( 2,manager.getComponents() ); // Debug.fmt( 2, manager.getComponentsRegisterList() ); // Debug.fmt( 2, C.children() ); // Test omega remove parent manager.removeComponent( A ); // Diane has been erased from every universe across infinity. 
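// Assumed expectation: removing A cascades through its owned subtree, while a1/a2, which are
// also referred by b3, B and C, are what the traces below inspect for correct detachment.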
Debug.fmt( 2,manager.getComponents() ); Debug.fmt( 2, manager.getComponentsRegisterList() ); Debug.fmt( 2, C.children() ); Debug.fmt( 2, b3.children() ); // Test self-destruction // A.purge(); // Debug.fmt( 2,manager.getComponents() ); // Debug.fmt( 2, manager.getComponentsRegisterList() ); // Debug.fmt( 2, C.children() ); // Debug.fmt( 2, b3.children() ); // Test others // a1.independent( "cyc" ); // Debug.fmt( 2,manager.getComponents() ); // Debug.fmt( 2, manager.getComponentsRegisterList() ); // Debug.fmt( 2, C.children() ); // Debug.fmt( 2, b3.children() ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestComponent.testAdd(); //TestComponent.testRefer(); TestComponent.testCascadeRemove(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/unit/JavaGenericTests.java ================================================ package com.unit; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.LinkedHashMap; import java.util.Map; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; public class JavaGenericTests { public static void testBasic() { Map instance = new LinkedHashMap<>(); Class clazz = instance.getClass(); Type genericSuperclass = clazz.getGenericSuperclass(); if ( genericSuperclass instanceof ParameterizedType ) { ParameterizedType parameterizedType = (ParameterizedType) genericSuperclass; Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); for ( Type type : actualTypeArguments ) { Debug.trace( type.getTypeName() ); } } } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ JavaGenericTests.testBasic(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/unit/TestFileIteratorAndDistinct.java ================================================ package com.unit; import com.pinecone.Pinecone; import com.pinecone.framework.unit.distinct.*; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.io.FileNamePathIterator; import com.pinecone.framework.util.io.FileUtils; import com.pinecone.framework.util.io.PathItemIterator; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.*; public class TestFileIteratorAndDistinct { public static void testFileIterator() throws Exception { PathItemIterator iterator = new PathItemIterator( Path.of( "C:/Users/undefined/Desktop/wolfmc" ), false ); while ( iterator.hasNext() ) { Debug.trace( iterator.next().toString() ); } } public static void testDistinct_Simple() throws Exception { List list1 = Arrays.asList( "t1", "t2", "t3", "t4" ); List list2 = Arrays.asList( "t1", "t3", "t5", "t6" ); List list3 = Arrays.asList( "t1", "t2", "t3", "t7", "t7" ); List list4 = Arrays.asList( "t8", "t2", "t3", "t9" ); MegaBloomDistinctAudit distinctAudit = new MegaBloomDistinctAudit<>( List.of( list1, list2, list3 ), DistinctType.SymmetricDistinct, new HashSet<>() ); Collection distinctElements = distinctAudit.audit(); Debug.trace( distinctElements ); Debug.trace( distinctAudit.audit( list4 ) ); Debug.trace( distinctAudit.hasOwnElement( "t3" ) ); } public static void testDistinct_Master() throws Exception { List list1 = Arrays.asList( "t1", "t2", "t3", "t4" ); List list2 = Arrays.asList( "t1", "t3", "t5", "t6" 
); List list3 = Arrays.asList( "t1", "t2", "t3", "t7", "t7" ); List list4 = Arrays.asList( "t8", "t2", "t3", "t9" ); MegaPrototypeBloomDistinctAudit distinctAudit = new MegaPrototypeBloomDistinctAudit<>( list1.iterator(), List.of( list2, list3 ), DistinctType.SymmetricDistinct, new HashSet<>() ); Collection distinctElements = distinctAudit.audit(); Debug.trace( distinctElements ); Debug.trace( distinctAudit.audit( list4 ) ); Debug.trace( distinctAudit.hasOwnElement( "t3" ) ); } public static void testDistinct_Tiny() throws Exception { List list1 = Arrays.asList( "t1", "t2", "t3", "t4", "t5", "t6", "t7", "t8", "t9", "t10", "t11" ); List list2 = Arrays.asList( "t6", "t2", "t4", "t8", "t1", "t3", "t5" ); List list3 = Arrays.asList( "t1", "t2", "t3", "t7", "t7" ); List list4 = Arrays.asList( "t8", "t2", "t3", "t9" ); GenericDistinctAudit distinctAudit = new GenericDistinctAudit<>( List.of( list1, list2 ), DistinctType.SymmetricDistinct, new ArrayList<>() ); Collection distinctElements = distinctAudit.audit(); Debug.trace( distinctElements ); // Debug.trace( distinctAudit.audit( list4 ) ); // // Debug.trace( distinctAudit.hasOwnElement( "t3" ) ); } public static void testDistinct_TinyMaster() throws Exception { List list1 = Arrays.asList( "t1", "t2", "t3", "t4" ); List list2 = Arrays.asList( "t1", "t3", "t5", "t6" ); List list3 = Arrays.asList( "t1", "t2", "t3", "t7", "t7" ); List list4 = Arrays.asList( "t8", "t2", "t3", "t9" ); GenericPrototypeDistinctAudit distinctAudit = new GenericPrototypeDistinctAudit<>( list1.iterator(), List.of( list2, list3 ), DistinctType.SymmetricDistinct, new HashSet<>() ); Collection distinctElements = distinctAudit.audit(); Debug.trace( distinctElements ); Debug.trace( distinctAudit.audit( list4 ) ); Debug.trace( distinctAudit.hasOwnElement( "t3" ) ); } public static void testDistinct_MegaMerge() throws Exception { Collection list1 = List.of( "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t8", "t9" ); //List list2 = Arrays.asList( "t9", "t8", "t6", "t5", "t0", "t2" ); Collection list2 = List.of( "t0", "t1", "t2", "t3", "t6", "t5" ); List list3 = Arrays.asList( "t1", "t2", "t3", "t7", "t7" ); List list4 = Arrays.asList( "t8", "t2", "t3", "t9" ); MegaMergeDistinctAudit distinctAudit = new MegaMergeDistinctAudit<>( list1.iterator(), list2.iterator(), 2 ); Collection distinctElements = distinctAudit.audit(); Debug.trace( distinctElements ); //Debug.trace( distinctAudit.audit( list4 ) ); //Debug.trace( distinctAudit.hasOwnElement( "t3" ) ); } public static void testDistinct_dir() throws Exception { Path desk = Path.of( "C:/Users/undefined/Desktop/wolfmc" ); FileNamePathIterator iterator1 = new FileNamePathIterator( desk, false ); FileNamePathIterator iterator2 = new FileNamePathIterator( Path.of( "E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecones/Hydra/src/main/java/com/pinecone/hydra/umc/wolfmc" ), false ); // List fn1 = new ArrayList<>(); // while ( iterator1.hasNext() ) { // fn1.add( iterator1.next().toString() ); // } // // List fn2 = new ArrayList<>(); // while ( iterator2.hasNext() ) { // fn2.add( iterator2.next().toString() ); // } // // Set s1 = new HashSet<>( fn2 ); // List uni = new ArrayList<>(); // for( String s : fn1 ) { // if( !s1.contains( s ) ) { // uni.add( s ); // } // } // GenericDistinctAudit distinctAudit = new GenericDistinctAudit<>( // List.of( fn1, fn2 ), DistinctType.SymmetricDistinct, new ArrayList<>() // ); // GenericPrototypeDistinctAudit distinctAudit = new GenericPrototypeDistinctAudit<>( // fn2.iterator(), List.of( fn1 ), 
DistinctType.SymmetricDistinct, new ArrayList<>() // ); // // Collection c = distinctAudit.audit(); // //c = uni; // Debug.trace( c.size() ); // for( String p : c ) { // Debug.trace( desk.resolve( p ).toString() ); // // //Files.copy( p.toAbsolutePath(), Path.of( "C:/Users/undefined/Desktop/welsir" ), StandardCopyOption.REPLACE_EXISTING ); // } GenericPrototypeDistinctAudit distinctAudit2 = new GenericPrototypeDistinctAudit<>( iterator2, List.of( iterator1 ), new ArrayList<>(), DistinctType.SymmetricDistinct ); Collection cp = distinctAudit2.audit(); Debug.trace( cp.size() ); for( Path p : cp ) { Debug.trace( desk.resolve( p ).toString() ); Files.copy( desk.resolve( p ), Path.of( "C:/Users/undefined/Desktop/welsir/" ).resolve( p ), StandardCopyOption.REPLACE_EXISTING ); } } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ //TestFileIteratorAndDistinct.testFileIterator(); //TestFileIteratorAndDistinct.testDistinct_Simple(); //TestFileIteratorAndDistinct.testDistinct_Master(); //TestFileIteratorAndDistinct.testDistinct_Tiny(); //TestFileIteratorAndDistinct.testDistinct_TinyMaster(); //TestFileIteratorAndDistinct.testDistinct_MegaMerge(); TestFileIteratorAndDistinct.testDistinct_dir(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/unit/TestMultiValueMap.java ================================================ package com.unit; import com.pinecone.Pinecone; import com.pinecone.framework.unit.LinkedMultiValueMap; import com.pinecone.framework.util.Debug; public class TestMultiValueMap { public static void testBasic() { LinkedMultiValueMap multiValueMap = new LinkedMultiValueMap<>(); multiValueMap.add( 1, "fuck1" ); multiValueMap.add( 2, "fuck2" ); multiValueMap.add( 1, "fuck1_1" ); Debug.trace( multiValueMap ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ TestMultiValueMap.testBasic(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/unit/TestUnits.java ================================================ package com.unit; import com.pinecone.Pinecone; import com.pinecone.framework.unit.*; import com.pinecone.framework.unit.TreeMap; import com.pinecone.framework.unit.tabulate.*; import com.pinecone.framework.unit.trie.TrieMap; import com.pinecone.framework.unit.trie.TrieNode; import com.pinecone.framework.unit.trie.UniTrieMaptron; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @SuppressWarnings( "unchecked" ) public class TestUnits { public static void testUniScopeMap() { UniScopeMaptron map2 = new UniScopeMaptron(new JSONMaptron("{c1:'kc1', c2:'kc2', c3:'kc3'}")); UniScopeMaptron map1 = new UniScopeMaptron(new JSONMaptron("{c1:'kb1', b2:'kb2', b3:'kb3'}"), map2); UniScopeMaptron map = new UniScopeMaptron(true, map1); //map.put( "ka1", "a1" ); for ( Object o: map.scopeEntrySet() ) { Map.Entry kv = (Map.Entry) o; Debug.trace( kv.getKey(), kv.getValue() ); } Debug.trace( map.get("c1") ); Debug.trace( map.get("b2") ); Debug.trace( map.get("d3") ); LinkedHashMap ls = new LinkedHashMap(); ls.put( "g1", "gg" ); map.elevate( ls ); LinkedHashMap linkedHashMap = new LinkedHashMap(); 
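// overrideTo presumably projects the merged scope view (own entries plus ancestor scopes)
// into the supplied map; the trace below inspects the flattened result.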
map.overrideTo( linkedHashMap ); Debug.trace( linkedHashMap, map.isEmpty(), map.isScopeEmpty(), map.ancestors(), map.hasOwnProperty("g1"), map.hasOwnProperty("kc2") ); } public static void testMultiScopeMap() { MultiScopeMaptron map4_0 = new MultiScopeMaptron(new JSONMaptron("{e1:'ke1', e2:'ke2'}")); MultiScopeMaptron map3_0 = new MultiScopeMaptron(new JSONMaptron("{d1:'kd1', c1:'kd1'}")); map3_0.addParent( new MultiScopeMaptron() ); map3_0.addParent( ( new MultiScopeMaptron() ).addParent( map4_0 ) ); MultiScopeMaptron map2_0 = new MultiScopeMaptron(new JSONMaptron("{c1:'kc1', c2:'kc2', c3:'kc3'}"), null, "jesus"); MultiScopeMaptron map2_1 = new MultiScopeMaptron(new JSONMaptron("{c11:'kc11'}")); map2_1.addParent(map3_0); MultiScopeMaptron map1 = new MultiScopeMaptron(new JSONMaptron("{c1:'kb1', b2:'kb2', b3:'kb3'}")); map1.addParent( map2_0 ).addParent( map2_1 ); MultiScopeMaptron map = new MultiScopeMaptron(true, null); map.addParent( map1 ); map.put( "fuck", "me" ); map.put( "fuck2", "this" ); //map.put( "ka1", "a1" ); Debug.trace( map.scopes() ); for ( Object o: map.scopeEntrySet() ) { Map.Entry kv = (Map.Entry) o; Debug.trace( kv.getKey(), kv.getValue() ); } Debug.trace( map.getAll( "c1" ), map.query( "c1","jesus" ) ); Debug.trace( map.get("c1") ); Debug.trace( map.get("b2") ); Debug.trace( map.get("d3") ); LinkedHashMap ls = new LinkedHashMap(); ls.put( "g1", "gg" ); map.elevate( ls ); LinkedHashMap linkedHashMap = new LinkedHashMap(); map.overrideTo( linkedHashMap ); Debug.trace( linkedHashMap, map.isEmpty(), map.isScopeEmpty(), map.hasOwnProperty("g1"), map.hasOwnProperty("kc2") ); } public static void testPrecedeMultiMap() { MultiScopeMaptron map1_0 = new MultiScopeMaptron(new JSONMaptron("{p1:'kp1', p2:'kp2'}")); PrecedeMultiScopeMap p = new PrecedeMultiMaptron(); p.addParent( map1_0 ); p.put( "this1", "this1" ); p.put( "this" , "this is this" ); MultiScopeMaptron mapKeyWord = new MultiScopeMaptron(new JSONMaptron("{this:'this is keyword', super:'super is keyword'}")); p.setPrecedeScope( mapKeyWord ); Debug.trace( p, p.get( "p1" ), p.get( "this" ) ); } public static void testRecursiveEntryIterator() { Map map = new JSONMaptron( "{ k1:v1, k2:v2, k3: { k3_1:v3_1, k3_2:v3_2, li:[ 0,1,2,3, { lk1: vlk1, lk2:vlk2 } ] }, k3_4: v3_4 }" ); //Map map = new JSONMaptron( "{ k1:v1, li:[ 0, { lk1: vlk1, lk2:vlk2 } ] }, k3_4: v3_4 }" ); RecursiveFamilyIterator iterator = new RecursiveFamilyIterator<>( map, true ); //RecursiveEntryIterator iterator = new RecursiveEntryIterator( map, true ); TypedNamespaceFamilyEntryNameEncoder encoder = new TypedNamespaceFamilyEntryNameEncoder(); while ( iterator.hasNext() ) { UnitFamilyNode node = iterator.next(); //Debug.trace( node, node.parent(), node.namespacify( true ) ); Debug.trace( node, node.parent(), encoder.encodeNS( node, true ).getFullName(), node.namespacify( true ) ); // if( node.parent() != null ) { // Debug.trace( "K", node.parent().parent() ); // } } // while ( iterator.hasNext() ) { // Map.Entry node = iterator.next(); // Debug.trace( node ); // } iterator = new RecursiveFamilyIterator<>( map, true ); GenericCollectedEntryEncoder entryEncoder = new GenericCollectedEntryEncoder( iterator ); Collection collection = entryEncoder.encode(); Debug.trace( collection ); Debug.trace( map ); GenericCollectedEntryDecoder decoder = new GenericCollectedEntryDecoder<>(); Map decoded = decoder.decode(collection); Debug.trace(decoded); iterator = new RecursiveFamilyIterator<>( map, true ); entryEncoder = new GenericCollectedEntryEncoder( iterator ); Map map1 
= entryEncoder.regress(); Debug.trace( map1 ); decoded = decoder.evolve( map1 ); Debug.trace(decoded); } public static void testMergeSharedList(){ List list1 = new ArrayList<>(List.of("a", "b", "c","d","e")); List list2 = new ArrayList<>(List.of("X", "D", "F","X","Y")); System.out.println("list1: "+list1); System.out.println("list2: "+list2); SharedList mergeList = SharedList.SharedListBuilder.merge(list1, list2); System.out.println("merge list1 and list2 : "+ mergeList); SharedList slice = SharedList.SharedListBuilder.slice(2, 6, mergeList); System.out.println("slice mergeList from 2 to 6 : "+ slice); SharedList merge2List = SharedList.SharedListBuilder.merge(list1, list2, slice); System.out.println("merge list1 and list2 and slice : "+ merge2List); System.out.println("merge2 get index 1: "+ merge2List.get(1)); SharedList subList = merge2List.subList(4, 5); System.out.println("merge2 subList from 4 to 5 :"+ subList); subList.set(1, "hello"); System.out.println("sublist after set sublist index 1 to hello: "+subList); System.out.println("merge2 after set sublist index 1 to hello: "+merge2List); } public static void testTrieMap() { UniTrieMaptron trieMap = new UniTrieMaptron<>(); trieMap.put("a1/b1/c1", "T1"); trieMap.put("a2/b2/c2", "T2"); trieMap.put("a3/b3/c3", "T3"); trieMap.put("a3/b4/c4", "T4"); trieMap.put("a4/b5/c5", "T5"); trieMap.put("a1/b1/c2", "T6"); trieMap.put("a1/b1/c3", "T7"); trieMap.makeSymbolic( "a1/b1/rc5", "a3/b3/c3" ); TrieNode node = trieMap.queryNode("a1/b1"); //node.put("c4","T8",trieMap); Debug.trace(trieMap.get("a1/b1/rc5")); //trieMap.makeSymbolic( "a1/b1/rc2", "a3" ); trieMap.makeSymbolic( "a1/b1/rc2", "a1/b1/rc5" ); TrieNode rc6 = trieMap.queryNode( "a1/b1/rc2" ); Debug.trace( rc6.evinceReparse().reparse() ); Debug.trace( trieMap.queryNode( "a1" ).getFullName() ); Debug.trace( trieMap, trieMap.size() ); trieMap.put("a3/b4", "RRR"); Debug.greenf( trieMap, trieMap.size() ); trieMap.remove( "a3" ); Debug.trace( trieMap, trieMap.size() ); //trieMap.remove( "a1/b1" ); Debug.trace( trieMap.keySet() ); Debug.trace( trieMap.values() ); TrieMap clone = trieMap.clone(); Debug.trace(clone,clone.size()); Debug.trace(clone.keySet()); Debug.trace(clone.values()); // trieMap.put("a1/b1", "TCC"); // Debug.trace( trieMap.get("a1/b1") ); } public static void testConcurrentTrie() { UniTrieMaptron trieMap = new UniTrieMaptron<>( ConcurrentHashMap::new ); //UniTrieMaptron trieMap = new UniTrieMaptron<>( TreeMap::new ); trieMap.put("a1/b1/c1", "T1"); trieMap.put("a2/b2/c2", "T2"); trieMap.put("a3/b3/c3", "T3"); trieMap.put("a3/b4/c4", "T4"); trieMap.put("a4/b5/c5", "T5"); trieMap.put("a1/b1/c2", "T6"); trieMap.put("a1/b1/c3", "T7"); var s = trieMap.root(); Debug.greenfs( trieMap ); int numberOfThreads = 10; ExecutorService executorService = Executors.newFixedThreadPool(numberOfThreads); for (int i = 0; i < numberOfThreads; i++) { executorService.execute(new PathInserter(trieMap)); } executorService.shutdown(); while (!executorService.isTerminated()) { } Debug.trace( trieMap.size() ); } private static final String CHARACTERS = "abcdefghijklmnopqrstuvwxyz"; private static final Random random = new Random(); private static String generateRandomPath() { int segmentCount = 3 + random.nextInt(3); // Generate between 3 to 5 segments StringBuilder path = new StringBuilder(); for (int i = 0; i < segmentCount; i++) { if (i > 0) { path.append('/'); } path.append(generateRandomSegment()); } return path.toString(); } private static String generateRandomSegment() { //int length = 2 + 
random.nextInt(3); // Generate segment length between 2 to 4 int length = 1; StringBuilder segment = new StringBuilder(length); for (int i = 0; i < length; i++) { segment.append(CHARACTERS.charAt(random.nextInt(CHARACTERS.length()))); } return segment.toString(); } private static class PathInserter implements Runnable { private final UniTrieMaptron trieMap; public PathInserter(UniTrieMaptron trieMap) { this.trieMap = trieMap; } @Override public void run() { for (int i = 0; i < 10000; i++) { String path = generateRandomPath(); String value = "Value_" + random.nextInt(1000); System.out.printf("Inserting path: %s with value: %s%n", path, value); try{ trieMap.put(path, value); } catch ( IllegalArgumentException e ) { e.printStackTrace(); } } } } private static void testBitSet64(){ long i = 0b1110101110101110101010000010001111011000001010111011101110111010L ; long k = 0 ; for ( int j = 0; j < 64; ++j ) { Debug.redf(BitSet64.toBinaryStringLSB(i)); Debug.greenfs(BitSet64.toBinaryStringLSB(BitSet64.reverse(i,0,j))); Debug.hhf(); } } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ //TestUnits.testUniScopeMap(); //TestUnits.testMultiScopeMap(); //TestUnits.testPrecedeMultiMap(); //TestUnits.testRecursiveEntryIterator(); //TestUnits.testMergeSharedList(); //TestUnits.testTrieMap(); //TestUnits.testConcurrentTrie(); TestUnits.testBitSet64(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/TestCompactTimestamp.java ================================================ package com.util; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.datetime.compact.CompactTimeUnit32; import com.pinecone.framework.util.datetime.compact.CompactTimestamp32; public class TestCompactTimestamp { public static void testTimestamp32() { int encodedMs = CompactTimestamp32.encode( 123, CompactTimeUnit32.MILLISECONDS ); int encodedSec = CompactTimestamp32.encode( 60, CompactTimeUnit32.SECONDS ); int encodedMin = CompactTimestamp32.encode( 30, CompactTimeUnit32.MINUTES ); int encodedHour = CompactTimestamp32.encode( 12, CompactTimeUnit32.HOURS ); int encodedDay = CompactTimestamp32.encode( 1, CompactTimeUnit32.DAYS ); int encodedInf = CompactTimestamp32.INFINITE; Debug.trace( "Milliseconds: " + CompactTimestamp32.toMilliseconds(encodedMs)); Debug.trace( "Seconds: " + CompactTimestamp32.toMilliseconds(encodedSec)); Debug.trace( "Minutes: " + CompactTimestamp32.toMilliseconds(encodedMin)); Debug.trace( "Hours: " + CompactTimestamp32.toMilliseconds(encodedHour)); Debug.trace( "Days: " + CompactTimestamp32.toMilliseconds(encodedDay)); Debug.trace( "Infinite: " + CompactTimestamp32.toMilliseconds(encodedInf)); } public static void testTimestamp32_norm() { long[] testValues = { 123, 60_000, 3_600_000, 86_400_000, 500_000_000_000L }; for ( long millis : testValues ) { int encoded = CompactTimestamp32.fromMilliseconds( millis ); System.out.printf( "Millis: %d -> Encoded: %s -> Normalized: %d ms\n", millis, CompactTimestamp32.format( encoded ), CompactTimestamp32.toMilliseconds( encoded ) ); } } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ TestCompactTimestamp.testTimestamp32(); TestCompactTimestamp.testTimestamp32_norm(); return 0; }, (Object[]) args ); } } ================================================ FILE: 
Pinecones/Pinecone/src/test/java/com/util/TestDateTime.java ================================================ package com.util; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.datetime.GenericMultiFormDateTimeAudit; import java.time.LocalDateTime; public class TestDateTime { public static void testAudit() { GenericMultiFormDateTimeAudit audit = new GenericMultiFormDateTimeAudit(); LocalDateTime currentTime = LocalDateTime.of(2024, 6, 24, 1, 2, 3); Debug.trace(1,audit.matches("2024-06-24T01:02:03", currentTime)); // true Debug.trace(2,audit.matches("2024-06-24 1:02:03", currentTime)); // true Debug.trace(3,audit.matches("2024-06-24 1:2:03", currentTime)); // true Debug.trace(4,audit.matches("2024-06-24 1:2:3", currentTime)); // true Debug.trace(5,audit.matches("2024-06-24 01:2:3", currentTime)); // true Debug.trace(6,audit.matches("2024-06-24 01:02:3", currentTime)); // true Debug.trace(7,audit.matches("2024-06-24 01:2:03", currentTime)); // true Debug.trace(-7,audit.matches("2024-06-24 01:2:02", currentTime)); Debug.trace(8,audit.matches("2024-06-24", currentTime)); // true Debug.trace(9,audit.matches("2024-6-24", currentTime)); // true Debug.trace(10,audit.matches("1:02:03", currentTime)); // true Debug.trace(11,audit.matches("1:2:03", currentTime)); // true Debug.trace(12,audit.matches("1:2:3", currentTime)); // true Debug.trace(13,audit.matches("01:2:3", currentTime)); // true Debug.trace(14,audit.matches("01:02:3", currentTime)); // true Debug.trace(15,audit.matches("01:2:03", currentTime)); // true Debug.trace(16,audit.matches("?", currentTime)); // true Debug.trace(17,audit.matches("2024-06-24 01:02:??", currentTime)); // true Debug.trace(18,audit.matches("2024-06-24 01:??:??", currentTime)); // true Debug.trace(19,audit.matches("2024-06-24 ??:??:??", currentTime)); // true Debug.trace(20,audit.matches("2024-06-?? ??:??:??", currentTime)); // true Debug.trace(21,audit.matches("2024-??-?? ??:??:??", currentTime)); // true Debug.trace(22,audit.matches("????-??-?? 
??:??:??", currentTime)); // true Debug.trace(23,audit.matches("2024-06-24 01:??:03", currentTime)); // true Debug.trace(24,audit.matches("2024-??-24 01:??:03", currentTime)); // true Debug.trace(25,audit.matches("????-??-24 01:??:03", currentTime)); // true Debug.trace(-25,audit.matches("????-??-24 13:??:03", currentTime)); Debug.trace(26,audit.matches("????-??-24 01:??:03", currentTime)); // true Debug.trace(27,audit.matches("01:2", currentTime)); // true Debug.trace(-27,audit.matches("01:3", currentTime)); Debug.trace(28,audit.matches("2024-06-24 01:2", currentTime)); // true Debug.trace(-28,audit.matches("2024-06-24 01:03", currentTime)); Debug.trace(29,audit.matches("2024-06", currentTime)); // true Debug.trace(-29,audit.matches("2024-07", currentTime)); Debug.trace(30,audit.matches("01:?", currentTime)); // true Debug.trace(-30,audit.matches("02:?", currentTime)); Debug.trace(31,audit.matches("2024-06/24 01:2", currentTime)); // true Debug.trace(-31,audit.matches("2024/06/24 01:03", currentTime)); Debug.trace(32,audit.matches("2024.06.24 01:2", currentTime)); // true Debug.trace(-32,audit.matches("2024.06.24 01:03", currentTime)); Debug.trace(33,audit.matches("2024.06", currentTime)); // true Debug.trace(-33,audit.matches("2024.07", currentTime)); Debug.trace(34,audit.matches("2024.06.24", currentTime)); // true Debug.trace(-34,audit.matches("2024.06.25", currentTime)); } public static void testAuditAccuracy() { GenericMultiFormDateTimeAudit audit = new GenericMultiFormDateTimeAudit(); LocalDateTime currentTime = LocalDateTime.of(2024, 6, 24, 1, 2, 3); Debug.trace(1,audit.betweenSec("2024-06-24 01:??:13", currentTime, 10 )); Debug.trace(2,audit.betweenSec("2024-06-24 01:01:58", currentTime, 10 )); Debug.trace(3,audit.betweenSec("2024-06-?? 01:01:58", currentTime, 10 )); Debug.trace(-3,audit.betweenSec("2024-06-?? 01:02:58", currentTime, 10 )); Debug.trace(-3,audit.betweenSec("2024-06-?? 01:02:14", currentTime, 10 )); Debug.trace(4,audit.betweenMin("2024-06-?? 01:01:58", currentTime, 1 )); Debug.trace(-4,audit.betweenMin("2024-06-?? 01:03:52", currentTime, 1 )); Debug.trace(5,audit.betweenMin("2024-06-?? 01:03:02", currentTime, 1 )); Debug.trace(6,audit.betweenMin("2024-06-?? 01:03:03", currentTime, 1 )); Debug.trace(-6,audit.betweenMin("2024-06-?? 01:03:04", currentTime, 1 )); // // Debug.trace(7,audit.between("2024-06-?? 01:02:04", currentTime, 1000 )); // Debug.trace(-7,audit.between("2024-06-?? 01:02:05", currentTime, 1000 )); Debug.trace(8,audit.between("2024-06-?? 01:02:02.950", currentTime, 100 )); Debug.trace(-8,audit.between("2024-06-?? 
01:02:02.850", currentTime, 100 )); Debug.trace(9,audit.betweenSec("??:2", currentTime, 10 )); Debug.trace(-9,audit.betweenSec("??:3", currentTime, 10 )); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestDateTime.testAudit(); TestDateTime.testAuditAccuracy(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/TestJSONConfig.java ================================================ package com.util; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.json.*; import java.nio.file.Path; public class TestJSONConfig { public static void test_JC1( ) { JSONConfig json = new JSONConfig( new JSONMaptron( "{ self:selfV, num:1234, sch:{ n: name, ssch: { n:n1 } } }" ) ); json.addGlobalScope( new JSONMaptron("{ satan: 'Satan', jesus: 'Jesus', obj:{ k=sss, f=sxf } }") ); json.addGlobalScope( new JSONMaptron("{ f1: 'Satan', f2: 'Jesus', f3:{ k=fsss, f=s13xf } }") ); Debug.trace( json.optJSONObject( "f3" ), json.opt( "num" ), json.optJSONObject( "sch" ) ); Debug.trace( json ); JSONConfig sch = json.getChild( "sch" ); Debug.trace( sch, sch.optJSONObject( "f3" ), sch.opt( "satan" ), sch.optJSONObject( "ssch" ) ); JSONConfig ssch = json.getChild( "ssch" ); Debug.trace( ssch, ssch.opt( "f2" ), sch.opt( "obj" ), sch.opt( "n" ) ); } public static void test_JC( ) { JPlusContext context = new JPlusContext(); context.addParentPath(Path.of("E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecone/src/test/java")); context.setOverriddenAffinity( true ); context.addGlobalScope( new JSONMaptron("{ satan: 'Satan', jesus: 'Jesus' }") ); //context.addGlobalScope( new JSONMaptron("{ this: { key:'TakeOver' } }") ); JSONObject obj = (JSONObject) JPlus.parse( " { ro = 'root', next = { p = 'parent', po1: { kp:true, int = 9 }, pa:[9,9.01,6]," + "next : { #extends super.po1, int = 7, str: &this.int, end:xxxx, obj:{a:1, h:&this.a}, obj2:{/*#extends super.obj*/ h:&super.obj} ,inc:#include \"./com/util/inc.jplus\" /**/ }, " + "arr:[ #extends 'super.pa', 1, &'this[1]', null, 'fuck' ]/**/ } }", context ) ; Debug.echo( obj.toJSONStringI(4) ); } public static void test_Dictionary( ) { JSONObject object = new JSONMaptron( "{ satan: 'Satan', jesus: 'Jesus' }" ); JSONDictium dictium = object; for ( Object o : dictium.entrySet() ) { Debug.trace( o.toString() ); } JSONArray array = new JSONArraytron( "[0,1,2,3,4,5,6,7,8,9]" ); dictium = array; for ( Object o : dictium.entrySet() ) { Debug.trace( o ); } Debug.trace( dictium.optInt( "31s" ) ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestUnits.testUniScopeMap(); //TestJSONConfig.test_JC1(); //TestJSONConfig.test_Dictionary(); //Debug.trace( ( new URI( "/ssss" ) ) ); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/TestNamespace.java ================================================ package com.util; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.name.*; import java.util.List; public class TestNamespace { public static void testNS() throws Exception { GenericNamespaceParser parser = new GenericNamespaceParser(); Namespace 
parsedNamespace = parser.parse( "x1::x2/x3\\x4->x5.x6.x7->x8.x9.x10.x11::x12.x13.x14", List.of( "::", ".", "->", "\\", "/" ) ); Debug.trace( parsedNamespace.getFullName() ); parsedNamespace = parser.parse( "x1::x2/x3\\x4->x5.x6.x7->x8.x9.x10.x11::x12.x13.x14", "::|\\.|->|\\\\|/" ); Debug.trace( parsedNamespace.getFullName(), parsedNamespace.getSimpleName(), parsedNamespace ); Namespace namespace = new UniNamespace( "Jesus", new UniNamespace( "this" ) ); Debug.trace( namespace.getFullName(), namespace.parent().getSimpleName(), parsedNamespace.root() ); } public static void testMultiNS() throws Exception { MultiNamespace root = new GenericMultiNamespace( "root" ); MultiNamespace namespace = new GenericMultiNamespace( "x2" ); namespace.addParent( new GenericMultiNamespace( "x1_0", root ) ); namespace.addParent( new GenericMultiNamespace( "x1_1", root ) ); Debug.trace( namespace.getFullNames(), namespace.hasOwnParentNS( "x1_0" ) ); Debug.trace( namespace.hasOwnParent( new GenericMultiNamespace( "x1_0", root ) ) ); // root.x1_0 Debug.trace( namespace.hasOwnParent( new GenericMultiNamespace( "x1_0" ) ) ); // x1_0 Debug.trace( namespace.getParentByNS( "x1_0" ).getFullName(), namespace.getParents(), namespace.getDomain() ); GenericNamespaceParser parser = new GenericNamespaceParser( GenericMultiNamespace.class ); Namespace parsedNamespace = parser.parse( "x1::x2/x3\\x4->x5.x6.x7->x8.x9.x10.x11::x12.x13.x14", List.of( "::", ".", "->", "\\", "/" ) ); Debug.trace( parsedNamespace.getFullName(), parsedNamespace.getDomain() ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ TestNamespace.testNS(); //TestNamespace.testMultiNS(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/TestParser.java ================================================ package com.util; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.config.GenericStartupCommandParser; import java.util.Map; public class TestParser { public static void testGenericStratupCommandParser() throws Exception{ GenericStartupCommandParser parser = new GenericStartupCommandParser(); Map result = parser.parse(new String[]{"--key1=val1,val2", "-key2:val3;val4", "/key3=val5|val6", "--key4=1234"}); for ( Map.Entry entry : result.entrySet() ) { Debug.trace( entry.getKey(), (Object) entry.getValue() ); } } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ TestParser.testGenericStratupCommandParser(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/TestRRWSLock.java ================================================ package com.util; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.lock.ReentrantReadWriteSpinLock; import com.pinecone.framework.util.lock.ReentrantSpinLock; import com.pinecone.framework.util.lock.SpinLock; public class TestRRWSLock { private static final ReentrantReadWriteSpinLock lock = new ReentrantReadWriteSpinLock(); //private static final ReentrantReadWriteLock lock = new 
ReentrantReadWriteLock(); //private static final ReentrantLock cl = new ReentrantLock(); //private static final SpinLock cl = new SpinLock(); private static final ReentrantSpinLock cl = new ReentrantSpinLock(); private static void readOperation(int threadId) { lock.readLock().lock(); try { Debug.trace( "Thread " + threadId + " is reading..." ); try { Thread.sleep(100); } catch ( InterruptedException e ) { e.printStackTrace(); } } finally { lock.readLock().unlock(); Debug.trace( "Thread " + threadId + " finished reading." ); } } private static void writeOperation(int threadId) { lock.writeLock().lock(); try { Debug.trace( "Thread " + threadId + " is writing..." ); try { Thread.sleep(200); } catch (InterruptedException e) { e.printStackTrace(); } } finally { lock.writeLock().unlock(); Debug.trace( "Thread " + threadId + " finished writing." ); } } private static void testReentrancy() { lock.writeLock().lock(); try { Debug.trace( "Main thread started writing." ); lock.writeLock().lock(); try { Debug.trace( "Main thread re-entered writing." ); } finally { lock.writeLock().unlock(); } } finally { lock.writeLock().unlock(); Debug.trace( "Main thread finished writing." ); } } private static void testSimple() { ExecutorService executorService = Executors.newFixedThreadPool( 4 ); for ( int i = 1; i <= 3; ++i ) { final int threadId = i; executorService.submit(() -> readOperation(threadId)); } for ( int i = 1; i <= 3; ++i ) { final int threadId = i; executorService.submit(() -> writeOperation(threadId)); } executorService.submit(() -> testReentrancy()); executorService.shutdown(); } private static final Map map = new TreeMap<>(); private static int cnt = 0; private static void treeReadOperation() { for ( int i = 0; i < 1e6; i++ ) { lock.readLock().lock(); //cl.lock(); try { //Debug.trace( map.get(i) ); map.get(i); } finally { //cl.unlock(); lock.readLock().unlock(); } } ++cnt; } private static void treeWriteOperation() { for ( int i = 0; i < 1e6; i++ ) { lock.writeLock().lock(); //cl.lock(); try { map.put(i, i); } finally { //cl.unlock(); lock.writeLock().unlock(); } } ++cnt; } private static void testUnit() { Thread rt = new Thread( TestRRWSLock::treeReadOperation ); Thread wt = new Thread( TestRRWSLock::treeWriteOperation ); rt.start(); wt.start(); while ( cnt < 2 ) { Debug.sleep(1); } } public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ TestRRWSLock.testUnit(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/TestTemplate.java ================================================ package com.util; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JPlus; import com.pinecone.framework.util.json.JPlusContext; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.template.TemplateParser; import java.nio.file.Path; public class TestTemplate { public static void test_UTL( ) { TemplateParser templateParser = new TemplateParser( "122 ${arr[1].f['k']} ${ key[ key['g'].c ] } sdd", ( new JSONMaptron( "{ g1:'k', key:{ g:{ c:'k' },k:'1xxxx2' }, arr:[1,{f:{k:'sss'}},3] }" ) ).getMap() ); Debug.trace( templateParser.eval() ); } public static void test_JPlus( ) { JPlusContext context = new JPlusContext(); context.addParentPath(Path.of("E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecone/src/test/java")); context.setOverriddenAffinity( 
true ); context.addGlobalScope( new JSONMaptron("{ satan: 'Satan', jesus: 'Jesus' }") ); //context.addGlobalScope( new JSONMaptron("{ this: { key:'TakeOver' } }") ); JSONObject obj = (JSONObject) JPlus.parse( " { ro = 'root', next = { p = 'parent', po1: { kp:true, int = 9 }, pa:[9,9.01,6]," + "next : { #extends super.po1, int = 7, str: &this.int, end:xxxx, obj:{a:1, h:&this.a}, obj2:{/*#extends super.obj*/ h:&super.obj} ,inc:#include \"./com/util/inc.jplus\" /**/ }, " + "arr:[ #extends 'super.pa', 1, &'this[1]', null, 'fuck' ]/**/ } }", context ) ; Debug.echo( obj.toJSONStringI(4) ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestUnits.testUniScopeMap(); //TestTemplate.test_UTL(); TestTemplate.test_JPlus(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/inc.jplus ================================================ { #extends 'super.obj', so : &super.obj, parentScope:{ keykey: "satan", }, "key": 'self->key', "utl": #"this->key: '${this.key}' ${ __scope__[ parentScope.keykey ] } fucks ${jesus} and ${key} so super harder | ${__root__}", } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/json/Parasite.java ================================================ package com.util.json; import com.pinecone.framework.util.json.homotype.DirectJSONInjector; public class Parasite { public String name ; public long length; public int emnus; public Parasite() { } public String getName() { return this.name; } public long getLength() { return this.length; } public void setName( String name ) { this.name = name; } public void setLength( long length ) { this.length = length; } public String toJSONString() { return DirectJSONInjector.instance().inject( this ).toString(); } public String toString(){ return DirectJSONInjector.instance().inject( this ).toString(); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/json/Slave.java ================================================ package com.util.json; import java.util.List; import java.util.Map; import com.pinecone.framework.util.json.homotype.StructJSONEncoder; public class Slave { public String name ; public long length; public int emnus; public Parasite parasite; public Map atts; public Object[] li; //public Slave child; public List children; //public Slave[] children2; //public Object[] children; //public Map[] children; //public List children; //public List children; //public List children; //public List children; public Map ms; public Slave() { } public String getName() { return this.name; } public long getLength() { return this.length; } public void setName( String name ) { this.name = name; } public void setLength( long length ) { this.length = length; } public void setParasite2( Parasite parasite ) { this.parasite = parasite; } // public void setChildren( List slaves ) { // this.children = slaves; // } // public List getChildren() { // return this.children; // } public String toJSONString() { return StructJSONEncoder.BasicEncoder.encode( this ); } public String toString(){ return StructJSONEncoder.BasicEncoder.encode( this ); } } ================================================ FILE: Pinecones/Pinecone/src/test/java/com/util/json/TestJSON.java ================================================ package com.util.json; import java.util.List; import 
com.pinecone.Pinecone; import com.pinecone.framework.system.prototype.ObjectiveBean; import com.pinecone.framework.system.prototype.ObjectiveClass; import com.pinecone.framework.system.prototype.ObjectiveEvaluator; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.*; import com.pinecone.framework.util.json.homotype.*; class Dick { @JSONGet( "name" ) public String mName ; @MapStructure public long length; public int emnus; public Dick() { } public Dick( long length ) { this.mName = "dick"; this.length = length; } public String toJSONString() { return AnnotatedJSONInjector.instance().inject( this ).toString(); } public String toString(){ return AnnotatedJSONInjector.instance().inject( this ).toString(); } } class Shit{ @JSONGet public String mName ; public int length; @JSONGet Dick[] array; //public JSONArray array; @JSONGet public Dick dick = new Dick(); public Shit( ){ } // public Object getName() { // return this.mName; // } public Object test( int i, Integer c, String sz ) { return sz + i + c; } public Object trial( Object... arg ){ return arg; } /* public String toJSONString() { return DirectJSONInjector.instance().inject( this ).toString(); }*/ /* public String toString(){ return DirectJSONInjector.instance().inject( this ).toString(); }*/ } class Vagina { public String name ; public long length; public int emnus; public Vagina() { } public String getName() { return this.name; } public long getLength() { return this.length; } public void setName( String name ) { this.name = name; } public void setLength( long length ) { this.length = length; } public String toJSONString() { return DirectJSONInjector.instance().inject( this ).toString(); } public String toString(){ return DirectJSONInjector.instance().inject( this ).toString(); } } public class TestJSON { public static void testDirectlyInjector() { Shit shit = new Shit(); JSONObject jsonShit = new JSONMaptron("{ name:'shit', 'fuck':7, 'length': 1, 'array':[{name:'shit',length:1998}] }"); Debug.trace( jsonShit ); shit = (Shit) ( new DirectObjectInjector( true, Shit.class ) ).inject( jsonShit ); Debug.trace(shit); Debug.echo( JSON.marshal( shit ) ); } public static void testAnnotatedInjector() { // Dick dick = new Dick(); // JSONObject jsonShit = new JSONMaptron("{ name:'shit', 'length': 1, 'array':[{name:'shit',length:1998}] }"); // Debug.trace( jsonShit ); // dick = (Dick) ( new AnnotatedObjectInjector( Dick.class ) ).inject( jsonShit ); // Debug.trace( dick ); } public static void testObjectom() { Dick dick = new Dick(); JSONObject jsonShit = new JSONMaptron("{ name:'shit', 'length': 1, 'array':[{name:'shit',length:1998}] }"); Debug.trace( jsonShit ); dick = (Dick) ( new AnnotatedObjectInjector( Dick.class ) ).inject( jsonShit ); ObjectiveClass objectom = new ObjectiveClass( dick ); Debug.echo( objectom.toJSONString(), objectom.get( "length" ), objectom.get( "mName" ) ); } public static void testObjectiveBean() { Vagina vagina = new Vagina(); JSONObject jsonShit = new JSONMaptron("{ name:'shit', 'length': 1 }"); Debug.trace( jsonShit ); vagina = (Vagina) ( new DirectObjectInjector( Vagina.class ) ).inject( jsonShit ); // JSONObject o = new JSONMaptron( vagina ); // Debug.echo( o.toJSONString() ); ObjectiveBean bean = new ObjectiveBean( vagina ); Debug.echo( bean.toJSONString() ); bean.set( "name", "fuck" ); Debug.echo( bean.toJSONString() ); //bean.set( "key", "fuck" ); Debug.trace( bean.keys() ); Vagina na = new Vagina(); ObjectiveBean naBean = new ObjectiveBean( na ); for ( String key : bean.keys() ) { 
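// Copy every readable property from the populated bean into the fresh one, key by key.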
naBean.set( key, bean.get( key ) ); } Debug.trace( vagina, na ); Debug.trace( ObjectiveEvaluator.MapStructures.get( na, "name" ) ); ObjectiveEvaluator.MapStructures.set( na, "name", "test2" ); Debug.trace( ObjectiveEvaluator.MapStructures.get( na, "name" ) ); Debug.trace( ObjectiveEvaluator.MapStructures.get( na, "emnus" ) ); ObjectiveEvaluator.MapStructures.set( na, "emnus", 124 ); Debug.trace( ObjectiveEvaluator.MapStructures.get( na, "emnus" ) ); } public static void testStringfiy() { JSONMaptron jo = new JSONMaptron( "{ k1:v1, k2:v2, k3:{ k3_1: v3_1, k3_2:[ 0, 1, true, false, undefined, [[[[],[],[],{}]]], [{ k3_a_1: v3_a_1, k3_a_2: 3.1415926 }] ] } }" ); Debug.trace( jo ); Object[] arr = new Object[] { "v1", 1, 3.1415926, null, false, "v_end" }; Debug.trace( arr ); } public static void testMarshal() { Slave j = JSON.unmarshal( "{ /*name:Slave, length:1234, parasite:{ name: parasite, length:20241102 }, atts: { key:val }, li:[1,2,3, 'ssss'],*/" + "children: [{ name:Slave, length:1234, parasite:{ name: parasitec, length:20241117 } } ]," + "ms: { fi: { name:Slave, length:1234, parasite:{ name: parasitec, length:20241117 } } }" + " }", Slave.class ); Debug.fmp( 2, j ); List l = JSON.unmarshal( "['fuck', 'me']", new TypeReference<>() {} ); Debug.fmp( 2, l ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestJSON.testDirectlyInjector(); //TestJSON.testAnnotatedInjector(); //TestJSON.testObjectom(); //TestJSON.testObjectiveBean(); //TestJSON.testStringfiy(); TestJSON.testMarshal(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Slime/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0">
    <parent>
        <artifactId>pinecones</artifactId>
        <groupId>com.pinecones</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>9</source>
                    <target>9</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.pinecone.slime</groupId>
    <artifactId>slime</artifactId>
    <version>2.1.0</version>
    <dependencies>
        <dependency>
            <groupId>com.pinecone</groupId>
            <artifactId>pinecone</artifactId>
            <version>2.5.1</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>
================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/CacheConstants.java ================================================ package com.pinecone.slime.cache; public final class CacheConstants { public static final int DefaultCachePageLocalCapacity = 100; public static final int DefaultCachePageCapacity = 1000; public static final int DefaultCachePageMegaCapacity = 10000; } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ArchConcurrentCountDictCache.java ================================================ package com.pinecone.slime.cache.query; import java.util.concurrent.atomic.AtomicLong; public abstract class ArchConcurrentCountDictCache implements UniformCountDictCache { protected AtomicLong mnMisses; protected AtomicLong mnAccesses; protected ArchConcurrentCountDictCache(){ this.mnMisses = new AtomicLong( 0 ); this.mnAccesses = new AtomicLong( 0 ); } protected void afterKeyVisited( Object key ) { this.recordAccess(); } protected abstract V missKey( Object key ) ; protected void recordMiss() { this.mnMisses.incrementAndGet(); } protected void recordAccess() { this.mnAccesses.incrementAndGet(); } @Override public long getMisses() { return this.mnMisses.get(); } @Override public long getAccesses() { return this.mnAccesses.get(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ArchCountDictCache.java ================================================ package com.pinecone.slime.cache.query; public
abstract class ArchCountDictCache implements UniformCountDictCache { protected long mnMisses; protected long mnAccesses; protected ArchCountDictCache(){ this.mnMisses = 0; this.mnAccesses = 0; } protected void afterKeyVisited( Object key ) { this.recordAccess(); } protected abstract V missKey( Object key ) ; protected void recordMiss() { ++this.mnMisses; } protected void recordAccess() { ++this.mnAccesses; } @Override public long getMisses() { return this.mnMisses; } @Override public long getAccesses() { return this.mnAccesses; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ArchLocalDictCachePage.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.framework.unit.Dictium; import java.util.Collection; import java.util.Set; public abstract class ArchLocalDictCachePage extends ArchCountDictCache implements LocalDictCachePage, IterableDictCachePage { private long mnId; private final int mnCapacity; private final Dictium mCache; protected ArchLocalDictCachePage( long id, int capacity, Dictium cache ) { super(); this.mnId = id; this.mnCapacity = capacity; this.mCache = cache; } @Override public long getId() { return this.mnId; } @Override public void setId( long id ) { this.mnId = id; } @Override public Dictium getDictium() { return this.mCache; } @Override public long capacity() { return this.mnCapacity; } @Override public long size() { return this.mCache.size(); } @Override public boolean isEmpty() { return this.mCache.isEmpty(); } @Override public V get( Object key ) { V v = this.mCache.get( key ); if( v == null ) { v = this.missKey( key ); } this.afterKeyVisited( key ); return v; } @Override public V erase( Object key ) { V v = this.mCache.erase( key ); this.afterKeyVisited( key ); return v; } @Override public boolean existsKey( Object key ) { boolean b = this.mCache.containsKey( key ); this.afterKeyVisited( key ); return b; } @Override public void clear() { this.mCache.clear(); } @Override public long elementSize() { return this.size(); } @Override public Set entrySet() { return this.getDictium().entrySet(); } @Override public Collection values() { return this.getDictium().values(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ConcurrentMergeLRUDictCachePage.java ================================================ package com.pinecone.slime.cache.query; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import com.pinecone.framework.unit.Dictium; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.unit.MapDictium; import com.pinecone.slime.cache.CacheConstants; /** * Pinecone Ursus For Java [ ConcurrentMergeDictCachePage ] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Three-level caching strategy: * L1 Cache: A thread-local cache, achieving the highest performance with lock-free access but is not shareable. * L2 Cache: When a key is evicted from L1, it automatically degrades to L2. With read-write locks, * resulting in a slight performance drop [Lazy merging upwards]. * L3 Cache: It will be eliminated, if a key is evicted from L2. 
No automatic replenishment occurs, * and subsequent access will query external caching services. * This design aims to minimize performance loss caused by locking and accessing external services. * ***************************************************************************************** * 采用三级缓存设计 * 其中L1缓存是线程局部缓存(无锁化,性能最高,分治全局不可共享) * 当L1中键被淘汰,自动降级到L2,此时使用读写锁,性能有下降但不多 [向上懒汉式归并] * L2缓存再次被淘汰,非升即走,不再自动补充,后面需要访问会访问外部缓存服务 * 该设计旨在尽可能避免加锁和访问外部服务带来的性能损失。 * ***************************************************************************************** */ public class ConcurrentMergeLRUDictCachePage extends ArchConcurrentCountDictCache implements LocalDictCachePage, IterableDictCachePage, UniformCountSelfLoadingDictCache { private long mnId; private final int mnCapacity; private final Dictium mMegaCache; private final ReadWriteLock mMegaLock; private SourceRetriever mSourceRetriever; private boolean mbEnableL2DirectLoad; private final ThreadLocal > mLocalPage; protected boolean degradeLocalKey( int size, int capacity, Map.Entry eldest ) { boolean bElimination = size > capacity; if ( bElimination ) { this.mMegaLock.writeLock().lock(); try{ // Degrading local-key and merging into mega-L2-cache if the key is ancient enough. (PS, L2-Cache is rw-lock-based then slower) // 如果线程独占高速缓存中的键被淘汰,降级并入L2. (二缓有锁,慢) this.mMegaCache.insert( eldest.getKey(), eldest.getValue() ); } finally { this.mMegaLock.writeLock().unlock(); } } return bElimination; } public ConcurrentMergeLRUDictCachePage( long id, int capacity, int localCap, boolean bUsingTree, boolean bEnableL2DirectLoad, Map initData, SourceRetriever retriever ) { super(); this.mnId = id; this.mnCapacity = capacity; this.mMegaCache = new MapDictium<>( LocalFixedLRUDictCachePage.newMap( bUsingTree, capacity, initData ) ) ; this.mSourceRetriever = retriever; this.mMegaLock = new ReentrantReadWriteLock(); this.mbEnableL2DirectLoad = bEnableL2DirectLoad; this.mLocalPage = ThreadLocal.withInitial(() -> { Map neo; if ( bUsingTree ) { neo = new LinkedTreeMap<>( true ){ @Override protected boolean removeEldestEntry( Map.Entry eldest ) { return ConcurrentMergeLRUDictCachePage.this.degradeLocalKey( this.size(), localCap, eldest ); } }; } else { neo = new LinkedHashMap<>( capacity, 0.75f, true ){ @Override protected boolean removeEldestEntry( Map.Entry eldest ) { return ConcurrentMergeLRUDictCachePage.this.degradeLocalKey( this.size(), localCap, eldest ); } }; } return neo; }); } public ConcurrentMergeLRUDictCachePage( long id, int capacity, boolean bUsingTree, boolean bEnableL2DirectLoad, Map initData, SourceRetriever retriever ){ this( id, capacity, CacheConstants.DefaultCachePageLocalCapacity, bUsingTree, bEnableL2DirectLoad, initData, retriever ); } public ConcurrentMergeLRUDictCachePage( long id, int capacity, int localCap, boolean bUsingTree, Map initData, SourceRetriever retriever ){ this( id, capacity, localCap, bUsingTree, true, initData, retriever ); } public ConcurrentMergeLRUDictCachePage( long id, int capacity, boolean bUsingTree, Map initData, SourceRetriever retriever ){ this( id, capacity, CacheConstants.DefaultCachePageLocalCapacity, bUsingTree, true, initData, retriever ); } public ConcurrentMergeLRUDictCachePage( long id, int capacity, SourceRetriever retriever ){ this( id, capacity, CacheConstants.DefaultCachePageLocalCapacity, false, true, null, retriever ); } public ConcurrentMergeLRUDictCachePage( int capacity, SourceRetriever retriever ){ this( -1, capacity, retriever ); } public void setEnableL2DirectLoad( boolean bEnableL2DirectLoad ) { 
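// When L2-direct-load is enabled, a value fetched from the L3 source on a miss is also written through to the
// shared L2 mega-cache (see missKey below); when disabled, it stays in the caller's thread-local L1 until LRU
// eviction merges it downwards via degradeLocalKey.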
this.mbEnableL2DirectLoad = bEnableL2DirectLoad; } @Override public long getId() { return this.mnId; } @Override public void setId( long id ) { this.mnId = id; } @Override public Dictium getDictium() { return this.mMegaCache; } @Override public long capacity() { return this.mnCapacity; } @Override public long size() { this.mMegaLock.readLock().lock(); try{ return this.mMegaCache.size(); } finally { this.mMegaLock.readLock().unlock(); } } @Override public boolean isEmpty() { this.mMegaLock.readLock().lock(); try{ return this.mMegaCache.isEmpty(); } finally { this.mMegaLock.readLock().unlock(); } } @Override public V get( Object key ) { V v = this.mLocalPage.get().get( key ); if ( v == null ) { this.mMegaLock.readLock().lock(); try{ v = this.mMegaCache.get( key ); // Stage2, try Level-2 Cache retrieving. [Single page is thread-unsafe] } finally { this.mMegaLock.readLock().unlock(); } if( v == null ) { // Stage3, try L3 Cache retrieving => L1 . [From superior thread-safe source, e.g. `Redis`, `RDB`] // OR L3 => [ L1, L2 ] v = this.missKey( key ); } else { this.mLocalPage.get().put( key, v ); // L2 => L1 } } this.afterKeyVisited( key ); return v; } @Override public V erase( Object key ) { V v = this.mLocalPage.get().remove( key ); V v1; this.mMegaLock.writeLock().lock(); try{ v1 = this.mMegaCache.erase( key ); } finally { this.mMegaLock.writeLock().unlock(); } this.afterKeyVisited( key ); if ( v == null ) { return v1; } return v; } @Override public boolean existsKey( Object key ) { boolean b; this.mMegaLock.readLock().lock(); try{ b = this.mMegaCache.containsKey( key ); } finally { this.mMegaLock.readLock().unlock(); } if ( !b ) { b = this.mSourceRetriever.countsKey( key ) > 0; if ( b ) { // Trigger cache-loading to ensure coherency. b = this.get( key ) != null; } } this.afterKeyVisited( key ); return b; } @Override public boolean implicatesKey( Object key ) { return this.existsKey( key ); } @Override public SourceRetriever getSourceRetriever() { return this.mSourceRetriever; } @Override public void clear() { this.mLocalPage.get().clear(); this.mMegaLock.writeLock().lock(); try { this.mMegaCache.clear(); } finally { this.mMegaLock.writeLock().unlock(); } } @Override public long elementSize() { return this.size(); } @Override public Set entrySet() { return this.getDictium().entrySet(); } @Override public Collection values() { return this.getDictium().values(); } @Override protected V missKey( Object key ) { this.recordMiss(); V v = this.mSourceRetriever.retrieve( key ); if( v != null ) { // L3 => L1 this.mLocalPage.get().put( key, v ); if ( this.mbEnableL2DirectLoad ) { // L3 => L2 this.mMegaLock.writeLock().lock(); try{ this.mMegaCache.insert( key, v ); } finally { this.mMegaLock.writeLock().unlock(); } } } return v; } public static ConcurrentMergeLRUDictCachePage builder( long id, int capacity, SourceRetriever retriever ) { Builder builder = new Builder<>( id, capacity, retriever ); return builder.build(); } public static ConcurrentMergeLRUDictCachePage builder( int capacity, SourceRetriever retriever ) { Builder builder = new Builder<>( capacity, retriever ); return builder.build(); } public static class Builder { private final long id; private final int capacity; private final SourceRetriever retriever; private int localCap = CacheConstants.DefaultCachePageLocalCapacity; private boolean usingTree = false; private boolean enableL2DirectLoad = true; private Map initData = null; public Builder( long id, int capacity, SourceRetriever retriever ) { this.id = id; this.capacity = capacity; 
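// localCap, usingTree, enableL2DirectLoad and initData keep the defaults declared above unless overridden
// through the fluent methods below, e.g. (hypothetical usage):
// new Builder<>( 1024, retriever ).localCap( 128 ).usingTree( false ).build();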
this.retriever = retriever; } public Builder( int capacity, SourceRetriever retriever ) { this( -1, capacity, retriever ); } public Builder localCap( int localCap ) { this.localCap = localCap; return this; } public Builder usingTree( boolean bUsingTree ) { this.usingTree = bUsingTree; return this; } public Builder enableL2DirectLoad( boolean bEnableL2DirectLoad ) { this.enableL2DirectLoad = bEnableL2DirectLoad; return this; } public Builder initData( Map initData ) { this.initData = initData; return this; } public ConcurrentMergeLRUDictCachePage build() { return new ConcurrentMergeLRUDictCachePage<>( this.id, this.capacity, this.localCap, this.usingTree, this.enableL2DirectLoad, this.initData, this.retriever ); } } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/CountDictCachePage.java ================================================ package com.pinecone.slime.cache.query; public interface CountDictCachePage extends DictCachePage, UniformCountDictCache { @Override default long size(){ return this.elementSize(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/DictCachePage.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.slime.chunk.Page; public interface DictCachePage extends Page, UniformDictCache { @Override default long size(){ return this.elementSize(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/DirectlySourceAccessCacheAdapter.java ================================================ package com.pinecone.slime.cache.query; public class DirectlySourceAccessCacheAdapter extends ArchCountDictCache implements UniformCountSelfLoadingDictCache { private SourceRetriever mSourceRetriever; public DirectlySourceAccessCacheAdapter( SourceRetriever retriever ) { this.mSourceRetriever = retriever; } @Override protected V missKey( Object key ) { this.recordMiss(); return this.mSourceRetriever.retrieve( key ); } @Override public boolean implicatesKey( Object key ) { return this.mSourceRetriever.countsKey( key ) > 0; } @Override public long capacity() { return 0; } @Override public long size() { return 0; } @Override public boolean isEmpty() { return false; } @Override public V get( Object key ) { this.recordAccess(); return this.missKey( key ); } @Override public V erase( Object key ) { return null; // Do nothing. } @Override public boolean existsKey( Object key ) { return this.implicatesKey( key ); } @Override public void clear() { // Do nothing. 
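// This adapter keeps no local state: every get() delegates straight to the SourceRetriever, which is why
// capacity() and size() report 0 and clear() is a no-op.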
} @Override public SourceRetriever getSourceRetriever() { return this.mSourceRetriever; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/IterableDictCachePage.java ================================================ package com.pinecone.slime.cache.query; import java.util.Collection; import java.util.Iterator; import java.util.Set; public interface IterableDictCachePage extends CountDictCachePage, Iterable { default Iterator iterator() { return this.entrySet().iterator(); } Set entrySet(); Collection values(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/LocalBufferedDictCachePage.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.framework.unit.Dictium; /** * LocalBufferedDictCachePage * Only buffered, not self-loading * @param */ public class LocalBufferedDictCachePage extends ArchLocalDictCachePage { public LocalBufferedDictCachePage( long id, int capacity, Dictium cache ) { super( id, capacity, cache ); } public LocalBufferedDictCachePage( int capacity, Dictium cache ) { this( -1, capacity, cache ); } public LocalBufferedDictCachePage( Dictium cache ) { this( cache.size(), cache ); } @Override protected V missKey( Object key ) { this.recordMiss(); return null; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/LocalDictCachePage.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.framework.unit.Dictium; public interface LocalDictCachePage extends CountDictCachePage { Dictium getDictium(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/LocalFixedLRUDictCachePage.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.unit.MapDictium; import java.util.LinkedHashMap; import java.util.Map; public class LocalFixedLRUDictCachePage extends ArchLocalDictCachePage implements UniformCountSelfLoadingDictCache { protected static Map newMap( boolean bUsingTree, int capacity, Map initData ) { Map neo; if( bUsingTree ) { neo = new LinkedTreeMap<>( true ){ @Override protected boolean removeEldestEntry( Map.Entry eldest ) { return this.size() > capacity; } }; } else { neo = new LinkedHashMap<>( capacity, 0.75f, true ){ @Override protected boolean removeEldestEntry( Map.Entry eldest ) { return this.size() > capacity; } }; } if( initData != null ) { if( initData.size() > capacity ) { throw new IllegalArgumentException( String.format( "The initialization size[%d] exceeds the capacity[%d].", initData.size(), capacity ) ); } neo.putAll( initData ); } return neo; } private SourceRetriever mSourceRetriever; public LocalFixedLRUDictCachePage( long id, int capacity, boolean bUsingTree, Map initData, SourceRetriever retriever ) { super( id, capacity, new MapDictium<>( LocalFixedLRUDictCachePage.newMap( bUsingTree, capacity, initData ) ) ); this.mSourceRetriever = retriever; } public LocalFixedLRUDictCachePage( int capacity, Map initData, SourceRetriever retriever ) { this( -1, capacity, false, initData, retriever ); } public LocalFixedLRUDictCachePage( int capacity, SourceRetriever retriever ) { this( capacity, null, retriever ); } @Override protected void afterKeyVisited( Object key ) { 
super.afterKeyVisited( key ); // Since the map uses `accessOrder`, the most recently accessed key automatically moves to the top. } @Override protected V missKey( Object key ) { this.recordMiss(); V v = this.mSourceRetriever.retrieve( key ); if( v != null ) { this.getDictium().insert( key, v ); } return v; } @Override public boolean implicatesKey( Object key ) { return this.get( key ) != null; } @Override public SourceRetriever getSourceRetriever() { return this.mSourceRetriever; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/RangedDictCachePage.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.slime.unitization.PartialRange; public interface RangedDictCachePage extends CountDictCachePage { > PartialRange getRange(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/SourceRetriever.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.framework.system.prototype.Pinenut; public interface SourceRetriever extends Pinenut { V retrieve( Object key ); long countsKey( Object key ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformCountDictCache.java ================================================ package com.pinecone.slime.cache.query; public interface UniformCountDictCache extends UniformDictCache { long getMisses(); long getAccesses(); default double getHitRate() { double acc = (double)this.getAccesses(); return 1 - (double) this.getMisses() / acc; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformCountSelfLoadingDictCache.java ================================================ package com.pinecone.slime.cache.query; public interface UniformCountSelfLoadingDictCache extends UniformCountDictCache, UniformSelfLoadingDictCache { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformDictCache.java ================================================ package com.pinecone.slime.cache.query; import com.pinecone.framework.system.prototype.Pinenut; public interface UniformDictCache extends Pinenut { long capacity(); long size(); boolean isEmpty(); V get( Object key ); boolean existsKey( Object key ); V erase( Object key ); void clear(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformSelfLoadingDictCache.java ================================================ package com.pinecone.slime.cache.query; public interface UniformSelfLoadingDictCache extends UniformDictCache { // Searches for the key in both the cache and the data source. // A miss in the cache triggers self-loading from the data source.
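// ------------------------------------------------------------------------
// Illustrative sketch, not part of the original source: a minimal in-memory
// SourceRetriever, assuming the Pinenut marker interface contributes no
// further abstract methods here. It shows the contract self-loading caches
// build on: retrieve() supplies the value on a miss, while countsKey() lets
// existsKey() probe the source cheaply. Together with the counters above,
// e.g. 1000 accesses with 150 misses give
// UniformCountDictCache.getHitRate() == 1 - 150.0 / 1000 == 0.85.
//
// class MapSourceRetriever implements SourceRetriever {
//     private final java.util.Map source;
//     public MapSourceRetriever( java.util.Map source ) { this.source = source; }
//     public Object retrieve( Object key ) { return this.source.get( key ); }   // null means a genuine miss
//     public long countsKey( Object key ) { return this.source.containsKey( key ) ? 1 : 0; }
// }
// ------------------------------------------------------------------------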
boolean implicatesKey( Object key ); SourceRetriever getSourceRetriever(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/BatchPageSourceRetriever.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.slime.cache.query.RangedDictCachePage; import com.pinecone.slime.cache.query.SourceRetriever; import com.pinecone.slime.unitization.PartialRange; public interface BatchPageSourceRetriever extends SourceRetriever { String getRangeKey(); RangedDictCachePage retrieves( Object key ); > RangedDictCachePage retrieves( Object key, PartialRange range ); > PartialRange queryRangeOnly( Object key ); > long counts( PartialRange range ); long getBatchSize(); > T nextRangeMax( T key ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/CountSelfPooledPageDictCache.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache; public interface CountSelfPooledPageDictCache extends UniformCountSelfLoadingDictCache, PooledPageDictCache { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalHotspotPooledDictCache.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.unit.Mapnut; import com.pinecone.framework.unit.top.LinkedMultiTreeToptron; import com.pinecone.slime.cache.query.ArchCountDictCache; import com.pinecone.slime.cache.query.RangedDictCachePage; import com.pinecone.slime.unitization.PartialRange; import java.util.Map; public class LocalHotspotPooledDictCache, V > extends ArchCountDictCache implements CountSelfPooledPageDictCache { private final int mnPagesCapacity; private final int mnPageCapacity; private final int mnTemperaturesCapacity; protected Mapnut, RangedDictCachePage > mPageQueuePool; // Interval range search with O(log( SUM( Pages ) / Each )) protected Mapnut, Long > mTemperaturesRecord; // Interval range search with O(log( SUM( Pages ) / Each )) //protected Topper > > mTopNTemperatures; // Heap method to find top-N with O(log(N)) protected LinkedMultiTreeToptron > mTopNTemperatures; // Tree method to find top-N with O(log(N)) protected final BatchPageSourceRetriever mSourceRetriever; public LocalHotspotPooledDictCache( int nPageEachCapacity, int nPagesCapacity, int nTemperaturesCapacity, BatchPageSourceRetriever retriever ) { super(); this.mnPageCapacity = nPageEachCapacity; this.mnPagesCapacity = nPagesCapacity; this.mSourceRetriever = retriever; if( nTemperaturesCapacity < this.mnPagesCapacity ) { throw new IllegalArgumentException( "TemperaturesRecordCapacity can't be below the PagesCapacity." ); } this.mnTemperaturesCapacity = nTemperaturesCapacity; this.mPageQueuePool = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator, true ); // With deque sequence access order. this.mTemperaturesRecord = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator, true ); // With deque sequence access order.
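// How the three structures cooperate: mPageQueuePool answers interval lookups in O(log P) for P resident pages;
// mTemperaturesRecord keeps access heat for more ranges than can stay resident (by default 4x, see the delegating
// constructor below), so an evicted range can be readmitted once it reheats; mTopNTemperatures ranks which ranges
// are currently hot enough to deserve a resident page.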
// this.mTopNTemperatures = new HeapTopper<>( this.mnPagesCapacity, new Comparator<>() { // @Override // public int compare( Map.Entry > o1, Map.Entry > o2 ) { // return o1.getKey().compareTo( o2.getKey() ); // } // }); this.mTopNTemperatures = new LinkedMultiTreeToptron< >( this.mnPagesCapacity, true ); // Select Top-Pages::Temperature as activated caches. } public LocalHotspotPooledDictCache( int nPageEachCapacity, int nPagesCapacity, BatchPageSourceRetriever retriever ) { this( nPageEachCapacity, nPagesCapacity, nPagesCapacity * 4, retriever ); } protected void updateTopNTemperatures( TemperatureInfo info ) { this.mTopNTemperatures.clear(); info.nLowestTemp = Long.MAX_VALUE; info.nHighestTemp = Long.MIN_VALUE; for( Map.Entry, Long > kv : this.mTemperaturesRecord.entrySet() ) { Long tp = kv.getValue(); if ( tp < info.nLowestTemp ) { info.nLowestTemp = tp; info.lowestEntry = kv; } if ( tp > info.nHighestTemp ) { info.nHighestTemp = tp; info.highestEntry = kv; } this.mTopNTemperatures.add( tp, kv.getKey() ); //this.mTopNTemperatures.add( new KeyValue<>( tp, this.mPageQueuePool.get( kv.getKey() ) )); } Mapnut, RangedDictCachePage > neoPool = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator, true ); //Collection > > chosen = this.mTopNTemperatures.topmost(); info.nPooledLowestTemp = Long.MAX_VALUE; info.nPooledHighestTemp = Long.MIN_VALUE; for( Map.Entry > kv : this.mTopNTemperatures.collection()/*chosen*/ ) { Long tp = kv.getKey(); PartialRange range = kv.getValue(); RangedDictCachePage legacy = this.mPageQueuePool.get( range ); if( legacy == null ) { // Restore the page from a historical range. RangedDictCachePage recover = this.mSourceRetriever.retrieves( range ); neoPool.put( range, recover ); } else { neoPool.put( range, legacy ); } if ( tp < info.nPooledLowestTemp ) { info.nPooledLowestTemp = tp; info.lowestPooledTopEntry = kv; } if ( tp > info.nPooledHighestTemp ) { info.nPooledHighestTemp = tp; info.highestPooledTopEntry = kv; } } this.mPageQueuePool = neoPool; } protected void updateCacheTemperature( Object key ) { Map.Entry, Long > tempInfo = this.mTemperaturesRecord.getEntryByKey( key ); if( tempInfo != null ) { Long temperature = tempInfo.getValue(); ++temperature; tempInfo.setValue( temperature ); } } @Override protected void afterKeyVisited( Object key ) { super.afterKeyVisited( key ); } @Override protected V missKey( Object key ) { this.recordMiss(); TemperatureInfo info = new TemperatureInfo(); this.updateTopNTemperatures( info ); PartialRange range = this.mSourceRetriever.queryRangeOnly( key ); if( range != null ) { Long temperature = this.mTemperaturesRecord.get( range ); if( temperature != null ) { ++temperature; this.mTemperaturesRecord.put( range, temperature ); if( temperature >= info.nPooledLowestTemp ) { RangedDictCachePage page = this.mSourceRetriever.retrieves( key ); if( this.mPageQueuePool.size() >= this.mnPagesCapacity ) { Map.Entry > elimination = this.mTopNTemperatures.nextEviction(); this.mPageQueuePool.remove( elimination.getValue() ); } this.mTopNTemperatures.add( temperature, page.getRange() ); // Update TopNTemperatures, substituting the lowest LRU page. this.mPageQueuePool.put( range, page ); return page.get( key ); } return this.mSourceRetriever.retrieve( key ); // Don't use the cache.
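// Admission policy recap: a missed range is promoted to a resident page only once its recorded temperature
// reaches the coolest temperature currently pooled (info.nPooledLowestTemp); colder ranges are answered straight
// from the source, so one-off scans cannot flush the hot set.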
} else { temperature = 1L; if( this.mPageQueuePool.size() < this.mnPagesCapacity ) { RangedDictCachePage page = this.mSourceRetriever.retrieves( key ); this.mTopNTemperatures.add( temperature, page.getRange() ); this.mPageQueuePool.put( range, page ); this.mTemperaturesRecord.put( range, temperature ); // TemperaturesRecord.size should exceed PageQueuePool.size return page.get( key ); } else { if( this.mTemperaturesRecord.size() >= this.mnTemperaturesCapacity ) { this.mTemperaturesRecord.remove( info.lowestEntry.getKey() ); // Substitute the lowest entry in the record. temperature = info.lowestEntry.getValue(); } this.mTemperaturesRecord.put( range, temperature ); return this.mSourceRetriever.retrieve( key ); // Don't use the cache. } } } return null; } @Override public long capacity() { return this.mnPageCapacity * this.mnPagesCapacity; } @Override public long getPooledPagesCapacity() { return this.mnPagesCapacity; } @Override public long size() { return PoolCaches.countPoolSize( this.mPageQueuePool ); } @Override public boolean isEmpty() { return this.mPageQueuePool.isEmpty(); } protected V getFromCache( Object key ) { for( Map.Entry, RangedDictCachePage > kv : this.mPageQueuePool.entrySet() ) { V v = kv.getValue().get( key ); if( v != null ) { return v; } } return null; } @Override public V erase( Object key ) { for( Map.Entry, RangedDictCachePage > kv : this.mPageQueuePool.entrySet() ) { V v = kv.getValue().erase( key ); if( v != null ) { return v; } } return null; } @Override public V get( Object key ) { V v = this.getFromCache( key ); if( v == null ) { //Debug.trace( key ); v = this.missKey( key ); // Update miss temperature } else { this.updateCacheTemperature( key ); // Update cache temperature } this.afterKeyVisited( key ); return v; } @Override public boolean existsKey( Object key ) { boolean b = this.getFromCache( key ) != null; this.afterKeyVisited( key ); return b; } @Override public boolean implicatesKey( Object key ) { return this.get( key ) != null; } @Override public void clear() { this.mPageQueuePool.clear(); } @Override public BatchPageSourceRetriever getSourceRetriever() { return this.mSourceRetriever; } class TemperatureInfo { long nLowestTemp = Long.MAX_VALUE; long nHighestTemp = Long.MIN_VALUE; long nPooledLowestTemp = Long.MAX_VALUE; long nPooledHighestTemp = Long.MIN_VALUE; Map.Entry, Long > lowestEntry; Map.Entry, Long > highestEntry; Map.Entry > lowestPooledTopEntry; // In top-N unit. Map.Entry > highestPooledTopEntry; // In top-N unit.
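// TemperatureInfo is a per-call snapshot filled by updateTopNTemperatures(): the coolest/hottest entries over
// the whole temperature record, plus the coolest/hottest among the top-N actually pooled; missKey() compares
// against nPooledLowestTemp to decide admission.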
} } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalLRUPooledDictCache.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.slime.cache.query.ArchCountDictCache; import com.pinecone.slime.cache.query.RangedDictCachePage; import com.pinecone.slime.unitization.PartialRange; import java.util.Map; public class LocalLRUPooledDictCache, V > extends ArchCountDictCache implements CountSelfPooledPageDictCache { private final int mnPagesCapacity; private final int mnPageCapacity; protected final Map, RangedDictCachePage> mPageQueuePool; protected final BatchPageSourceRetriever mSourceRetriever; public LocalLRUPooledDictCache( int nPageEachCapacity, int nPagesCapacity, BatchPageSourceRetriever retriever ) { super(); this.mnPageCapacity = nPageEachCapacity; this.mnPagesCapacity = nPagesCapacity; this.mSourceRetriever = retriever; this.mPageQueuePool = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator ) { @Override protected boolean removeEldestEntry( Map.Entry, RangedDictCachePage > eldest ) { return this.size() > LocalLRUPooledDictCache.this.mnPagesCapacity; } }; } @Override protected void afterKeyVisited( Object key ) { super.afterKeyVisited( key ); // Since the map uses `accessOrder`, the most recently accessed key automatically moves to the top. } @Override protected V missKey( Object key ) { this.recordMiss(); RangedDictCachePage page = this.mSourceRetriever.retrieves( key ); if( page != null ) { this.mPageQueuePool.put( page.getRange(), page ); return page.get( key ); } return null; } @Override public long capacity() { return this.mnPageCapacity * this.mnPagesCapacity; } @Override public long getPooledPagesCapacity() { return this.mnPagesCapacity; } @Override public long size() { return PoolCaches.countPoolSize( this.mPageQueuePool ); } @Override public boolean isEmpty() { return this.mPageQueuePool.isEmpty(); } protected V getFromCache( Object key ) { for( Map.Entry, RangedDictCachePage > kv : this.mPageQueuePool.entrySet() ) { V v = kv.getValue().get( key ); if( v != null ) { return v; } } return null; } @Override public V get( Object key ) { V v = this.getFromCache( key ); if( v == null ) { v = this.missKey( key ); } this.afterKeyVisited( key ); return v; } @Override public V erase( Object key ) { for( Map.Entry, RangedDictCachePage > kv : this.mPageQueuePool.entrySet() ) { V v = kv.getValue().erase( key ); if( v != null ) { return v; } } return null; } @Override public boolean existsKey( Object key ) { boolean b = this.getFromCache( key ) != null; this.afterKeyVisited( key ); return b; } @Override public boolean implicatesKey( Object key ) { return this.get( key ) != null; } @Override public void clear() { this.mPageQueuePool.clear(); } @Override public BatchPageSourceRetriever getSourceRetriever() { return this.mSourceRetriever; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalLRUPrimaryPooledDictCache.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.slime.cache.query.RangedDictCachePage; /** * For cases where the index-key is the same as the dict-cache-key * e.g.
RDB::id( Auto-Increment ) as the Range-Key and the Cache-Key * [000-100] => [object { 0 => key0, 1 => key1, ..., 100 => key100 }] * [100-200] => [object { 100 => key100, 101 => key101, ..., 200 => key200 }] * In this example: Find( key ) => O(log(pages))(TreeMap) + O(1)(HashMap) */ public class LocalLRUPrimaryPooledDictCache, V > extends LocalLRUPooledDictCache { public LocalLRUPrimaryPooledDictCache( int nPageEachCapacity, int nPagesCapacity, BatchPageSourceRetriever retriever ) { super( nPageEachCapacity, nPagesCapacity, retriever ); } @Override protected V getFromCache( Object key ) { RangedDictCachePage page = this.mPageQueuePool.get( key ); if( page != null ) { return page.get( key ); } return null; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalRangedDictCachePage.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.framework.unit.Dictium; import com.pinecone.slime.cache.query.LocalBufferedDictCachePage; import com.pinecone.slime.cache.query.RangedDictCachePage; import com.pinecone.slime.unitization.PartialRange; public class LocalRangedDictCachePage extends LocalBufferedDictCachePage implements RangedDictCachePage { protected PartialRange mRange; public > LocalRangedDictCachePage( long id, int capacity, Dictium cache, PartialRange range ) { super( id, capacity, cache ); this.mRange = range; } @Override @SuppressWarnings( "unchecked" ) public > PartialRange getRange() { return (PartialRange) this.mRange; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/PoolCaches.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.slime.cache.query.RangedDictCachePage; import com.pinecone.slime.unitization.PartialRange; import java.util.Map; public final class PoolCaches { public static , V > long countPoolSize( Map, RangedDictCachePage> pool ) { long n = 0; for( Map.Entry, RangedDictCachePage> kv : pool.entrySet() ) { n += kv.getValue().size(); } return n; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/PooledPageDictCache.java ================================================ package com.pinecone.slime.cache.query.pool; import com.pinecone.slime.cache.query.UniformDictCache; public interface PooledPageDictCache extends UniformDictCache { long getPooledPagesCapacity(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/PrimaryPooledDictCache.java ================================================ package com.pinecone.slime.cache.query.pool; public interface PrimaryPooledDictCache { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/ArchPatriarchalChunk.java ================================================ package com.pinecone.slime.chunk; public abstract class ArchPatriarchalChunk implements PatriarchalChunk { protected ArchPatriarchalChunk mParent; protected ArchPatriarchalChunk(){ } protected ArchPatriarchalChunk( ArchPatriarchalChunk parent ) { this.mParent = parent; } @Override public PatriarchalChunk parent() { return this.mParent; } @Override public void setParent( PatriarchalChunk parent ){ this.mParent = (ArchPatriarchalChunk) parent; } } ================================================ FILE: 
Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Chunk.java ================================================ package com.pinecone.slime.chunk; import com.pinecone.framework.system.prototype.Pinenut; public interface Chunk extends Pinenut { long getId(); void setId( long id ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/ContiguousPage.java ================================================ package com.pinecone.slime.chunk; public interface ContiguousPage extends Page, Continunk { void apply( Object... args ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Continunk.java ================================================ package com.pinecone.slime.chunk; import com.pinecone.slime.unitization.Range; import com.pinecone.slime.unitization.Precision; /** * Continum Chunk */ public interface Continunk extends Chunk { Range getRange(); Precision size(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/DiscreteChunk.java ================================================ package com.pinecone.slime.chunk; import com.pinecone.slime.unitization.Precision; public interface DiscreteChunk extends Chunk { Precision size(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/DivisibleChunk.java ================================================ package com.pinecone.slime.chunk; /** * Divisible Chunk [Slime] */ public interface DivisibleChunk extends Chunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Frame.java ================================================ package com.pinecone.slime.chunk; public interface Frame extends Minimunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Minimunk.java ================================================ package com.pinecone.slime.chunk; /** * Minimum Chunk */ public interface Minimunk extends Chunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Page.java ================================================ package com.pinecone.slime.chunk; public interface Page extends Chunk { long elementSize(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/PatriarchalChunk.java ================================================ package com.pinecone.slime.chunk; public interface PatriarchalChunk extends Chunk { PatriarchalChunk parent(); default PatriarchalChunk root() { PatriarchalChunk p = this.parent(); if( p == null ) { return this; } return p.root(); } void setParent( PatriarchalChunk parent ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/RangedChunk64.java ================================================ package com.pinecone.slime.chunk; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import com.pinecone.slime.unitization.MinMaxRange64; import com.pinecone.slime.unitization.Precision64; public abstract class RangedChunk64 extends ArchPatriarchalChunk implements Splitunk { protected long mnId; protected MinMaxRange64 mRange; protected Precision64 mChunkSize; protected RangedChunk64(){ super(); } public RangedChunk64( long nStart, long nEnd, long id, 
RangedChunk64 parent ) { super( parent ); this.applyMembers( nStart, nEnd, id, parent ); } public RangedChunk64( long nStart, long nEnd, long id ) { this( nStart, nEnd, id, null ); } protected void applyMembers( long nStart, long nEnd, long id, RangedChunk64 parent ) { this.mParent = parent; this.mnId = id; this.mRange = new MinMaxRange64( nStart, nEnd ); this.mChunkSize = new Precision64( (long)this.mRange.span() ); } @Override public long getId() { return this.mnId; } @Override public void setId( long id ) { this.mnId = id; } @Override public MinMaxRange64 getRange() { return this.mRange; } @Override public Precision64 size(){ return this.mChunkSize; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "class", this.className() ), new KeyValue<>( "min", this.getRange().getMin() ), new KeyValue<>( "max", this.getRange().getMax() ) } ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/RangedPage.java ================================================ package com.pinecone.slime.chunk; import com.pinecone.slime.unitization.NumPrecision; public interface RangedPage extends ContiguousPage { @Override NumPrecision size(); @Override default long elementSize() { return this.size().longValue(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/RangedPage64.java ================================================ package com.pinecone.slime.chunk; public class RangedPage64 extends RangedChunk64 implements RangedPage { public RangedPage64(){ super(); } public RangedPage64( long nStart, long nEnd, long id, RangedChunk64 parent ) { super( nStart, nEnd, id, parent ); } public RangedPage64( long nStart, long nEnd, long id ) { super( nStart, nEnd, id ); } public void apply( long nStart, long nEnd, long id, RangedChunk64 parent ) { this.applyMembers( nStart, nEnd, id, parent ); } @Override public void apply( Object... args ) { if( args.length == 0 ) { return; } else if( args.length >= 3 ) { long nStart = ((Number) args[0]).longValue(); long nEnd = ((Number) args[1]).longValue(); long id = ((Number) args[2]).longValue(); RangedChunk64 parent = null; if( args.length >= 4 ){ parent = (RangedChunk64) args[3]; } this.applyMembers( nStart, nEnd, id, parent ); return; } throw new IllegalArgumentException( "RangedPage64 only be applied with 0, 3 and 4 arguments." 
); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Splitunk.java ================================================ package com.pinecone.slime.chunk; public interface Splitunk extends Continunk, DivisibleChunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/ArchMasterSplitunkPartitioner64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.Splitunk; public abstract class ArchMasterSplitunkPartitioner64 implements ChunkPartitioner { protected Splitunk mMasterChunk; protected ArchMasterSplitunkPartitioner64( Splitunk masterChunk ) { this.mMasterChunk = masterChunk; } @Override public Splitunk getMasterChunk(){ return this.mMasterChunk; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/BuddyPrepPartitionDividerStrategy64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.cluster.SequentialChunkGroup; import com.pinecone.slime.chunk.Chunk; import java.util.List; public class BuddyPrepPartitionDividerStrategy64 implements PartitionDividerStrategy { protected long mnMaxPerPage; protected int mnBootstrapDivFactor; protected long mnMinThresholdPerPage; public BuddyPrepPartitionDividerStrategy64( long nMaxPerPage, int nBootstrapDivFactor, long nMinThresholdPerPage ) { this.mnMaxPerPage = nMaxPerPage; this.mnBootstrapDivFactor = nBootstrapDivFactor; this.mnMinThresholdPerPage = nMinThresholdPerPage; } public BuddyPrepPartitionDividerStrategy64( long nMaxPerPage, long nMinThresholdPerPage ) { this( nMaxPerPage, 2, nMinThresholdPerPage ); } @Override public SequentialChunkGroup assignment(SequentialChunkGroup group ) { List chunks = (List) group.getSequentialChunks(); long each = this.mnMaxPerPage; for ( int i = 0; i < chunks.size(); ++i ) { ( (PreparedPageDividerPartition64)chunks.get( i ) ).setEachPerPage( each ); each = Math.max( this.mnMinThresholdPerPage , each / this.mnBootstrapDivFactor ); } return group; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/ChunkPartitioner.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.slime.chunk.DivisibleChunk; public interface ChunkPartitioner extends Pinenut { DivisibleChunk getMasterChunk(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/EvenSeqChunkPartitioner64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.RangedPage64; import com.pinecone.slime.chunk.scheduler.DirectPagePool; import com.pinecone.slime.chunk.scheduler.FixedPageDivider64; import com.pinecone.slime.cluster.Cluster; import com.pinecone.slime.cluster.SequentialChunkGroup; import com.pinecone.slime.unitization.NumPrecision; import com.pinecone.slime.chunk.Chunk; import com.pinecone.slime.chunk.Splitunk; public abstract class EvenSeqChunkPartitioner64 extends ArchMasterSplitunkPartitioner64 { protected long mnGroups; protected long mnEach; protected FixedPageDivider64 mDivider; protected EvenSeqChunkPartitioner64( Splitunk masterChunk, long nGroups ) { super( masterChunk ); 
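// Even split: each of the nGroups clusters covers size( masterChunk ) / nGroups units, carved off sequentially
// by the FixedPageDivider64 initialized below.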
this.mnGroups = nGroups; this.mnEach = ((NumPrecision)this.getMasterChunk().size()).longValue() / this.mnGroups; this.mDivider = new FixedPageDivider64( this.getMasterChunk(), new DirectPagePool( RangedPage64.class ), this.mnEach ); } protected abstract Cluster newCluster(long nMin, long nMax, long id ); protected abstract SequentialChunkGroup newGroup(); public SequentialChunkGroup partition() { SequentialChunkGroup group = this.newGroup(); for ( long i = 0; i < this.mnGroups; i++ ) { Chunk c = this.mDivider.allocate(); RangedPage64 tp = (RangedPage64) c; group.add( this.newCluster( tp.getRange().getMin(), tp.getRange().getMax(), i ) ); } return group; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PageCluster.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.ContiguousPage; import com.pinecone.slime.cluster.RangedCluster; public interface PageCluster extends RangedCluster { boolean hasOwnPage( ContiguousPage that ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PageDividerPartition64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.ContiguousPage; public interface PageDividerPartition64 extends PagePartition { long pagesSize(); long eachPerPage(); void inheritRange( ContiguousPage that ); void setEachPerPage( long eachPerPage ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PageGroup.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.cluster.ChunkGroup; import com.pinecone.slime.chunk.PatriarchalChunk; public interface PageGroup extends PatriarchalChunk, ChunkGroup { boolean hasOwnPartition( PagePartition that ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PagePartition.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.PatriarchalChunk; import com.pinecone.slime.chunk.Splitunk; public interface PagePartition extends PageCluster, PatriarchalChunk, Splitunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PagePartitionGroup.java ================================================ package com.pinecone.slime.chunk.marshaling; public interface PagePartitionGroup extends PageGroup { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PagePartitioner.java ================================================ package com.pinecone.slime.chunk.marshaling; public interface PagePartitioner extends ChunkPartitioner { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PartitionDividerStrategy.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.cluster.SequentialChunkGroup; import com.pinecone.framework.system.prototype.Pinenut; public interface PartitionDividerStrategy extends Pinenut { SequentialChunkGroup assignment(SequentialChunkGroup group ); } ================================================ FILE: 
Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PartitionableChunkDivider64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.scheduler.ArchMasterSplitunkDivider64; import com.pinecone.slime.chunk.scheduler.BadAllocateException; import com.pinecone.slime.chunk.Chunk; import com.pinecone.slime.chunk.Splitunk; import com.pinecone.slime.chunk.scheduler.ChunkDivider; import com.pinecone.slime.chunk.scheduler.FixedChunkDivider64; import com.pinecone.slime.cluster.SequentialChunkGroup; import java.util.ArrayList; import java.util.List; public abstract class PartitionableChunkDivider64 extends ArchMasterSplitunkDivider64 implements ChunkDivider { protected SequentialChunkGroup mChunkGroup; protected List mPartitionsOwnedDivider; protected int mnCurrentPartDivider; protected long mnCurrentEpoch; protected long mnMaxAllocations; public PartitionableChunkDivider64( Splitunk masterChunk, SequentialChunkGroup chunkGroup ) { super( masterChunk ); this.mnCurrentEpoch = 0; this.mChunkGroup = chunkGroup; this.mnCurrentPartDivider = 0; } protected abstract FixedChunkDivider64 newDivider( Splitunk masterChunk, long each ); protected void preparePartitionsOwnedDivider() { this.mPartitionsOwnedDivider = new ArrayList<>(); List chunks = (List) this.mChunkGroup.getSequentialChunks(); for ( int i = 0; i < chunks.size(); ++i ) { PreparedPageDividerPartition64 partition = (PreparedPageDividerPartition64)chunks.get( i ); FixedChunkDivider64 divider = this.newDivider( partition, partition.eachPerPage() ); this.mPartitionsOwnedDivider.add( divider ); this.mnMaxAllocations += divider.getMaxAllocations(); } } @Override public long getMaxAllocations() { return this.mnMaxAllocations; } @Override public long remainAllocatable(){ return this.mnMaxAllocations - this.mnCurrentEpoch; } @Override public Chunk allocate() throws BadAllocateException { if( this.mnCurrentEpoch < this.getMaxAllocations() ) { FixedChunkDivider64 divider = this.mPartitionsOwnedDivider.get( this.mnCurrentPartDivider ); if( divider.remainAllocatable() == 0 ){ ++this.mnCurrentPartDivider; divider = this.mPartitionsOwnedDivider.get( this.mnCurrentPartDivider ); } Chunk chunk = divider.allocate(); chunk.setId( this.mnCurrentEpoch ); ++this.mnCurrentEpoch; return chunk; } throw new BadAllocateException(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PartitionablePageDivider64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.scheduler.FixedPageDivider64; import com.pinecone.slime.chunk.scheduler.PageDivider; import com.pinecone.slime.cluster.SequentialChunkGroup; import com.pinecone.slime.chunk.Chunk; import com.pinecone.slime.chunk.Splitunk; import com.pinecone.slime.chunk.scheduler.FixedChunkDivider64; import com.pinecone.slime.chunk.scheduler.PagePool; public class PartitionablePageDivider64 extends PartitionableChunkDivider64 implements PageDivider { protected PagePool mPagePool; protected long mnPageIdOffset; public PartitionablePageDivider64(Splitunk masterChunk, PagePool pagePool, SequentialChunkGroup chunkGroup, long pageIdOffset ) { super( masterChunk, chunkGroup ); this.mPagePool = pagePool; this.mnPageIdOffset = pageIdOffset; this.preparePartitionsOwnedDivider(); } public PartitionablePageDivider64( Splitunk masterChunk, PagePool pagePool, SequentialChunkGroup chunkGroup ) { this( 
masterChunk, pagePool, chunkGroup, 0 ); } @Override protected Chunk newChunk( long start, long end, long epoch ) { return this.mPagePool.allocate( start, end, this.mnPageIdOffset + this.mnCurrentEpoch, this.mMasterChunk ); } @Override protected FixedChunkDivider64 newDivider( Splitunk masterChunk, long each ) { return new FixedPageDivider64( masterChunk, this.getPagePool(), each ); } @Override public PagePool getPagePool() { return this.mPagePool; } @Override public long getPageIdOffset() { return this.mnPageIdOffset; } @Override public void setPageIdOffset( long offset ) { this.mnPageIdOffset = offset; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PreparedEvenSeqPagePartitioner64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.RangedChunk64; import com.pinecone.slime.chunk.Splitunk; public class PreparedEvenSeqPagePartitioner64 extends EvenSeqChunkPartitioner64 { public PreparedEvenSeqPagePartitioner64( Splitunk masterChunk, long nGroups ) { super( masterChunk, nGroups ); } @Override protected PreparedPageDividerPartition64 newCluster( long nMin, long nMax, long id ) { return new PreparedPageDividerPartition64( nMin, nMax, id, 1, (RangedChunk64) this.getMasterChunk() ); } @Override protected SequentialPagePartitionGroup64 newGroup() { return new SequentialPagePartitionGroup64(); } @Override public SequentialPagePartitionGroup64 partition() { return (SequentialPagePartitionGroup64)super.partition(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PreparedPageDividerPartition64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.chunk.RangedChunk64; import com.pinecone.slime.chunk.RangedPage; import com.pinecone.slime.unitization.MinMaxRange; import com.pinecone.slime.chunk.ContiguousPage; public class PreparedPageDividerPartition64 extends RangedChunk64 implements PageDividerPartition64 { protected long mnPagesSize; protected long mnEachPerPage; public PreparedPageDividerPartition64( long nStart, long nEnd, long id, long each, RangedChunk64 parent ) { super( nStart, nEnd, id, parent ); this.mnEachPerPage = each; this.update_page_size(); } public PreparedPageDividerPartition64(ContiguousPage inheritedIntegratedPage, long id, long each, RangedChunk64 parent ) { this( 0, 0, id, each, parent ); this.inheritRange( inheritedIntegratedPage ); this.update_page_size(); } public PreparedPageDividerPartition64(ContiguousPage inheritedIntegratedPage, long id, long each ) { this( inheritedIntegratedPage, id, each, null ); } protected void update_page_size() { this.mnPagesSize = (this.getRange().span() + this.mnEachPerPage - 1) / this.mnEachPerPage; } @Override public long pagesSize() { return this.mnPagesSize; } @Override public long eachPerPage() { return this.mnEachPerPage; } @Override public void setEachPerPage( long eachPerPage ) { this.mnEachPerPage = eachPerPage; } @Override public void inheritRange( ContiguousPage that ) { MinMaxRange range = (MinMaxRange) that.getRange(); this.mRange.setRange( range.getMin(), range.getMax() ); } @Override public PageDividerPartition64 parent() { return (PageDividerPartition64)this.mParent; } @Override public boolean hasOwnPage( ContiguousPage that ) { RangedPage rangedPage = (RangedPage) that; return this.getRange().contains( rangedPage.getRange() ); } } 
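A minimal wiring sketch of the partitioning pipeline above (illustrative only, not a file from the repository; the 1_000_000 / 4 / 65536 / 1024 values are assumed): the even partitioner splits a master chunk into prepared partitions, the buddy strategy assigns geometrically shrinking per-page quotas, and the partitionable divider then hands out pages partition by partition.

// Sketch only; constructors and signatures are those defined in the files above.
PreparedPageDividerPartition64 master =
        new PreparedPageDividerPartition64( 0, 1_000_000, 0, 1, null );        // whole range as the master Splitunk
SequentialPagePartitionGroup64 group =
        new PreparedEvenSeqPagePartitioner64( master, 4 ).partition();         // four even partitions of 250_000 each
new BuddyPrepPartitionDividerStrategy64( 65536, 1024 ).assignment( group );    // per-page quotas: 65536, 32768, 16384, 8192
PartitionablePageDivider64 divider =
        new PartitionablePageDivider64( master, new DirectPagePool( RangedPage64.class ), group );
while ( divider.remainAllocatable() > 0 ) {
    RangedPage64 page = (RangedPage64) divider.allocate();                     // pages emerge partition by partition
    // ... consume page.getRange() ...
}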
================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/SequentialPagePartitionGroup.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.cluster.SequentialChunkGroup; public interface SequentialPagePartitionGroup extends SequentialChunkGroup, PagePartitionGroup { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/SequentialPagePartitionGroup64.java ================================================ package com.pinecone.slime.chunk.marshaling; import com.pinecone.slime.cluster.ArchSequentialChunkGroup; import com.pinecone.slime.chunk.ArchPatriarchalChunk; import com.pinecone.slime.chunk.PatriarchalChunk; public class SequentialPagePartitionGroup64 extends ArchSequentialChunkGroup implements PatriarchalChunk, SequentialPagePartitionGroup { protected ArchPatriarchalChunk mParent; public SequentialPagePartitionGroup64() { super(); } @Override public PatriarchalChunk parent() { return this.mParent; } @Override public void setParent( PatriarchalChunk parent ){ this.mParent = (ArchPatriarchalChunk) parent; } @Override public PagePartitionGroup getFirstChunkById( long id ){ return (PagePartitionGroup) super.getFirstChunkById( id ); } @Override public boolean hasOwnPartition( PagePartition that ) { return this.mChunkRegister.containsKey( that.getId() ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ActivePageScheduler.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; public interface ActivePageScheduler extends RangedPageScheduler { ContiguousPage activate(); void activate( ContiguousPage that ); void deactivate( ContiguousPage that ); void deactivate( ContiguousPage[] those ); long getActivatedSize(); ContiguousPage getPageById(long id ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ActivePageScheduler64.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; import com.pinecone.slime.chunk.RangedPage; import java.util.ArrayList; import java.util.Map; public abstract class ActivePageScheduler64 extends RangedPageScheduler64 implements ActivePageScheduler { protected ChunkRegister mChunkRegister; protected ActivePageScheduler64( RangedPage masterPage, PagePool pagePool, PageDivider divider, long autoIncrementId ) { super( masterPage, pagePool, divider, autoIncrementId ); } protected ActivePageScheduler64( PageDivider divider, long autoIncrementId ) { super( divider, autoIncrementId ); } @Override protected void beforeActivatePage() { if( this.mRecycleStrategy == null ) { return; } ArrayList badPages = null; for ( Map.Entry kv : this.mChunkRegister.entrySet() ) { if( this.mRecycleStrategy.qualified( (ContiguousPage)kv.getValue() ) ) { if( badPages == null ) { badPages = new ArrayList<>(); } badPages.add( (ContiguousPage)kv.getValue() ); } } if( badPages != null ) { for ( ContiguousPage p : badPages ) { this.deactivate( p ); } } } @Override public ContiguousPage activate() { this.beforeActivatePage(); ContiguousPage page = (ContiguousPage) this.getDivider().allocate(); this.activate( page ); return page; } @Override public void activate( ContiguousPage that ) { 
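// Register the page by id so it can be retrieved via getPageById and later deactivated.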
this.mChunkRegister.put( that.getId(), that ); } @Override public void deactivate( ContiguousPage that ) { this.mChunkRegister.remove( that.getId() ); this.mPagePool.deallocate( that ); } @Override public void deactivate( ContiguousPage[] those ){ for ( ContiguousPage p : those ) { this.deactivate( p ); } } @Override public long getActivatedSize() { return this.mChunkRegister.size(); } @Override public ContiguousPage getPageById(long id ){ return this.mChunkRegister.get( id ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ArchMasterSplitunkDivider64.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.Chunk; import com.pinecone.slime.chunk.Splitunk; public abstract class ArchMasterSplitunkDivider64 implements ChunkDivider { protected Splitunk mMasterChunk; protected ArchMasterSplitunkDivider64( Splitunk masterChunk ) { this.mMasterChunk = masterChunk; } protected abstract Chunk newChunk( long start, long end, long epoch ); @Override public Splitunk getMasterChunk(){ return this.mMasterChunk; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/BadAllocateException.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.framework.system.PineRuntimeException; public class BadAllocateException extends PineRuntimeException { public BadAllocateException () { super(); } public BadAllocateException ( String message ) { super(message); } public BadAllocateException ( String message, Throwable cause ) { super(message, cause); } public BadAllocateException ( Throwable cause ) { super(cause); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/BatchActivePageScheduler.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; public interface BatchActivePageScheduler extends ActivePageScheduler { long batchSize(); long getBatchEpoch(); ContiguousPage[] activates(long batch ); default ContiguousPage[] activates() { return this.activates( this.batchSize() ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/BatchActivePageScheduler64.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.RangedPage; public abstract class BatchActivePageScheduler64 extends ActivePageScheduler64 implements BatchActivePageScheduler { protected long mnBatchSize; protected long mnBatchEpoch; protected BatchActivePageScheduler64(RangedPage masterPage, PagePool pagePool, PageDivider divider, long autoIncrementId, long batchSize ) { super( masterPage, pagePool, divider, autoIncrementId ); this.mnBatchSize = batchSize; this.mnBatchEpoch = 0; } protected BatchActivePageScheduler64( PageDivider divider, long autoIncrementId, long batchSize ) { super( divider, autoIncrementId ); this.mnBatchSize = batchSize; this.mnBatchEpoch = 0; } @Override public long batchSize(){ return this.mnBatchSize; } @Override public long getBatchEpoch(){ return this.mnBatchEpoch; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ChunkDivider.java ================================================ 
package com.pinecone.slime.chunk.scheduler; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.slime.chunk.Chunk; import com.pinecone.slime.chunk.DivisibleChunk; public interface ChunkDivider extends Pinenut { Chunk allocate() throws BadAllocateException; DivisibleChunk getMasterChunk(); long getMaxAllocations(); long remainAllocatable(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ChunkRegister.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Map; public interface ChunkRegister extends Pinenut, Map { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/DefaultPageRecycleStrategy.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; public class DefaultPageRecycleStrategy implements PageRecycleStrategy { protected PageScheduler mPageScheduler; public DefaultPageRecycleStrategy( PageScheduler parent ){ this.mPageScheduler = parent; } @Override public PageScheduler parentScheduler(){ return this.mPageScheduler; } @Override public boolean qualified( ContiguousPage that ){ return false; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/DirectPagePool.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.framework.system.PineRuntimeException; import com.pinecone.slime.chunk.ContiguousPage; import java.lang.reflect.InvocationTargetException; public class DirectPagePool implements PagePool { protected Class stereotype; public DirectPagePool( Class stereotype ){ this.stereotype = stereotype; } @Override public int size(){ return Integer.MAX_VALUE - 2; } @Override public ContiguousPage allocate( Object... 
args ){ ContiguousPage page; try { page = this.stereotype.getDeclaredConstructor().newInstance(); } catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) { throw new PineRuntimeException( "Failed to allocate a new page.", e ); } page.apply( args ); return page; } @Override public void deallocate( ContiguousPage that ){ } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/FixedChunkDivider64.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.Splitunk; import com.pinecone.slime.unitization.MinMaxRange; import com.pinecone.slime.chunk.Chunk; public abstract class FixedChunkDivider64 extends ArchMasterSplitunkDivider64 implements ChunkDivider { protected long mnEach; protected long mnStartOffset; protected long mnChunkMin; protected long mnChunkMax; protected long mnChunkElements; protected long mnCurrentEpoch; protected long mnMaxAllocations; public FixedChunkDivider64(Splitunk masterChunk, long each ) { super( masterChunk ); this.mnEach = each; this.mnChunkMin = ( (MinMaxRange)this.mMasterChunk.getRange() ).getMin().longValue(); this.mnChunkMax = ( (MinMaxRange)this.mMasterChunk.getRange() ).getMax().longValue(); this.mnStartOffset = this.mnChunkMin; this.mnChunkElements = this.mnChunkMax - this.mnChunkMin; this.mnCurrentEpoch = 0; this.mnMaxAllocations = (this.mnChunkElements + this.mnEach - 1) / this.mnEach; } protected long nextRange( long to ) { if( to + this.mnEach > this.mnChunkElements ) { return this.mnChunkElements; } return to + this.mnEach; } @Override public long getMaxAllocations() { return this.mnMaxAllocations; } @Override public long remainAllocatable(){ return this.mnMaxAllocations - this.mnCurrentEpoch; } public long getEach() { return this.mnEach; } @Override public Chunk allocate() throws BadAllocateException { if( this.mnCurrentEpoch < this.getMaxAllocations() ) { long start = this.nextRange( (this.mnCurrentEpoch - 1) * this.mnEach ) + this.mnStartOffset; long end = this.nextRange( this.mnCurrentEpoch * this.mnEach ) + this.mnStartOffset; Chunk chunk = this.newChunk( start, end, this.mnCurrentEpoch ); ++this.mnCurrentEpoch; return chunk; } throw new BadAllocateException(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/FixedPageDivider64.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.Chunk; import com.pinecone.slime.chunk.Splitunk; import com.pinecone.slime.chunk.marshaling.PageDividerPartition64; public class FixedPageDivider64 extends FixedChunkDivider64 implements PageDivider { protected PagePool mPagePool; protected long mnPageIdOffset; public FixedPageDivider64( Splitunk masterChunk, PagePool pagePool, long each, long pageIdOffset ) { super( masterChunk, each ); this.mPagePool = pagePool; this.mnPageIdOffset = pageIdOffset; } public FixedPageDivider64( Splitunk masterChunk, PagePool pagePool, long each ) { this( masterChunk, pagePool, each, 0 ); } public FixedPageDivider64( PageDividerPartition64 partition, PagePool pagePool, long pageIdOffset ) { this( partition, pagePool, partition.eachPerPage(), pageIdOffset ); } public FixedPageDivider64( PageDividerPartition64 partition, PagePool pagePool ) { this( partition, pagePool, 0 ); } @Override protected Chunk newChunk( long start, long end, long epoch ) { 
return this.mPagePool.allocate( start, end, this.mnPageIdOffset + this.mnCurrentEpoch, this.mMasterChunk ); } @Override public PagePool getPagePool() { return this.mPagePool; } @Override public long getPageIdOffset() { return this.mnPageIdOffset; } @Override public void setPageIdOffset( long offset ) { this.mnPageIdOffset = offset; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/LocalBatchActivePageScheduler64.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; public class LocalBatchActivePageScheduler64 extends BatchActivePageScheduler64 { public LocalBatchActivePageScheduler64( PageDivider divider, long autoIncrementId, long batchSize ) { super( divider, autoIncrementId, batchSize ); this.mChunkRegister = new LocalMapChunkRegister<>(); } @Override public ContiguousPage[] activates(long batch ) { long nActivated = this.getActivatedSize(); long nAllocations = batch - nActivated; long nRemains = this.getDivider().remainAllocatable(); if( nRemains < nAllocations ) { nAllocations = nRemains; } int iAlloc = (int) nAllocations; ContiguousPage[] pages = new ContiguousPage[ iAlloc ]; for ( int i = 0; i < iAlloc; i++ ) { pages[i] = this.activate(); } ++this.mnBatchEpoch; return pages; } @Override public ContiguousPage[] activates() { return this.activates( this.batchSize() ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/LocalMapChunkRegister.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.framework.util.json.JSON; import java.util.Map; import java.util.LinkedHashMap; import java.util.Set; import java.util.Collection; public class LocalMapChunkRegister implements ChunkRegister { private final Map targetMap; public LocalMapChunkRegister() { this.targetMap = new LinkedHashMap<>(); } public LocalMapChunkRegister( Map otherMap ) { this.targetMap = otherMap; } @Override public int size() { return this.targetMap.size(); } @Override public boolean isEmpty() { return this.targetMap.isEmpty(); } @Override public boolean containsKey( Object key ) { return this.targetMap.containsKey(key); } @Override public boolean containsValue( Object value ) { return this.targetMap.containsValue(value); } @Override public V get( Object key ) { return this.targetMap.get(key); } @Override public V put( K key, V value ) { return this.targetMap.put(key, value); } @Override public V remove( Object key ) { return this.targetMap.remove(key); } @Override public void putAll( Map m ) { this.targetMap.putAll(m); } @Override public void clear() { this.targetMap.clear(); } @Override public Set keySet() { return this.targetMap.keySet(); } @Override public Collection values() { return this.targetMap.values(); } @Override public Set > entrySet() { return this.targetMap.entrySet(); } @Override public boolean equals( Object o ){ return this.targetMap.equals(o); } @Override public int hashCode(){ return this.targetMap.hashCode(); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PageDivider.java ================================================ package com.pinecone.slime.chunk.scheduler; public interface PageDivider 
extends ChunkDivider { PagePool getPagePool(); long getPageIdOffset(); void setPageIdOffset( long offset ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PagePool.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; import com.pinecone.framework.system.prototype.Pinenut; public interface PagePool extends Pinenut { int size(); ContiguousPage allocate(Object... args ); void deallocate( ContiguousPage that ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PageRecycleStrategy.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.slime.chunk.ContiguousPage; public interface PageRecycleStrategy extends Pinenut { PageScheduler parentScheduler(); boolean qualified( ContiguousPage that ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PageScheduler.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.framework.system.prototype.Pinenut; public interface PageScheduler extends Pinenut { PageScheduler setPageRecycleStrategy( PageRecycleStrategy strategy ); PageRecycleStrategy getPageRecycleStrategy(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/RangedPageScheduler.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; public interface RangedPageScheduler extends PageScheduler { long getAutoIncrementId(); PageDivider getDivider(); ContiguousPage getMasterPage(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/RangedPageScheduler64.java ================================================ package com.pinecone.slime.chunk.scheduler; import com.pinecone.slime.chunk.ContiguousPage; import com.pinecone.slime.chunk.RangedPage; public abstract class RangedPageScheduler64 implements RangedPageScheduler { protected long mnAutoIncrementId; protected RangedPage mMasterPage; protected PagePool mPagePool; protected PageDivider mDivider; protected PageRecycleStrategy mRecycleStrategy; protected RangedPageScheduler64( RangedPage masterPage, PagePool pagePool, PageDivider divider, long autoIncrementId ) { this.mMasterPage = masterPage; this.mPagePool = pagePool; this.mDivider = divider; this.mnAutoIncrementId = autoIncrementId; this.mRecycleStrategy = null; } protected RangedPageScheduler64( PageDivider divider, long autoIncrementId ) { this( (RangedPage) divider.getMasterChunk(), divider.getPagePool(), divider, autoIncrementId ); this.mDivider.setPageIdOffset( autoIncrementId ); } protected abstract void beforeActivatePage() ; @Override public PageScheduler setPageRecycleStrategy( PageRecycleStrategy strategy ) { this.mRecycleStrategy = strategy; return this; } @Override public PageRecycleStrategy getPageRecycleStrategy() { return this.mRecycleStrategy; } @Override public long getAutoIncrementId() { return this.mnAutoIncrementId; } @Override public PageDivider getDivider() { return this.mDivider; } @Override public ContiguousPage getMasterPage() { return this.mMasterPage; } } 
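A minimal sketch of driving the batch scheduler above (illustrative, not a repository file; it assumes RangedPage64 satisfies the Splitunk master-chunk contract of FixedPageDivider64, which depends on RangedChunk64's supertypes declared earlier in the repository, and the 4096 / 256 / 8 values are invented):

// Sketch only; the cast reflects the stated Splitunk assumption.
RangedPage64 master = new RangedPage64( 0, 4096, 0 );
FixedPageDivider64 divider =
        new FixedPageDivider64( (Splitunk) master, new DirectPagePool( RangedPage64.class ), 256 );
LocalBatchActivePageScheduler64 scheduler =
        new LocalBatchActivePageScheduler64( divider, 0, 8 );    // autoIncrementId = 0, batchSize = 8
ContiguousPage[] live = scheduler.activates();                   // activates up to 8 of the 16 allocatable pages
scheduler.deactivate( live[0] );                                 // unregisters the page and returns it to the pool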
================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/ArchSequentialChunkGroup.java ================================================ package com.pinecone.slime.cluster; import com.pinecone.framework.unit.LinkedMultiValueMap; import com.pinecone.framework.unit.MultiValueMap; import com.pinecone.framework.util.json.JSON; import com.pinecone.slime.unitization.Precision; import com.pinecone.slime.unitization.Precision64; import com.pinecone.slime.chunk.Chunk; import java.util.ArrayList; import java.util.Iterator; import java.util.List; public abstract class ArchSequentialChunkGroup implements SequentialChunkGroup { protected long mnId; protected List mChunkList; protected MultiValueMap mChunkRegister; protected ArchSequentialChunkGroup() { this.mChunkList = new ArrayList<>(); this.mChunkRegister = new LinkedMultiValueMap<> (); } @Override public long getId() { return this.mnId; } @Override public void setId( long id ) { this.mnId = id; } @Override public Precision size() { return new Precision64( this.getSequentialChunks().size() ); } @Override public void add( Chunk that ) { this.mChunkList.add( that ); this.mChunkRegister.add( that.getId(), that ); } @Override public List getChunksById( long id ){ return this.mChunkRegister.get( id ); } @Override public Chunk getFirstChunkById( long id ){ return this.mChunkRegister.getFirst( id ); } @Override public void remove( Chunk that ) { List chunks = this.getChunksById( that.getId() ); if( chunks.size() > 1 ) { chunks.remove( that ); } else { this.mChunkRegister.remove( that.getId() ); } this.mChunkList.remove( that ); } @Override public void remove( long id ) { List chunks = this.getChunksById( id ); this.mChunkRegister.remove( id ); for ( Chunk c : chunks ) { this.mChunkList.remove( c ); } } @Override public List getSequentialChunks() { return this.mChunkList; } @Override public Iterator begin() { return this.getSequentialChunks().iterator(); } @Override public String toJSONString() { return JSON.stringify( this.mChunkList ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/ChunkGroup.java ================================================ package com.pinecone.slime.cluster; import com.pinecone.slime.chunk.DiscreteChunk; import com.pinecone.slime.chunk.PatriarchalChunk; public interface ChunkGroup extends Cluster, PatriarchalChunk, DiscreteChunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/Cluster.java ================================================ package com.pinecone.slime.cluster; import com.pinecone.slime.chunk.Chunk; public interface Cluster extends Chunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/RangedCluster.java ================================================ package com.pinecone.slime.cluster; import com.pinecone.slime.chunk.Continunk; public interface RangedCluster extends Cluster, Continunk { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/SequentialChunkGroup.java ================================================ package com.pinecone.slime.cluster; import com.pinecone.slime.chunk.Chunk; import java.util.Collection; import java.util.Iterator; import java.util.List; public interface SequentialChunkGroup extends ChunkGroup { void add( Chunk that ); List getChunksById( long id ); Chunk getFirstChunkById( long id 
); void remove( Chunk that ); void remove( long id ); Collection getSequentialChunks(); Iterator begin(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/entity/ArchEnumIndexableEntity.java ================================================ package com.pinecone.slime.entity; public abstract class ArchEnumIndexableEntity implements EnumIndexableEntity { protected long mnEnumId; protected ArchEnumIndexableEntity() { } @Override public long getEnumId() { return this.mnEnumId; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/entity/EnumIndexableEntity.java ================================================ package com.pinecone.slime.entity; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.id.Int64ID; public interface EnumIndexableEntity extends ObjectiveEntity { @Override default Identification getId() { return new Int64ID( this.getEnumId() ); } long getEnumId(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/entity/ObjectiveEntity.java ================================================ package com.pinecone.slime.entity; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.Identification; public interface ObjectiveEntity extends Pinenut { Identification getId(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/AlterableCacher.java ================================================ package com.pinecone.slime.map; import java.util.concurrent.TimeUnit; public interface AlterableCacher extends AlterableQuerier { V insert( Object key, V value, long expire, TimeUnit unit ) ; V insert( Object key, V value, long expireMill ); V insertIfAbsent( Object key, V value, long expireMill ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/AlterableQuerier.java ================================================ package com.pinecone.slime.map; public interface AlterableQuerier extends Querier { void clear(); V insert( Object key, V value ); V insertIfAbsent( Object key, V value ); V erase( Object key ); // No need to retrieve value. 
void expunge( Object key ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/LocalMapQuerier.java ================================================ package com.pinecone.slime.map; import com.pinecone.framework.unit.Dictium; import com.pinecone.framework.unit.ListDictium; import com.pinecone.framework.unit.MapDictium; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; public class LocalMapQuerier implements AlterableQuerier { protected Dictium mTarget; public LocalMapQuerier( Dictium dictium ) { this.mTarget = dictium; } public LocalMapQuerier( boolean bUsingList ) { if( bUsingList ) { this.mTarget = new ListDictium<>(); } else { this.mTarget = new MapDictium<>(); } } public LocalMapQuerier() { this( false ); } @Override public long size() { return this.mTarget.size(); } @Override public boolean isEmpty() { return this.mTarget.isEmpty(); } @Override public void clear() { this.mTarget.clear(); } @Override public boolean containsKey( Object key ) { return this.mTarget.containsKey( key ); } @Override public boolean containsValue( Object value ) { return this.mTarget.containsValue( value ); } @Override public V get( Object key ) { return this.mTarget.get( key ); } @Override public V insert( Object key, V value ) { return this.mTarget.insert( key, value ); } @Override public V insertIfAbsent( Object key, V value ) { return this.mTarget.insertIfAbsent( key, value ); } @Override public V erase( Object key ) { return this.mTarget.erase( key ); } @Override public void expunge( Object key ) { this.erase( key ); } @Override public Set entrySet() { return this.mTarget.entrySet(); } @Override public Collection values() { return this.mTarget.values(); } @Override public Map toMap() { return this.mTarget.toMap(); } @Override public List toList() { return this.mTarget.toList(); } @Override public boolean hasOwnProperty( Object elm ) { return this.mTarget.hasOwnProperty( elm ); } @Override public String toJSONString() { return this.mTarget.toJSONString(); } @Override public String toString() { return this.mTarget.toString(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/Mapper.java ================================================ package com.pinecone.slime.map; import com.pinecone.framework.system.prototype.PineUnit; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; public interface Mapper extends PineUnit { long size(); boolean isEmpty(); @Override boolean containsKey( Object key ); boolean containsValue( Object value ); V get( Object key ); Set entrySet(); Collection values(); Map toMap(); List toList(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/MonoKeyQueryRange.java ================================================ package com.pinecone.slime.map; import com.pinecone.slime.unitization.PartialOrderRange; public class MonoKeyQueryRange > extends PartialOrderRange implements QueryRange { protected String mszRangeKey; public MonoKeyQueryRange( T min, T max, String szRangeKey ) { super( min, max ); this.mszRangeKey = szRangeKey; } @Override public String getRangeKey() { return this.mszRangeKey; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/Querier.java ================================================ package com.pinecone.slime.map; import java.util.List; public 
interface Querier extends Mapper { default List query( Object statement ) { return List.of( this.get( statement ) ); } default List queryVal( Object statement ) { return List.of( this.get( statement ) ); } default V queryValFirst( Object statement ) { List l = this.queryVal( statement ); if( l != null && !l.isEmpty() ) { return l.get( 0 ); } return null; } @Override default boolean hasOwnProperty( Object elm ) { return this.containsKey( elm ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/QueryRange.java ================================================ package com.pinecone.slime.map; import com.pinecone.slime.unitization.PartialRange; public interface QueryRange > extends PartialRange { String getRangeKey(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/indexable/IndexableCachedMap.java ================================================ package com.pinecone.slime.map.indexable; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.unit.Mapnut; import com.pinecone.framework.util.json.JSON; import com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache; import com.pinecone.slime.source.indexable.IndexableDataManipulator; import com.pinecone.slime.source.indexable.IndexableIterableManipulator; import com.pinecone.slime.source.indexable.IndexableTargetScopeMeta; import java.util.Collection; import java.util.Map; import java.util.Set; import java.util.AbstractSet; import java.util.AbstractCollection; import java.util.Iterator; public class IndexableCachedMap implements Mapnut { private final IndexableMapQuerier mQuerier; private final IndexableIterableManipulator mManipulator; protected UniformCountSelfLoadingDictCache mCache; protected IndexableTargetScopeMeta mIndexMeta; public IndexableCachedMap( IndexableTargetScopeMeta indexMeta, UniformCountSelfLoadingDictCache cache, IndexableMapQuerier querier ) { IndexableDataManipulator manipulator = (IndexableDataManipulator) indexMeta.getDataManipulator(); if( ! ( manipulator instanceof IndexableIterableManipulator ) ) { throw new IllegalArgumentException( "Manipulator should be IterableManipulator." 
); } this.mManipulator = (IndexableIterableManipulator) manipulator; this.mIndexMeta = indexMeta; this.mCache = cache; this.mQuerier = querier; } public IndexableCachedMap( IndexableTargetScopeMeta indexMeta, UniformCountSelfLoadingDictCache cache ) { this( indexMeta, cache, new IndexableMapQuerier<>( indexMeta, cache ) ); } @Override public long megaSize() { return this.mQuerier.size(); } @Override public int size() { return (int)this.megaSize(); } @Override public boolean isEmpty() { return this.size() == 0; } @Override public void clear() { this.mQuerier.clear(); } @Override public boolean containsKey( Object key ) { return this.mQuerier.containsKey( key ); } @Override public boolean hasOwnProperty( Object key ) { return this.containsKey( key ); } @Override public boolean containsValue( Object value ) { return this.mQuerier.containsValue( value ); } @Override public V get( Object key ) { return this.mQuerier.get( key ); } @Override public Entry getEntryByKey( Object compatibleKey ) { return this.getEntryCopyByKey( compatibleKey ); } @Override @SuppressWarnings( "unchecked" ) public Entry getEntryCopyByKey( Object compatibleKey ) { return new KeyValue<>( (K)compatibleKey, this.get( compatibleKey ) ); } @Override public V put( K key, V value ) { return this.mQuerier.insert( key, value ); } @Override public V putIfAbsent( K key, V value ) { return this.mQuerier.insertIfAbsent( key, value ); } @Override public void putAll( Map m ) { for( Map.Entry kv : m.entrySet() ){ this.put( kv.getKey(), kv.getValue() ); } } @Override public V remove( Object key ) { return this.mQuerier.erase( key ); } @Override public Set keySet() { return new IndexableKeySet(); } @Override public Set > entrySet() { return new IndexableEntrySet(); } @Override public Collection values() { return new IndexableValCollection(); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } protected final class IndexableValueIterator implements Iterator { Iterator > entryIterator; IndexableValueIterator() { this.entryIterator = IndexableCachedMap.this.mManipulator.iterator( IndexableCachedMap.this.mIndexMeta ); } @Override public final boolean hasNext() { return this.entryIterator.hasNext(); } public final V next() { return this.entryIterator.next().getValue(); } } protected class IndexableEntrySet extends AbstractSet > { public final int size() { return IndexableCachedMap.this.size(); } public final void clear() { IndexableCachedMap.this.clear(); } public final Iterator > iterator() { return IndexableCachedMap.this.mManipulator.iterator( IndexableCachedMap.this.mIndexMeta ); } public final boolean contains( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); Object v = IndexableCachedMap.this.get(key); return v != null && v.equals(e.getValue()); } public final boolean remove( Object o ) { if ( this.contains(o) ) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); return IndexableCachedMap.this.remove(key) != null ; } return false; } } protected class IndexableKeySet extends AbstractSet { public final int size() { return IndexableCachedMap.this.size(); } public final void clear() { IndexableCachedMap.this.clear(); } public final Iterator iterator() { return IndexableCachedMap.this.mManipulator.keysIterator( IndexableCachedMap.this.mIndexMeta ); } public final boolean contains( Object o ) { return IndexableCachedMap.this.containsKey( o ); } public final boolean remove( Object o 
) { return IndexableCachedMap.this.remove( o ) != null; } } protected class IndexableValCollection extends AbstractCollection { public final int size() { return IndexableCachedMap.this.size(); } public final void clear() { IndexableCachedMap.this.clear(); } public final Iterator iterator() { return new IndexableValueIterator(); } } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/indexable/IndexableMapQuerier.java ================================================ package com.pinecone.slime.map.indexable; import com.pinecone.framework.system.NotImplementedException; import com.pinecone.slime.cache.CacheConstants; import com.pinecone.slime.cache.query.ConcurrentMergeLRUDictCachePage; import com.pinecone.slime.cache.query.LocalFixedLRUDictCachePage; import com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache; import com.pinecone.slime.map.AlterableCacher; import com.pinecone.slime.source.indexable.GenericIndexKeySourceRetriever; import com.pinecone.slime.source.indexable.IndexableDataManipulator; import com.pinecone.slime.source.indexable.IndexableIterableManipulator; import com.pinecone.slime.source.indexable.IndexableTargetScopeMeta; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.ArrayList; import java.util.concurrent.TimeUnit; public class IndexableMapQuerier implements AlterableCacher { private final IndexableDataManipulator mManipulator; protected UniformCountSelfLoadingDictCache mCache; protected IndexableTargetScopeMeta mIndexMeta; protected static UniformCountSelfLoadingDictCache newCache( IndexableTargetScopeMeta meta, boolean bConcurrent ) { if ( bConcurrent ) { return new ConcurrentMergeLRUDictCachePage<>( CacheConstants.DefaultCachePageMegaCapacity, new GenericIndexKeySourceRetriever<>( meta ) ); } else { return new LocalFixedLRUDictCachePage<>( CacheConstants.DefaultCachePageMegaCapacity, new GenericIndexKeySourceRetriever<>( meta ) ); } } public IndexableMapQuerier( IndexableTargetScopeMeta meta, UniformCountSelfLoadingDictCache cache ) { this.mManipulator = (IndexableDataManipulator) meta.getDataManipulator(); this.mIndexMeta = meta; this.mCache = cache; } public IndexableMapQuerier( IndexableTargetScopeMeta meta, boolean bConcurrent ) { this( meta, IndexableMapQuerier.newCache( meta, bConcurrent ) ); } public IndexableMapQuerier( IndexableTargetScopeMeta meta ) { this( meta, true ); } @Override public long size() { return this.mManipulator.counts( this.mIndexMeta, null ); } @Override public boolean isEmpty() { return this.size() == 0; } @Override public void clear() { this.mCache.clear(); this.mManipulator.purge( this.mIndexMeta ); } @Override public boolean containsKey( Object key ) { return this.mCache.implicatesKey( key ); } @Override public boolean containsValue( Object value ) { Object values = this.mManipulator.selectAllByNS( this.mIndexMeta, null, null ); if( values instanceof Collection) { return ((Collection) values).contains( value ); } else if( values instanceof Map) { return ((Map) values).values().contains( value ); } return false; } @Override public V get( Object key ) { return this.mCache.get( key ); } @Override public V insert( Object key, V value ) { this.mManipulator.insert( this.mIndexMeta, (K)key, value ); return value; } @Override public V insert( Object key, V value, long expireMill ) { this.mManipulator.insert( this.mIndexMeta, (K)key, value, expireMill ); return value; } @Override public V insert( Object key, V value, 
long expire, TimeUnit unit ) { this.insert( key, value, unit.toMillis( expire ) ); return value; } @Override public V insertIfAbsent( Object key, V value ) { if ( !this.containsKey( key ) ) { return this.insert( key, value ); } return null; } @Override public V insertIfAbsent( Object key, V value, long expireMill ) { if ( !this.containsKey( key ) ) { return this.insert( key, value, expireMill ); } return null; } @Override public V erase( Object key ) { V value = this.get( key ); this.expunge( key ); return value; } @Override public void expunge( Object key ) { this.mCache.erase( key ); this.mManipulator.deleteByKey( this.mIndexMeta, key ); } @Override public Set> entrySet() { Map map = this.toMap(); return map.entrySet(); } @Override public Collection values() { return this.toMap().values(); } @Override public Map toMap() { if( this.mManipulator instanceof IndexableIterableManipulator ) { IndexableIterableManipulator manipulator = (IndexableIterableManipulator)this.mManipulator; return new IndexableCachedMap<>( this.mIndexMeta, this.mCache, this ); } throw new NotImplementedException( "Manipulator should be IterableManipulator." ); } @Override public List toList() { return new ArrayList<>( this.values() ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/rdb/RDBMapQuerier.java ================================================ package com.pinecone.slime.map.rdb; import com.pinecone.framework.system.NotImplementedException; import com.pinecone.slime.cache.CacheConstants; import com.pinecone.slime.cache.query.SourceRetriever; import com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache; import com.pinecone.slime.cache.query.pool.BatchPageSourceRetriever; import com.pinecone.slime.cache.query.pool.CountSelfPooledPageDictCache; import com.pinecone.slime.cache.query.pool.LocalLRUPrimaryPooledDictCache; import com.pinecone.slime.map.AlterableQuerier; import com.pinecone.slime.map.MonoKeyQueryRange; import com.pinecone.slime.source.rdb.ContiguousNumIndexBatchPageSourceRetriever; import com.pinecone.slime.source.rdb.RDBQuerierDataManipulator; import com.pinecone.slime.source.rdb.RDBTargetTableMeta; import com.pinecone.slime.source.rdb.RangedRDBQuerierDataManipulator; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; public class RDBMapQuerier implements AlterableQuerier { private final RDBQuerierDataManipulator mDataMapper; protected UniformCountSelfLoadingDictCache mCache; protected RDBTargetTableMeta mTableMeta; public RDBMapQuerier( RDBTargetTableMeta tableMeta, UniformCountSelfLoadingDictCache cache ) { this.mDataMapper = (RDBQuerierDataManipulator) tableMeta.getDataManipulator(); this.mTableMeta = tableMeta; this.mCache = cache; } public RDBMapQuerier( RDBTargetTableMeta tableMeta, String szRangeKey ) { this( tableMeta, new LocalLRUPrimaryPooledDictCache<>( CacheConstants.DefaultCachePageCapacity, 5, new ContiguousNumIndexBatchPageSourceRetriever<>( tableMeta, CacheConstants.DefaultCachePageCapacity, szRangeKey ) ) ); } public RDBMapQuerier( RDBTargetTableMeta tableMeta ) { this( tableMeta, tableMeta.getPrimaryKey() ); } @Override public long size() { return this.mDataMapper.counts( this.mTableMeta,"" ); } @Override public boolean isEmpty() { return this.size() == 0; } @Override public void clear() { this.mCache.clear(); this.mDataMapper.truncate( this.mTableMeta ); } @Override public boolean containsKey( Object key ) { return this.mCache.implicatesKey( key ); } @Override 
public boolean containsValue( Object value ) { throw new NotImplementedException( "Querier::containsValue is not implemented." ); //return this.dataMapper.selectListByColumn( this.tableMeta, ) } @Override public V get( Object key ) { return this.mCache.get( key ); } @Override public List queryVal( Object statement ) { if( statement instanceof String ) { return this.mDataMapper.queryVal( this.mTableMeta, (String) statement ); } return this.mDataMapper.queryVal( this.mTableMeta, statement.toString() ); } public List query( Object statement ) { if( statement instanceof String ) { return this.mDataMapper.query( this.mTableMeta, (String) statement ); } return this.mDataMapper.query( this.mTableMeta, statement.toString() ); } @Override public V insert( Object key, V value ) { try{ this.mDataMapper.insert( this.mTableMeta, (K)key, value ); } catch ( Exception e ) { this.mDataMapper.update( this.mTableMeta, (K)key, value ); if( this.mCache.existsKey( key ) ) { this.mCache.erase( key ); } } return value; } @Override public V insertIfAbsent( Object key, V value ) { if( !this.containsKey( key ) ) { return this.insert( key, value ); } return null; } @Override public V erase( Object key ) { V v = this.get( key ); this.expunge( key ); return v; } @Override public void expunge( Object key ) { this.mCache.erase( key ); this.mDataMapper.deleteByKey( this.mTableMeta, key ); } @Override public Set > entrySet() { return this.toMap().entrySet(); } @Override public Collection values() { try{ Map map = this.toMap(); return map.values(); } catch ( NotImplementedException e ) { return this.toList(); } } @Override @SuppressWarnings( "unchecked" ) public Map toMap() { if( this.mDataMapper instanceof RangedRDBQuerierDataManipulator ) { RangedRDBQuerierDataManipulator manipulator = (RangedRDBQuerierDataManipulator)this.mDataMapper; SourceRetriever retriever = this.mCache.getSourceRetriever(); String szRangeKey = this.mTableMeta.getIndexKey(); if( this.mCache instanceof CountSelfPooledPageDictCache && retriever instanceof BatchPageSourceRetriever ) { return new RangedRDBCachedMap<>( this.mTableMeta, (CountSelfPooledPageDictCache)this.mCache, this ); } else { if( retriever instanceof BatchPageSourceRetriever ) { szRangeKey = ((BatchPageSourceRetriever) retriever).getRangeKey(); } Object max = manipulator.getMaximumRangeVal( this.mTableMeta, szRangeKey ); Object min = manipulator.getMinimumRangeVal( this.mTableMeta, szRangeKey ); if( max instanceof Comparable ) { return (Map)manipulator.selectMappedByRange( this.mTableMeta, new MonoKeyQueryRange<>( (Comparable)min, (Comparable)max, szRangeKey ) ); } } } throw new NotImplementedException( "Manipulator should be `Ranged`, and max/min should be `Comparable`."
); } @Override @SuppressWarnings( "unchecked" ) public List toList() { return ( List ) this.mDataMapper.selectList( this.mTableMeta, "" ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/map/rdb/RangedRDBCachedMap.java ================================================ package com.pinecone.slime.map.rdb; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.unit.Mapnut; import com.pinecone.framework.util.json.JSON; import com.pinecone.slime.cache.query.IterableDictCachePage; import com.pinecone.slime.cache.query.RangedDictCachePage; import com.pinecone.slime.cache.query.pool.BatchPageSourceRetriever; import com.pinecone.slime.cache.query.pool.CountSelfPooledPageDictCache; import com.pinecone.slime.map.MonoKeyQueryRange; import com.pinecone.slime.source.rdb.RDBTargetTableMeta; import com.pinecone.slime.source.rdb.RangedRDBQuerierDataManipulator; import java.util.Map; import java.util.Set; import java.util.Collection; import java.util.Iterator; import java.util.AbstractSet; import java.util.AbstractCollection; import java.util.NoSuchElementException; public class RangedRDBCachedMap implements Mapnut { private final RDBMapQuerier mQuerier; private final RangedRDBQuerierDataManipulator mDataMapper; protected CountSelfPooledPageDictCache mCache; protected RDBTargetTableMeta mTableMeta; public RangedRDBCachedMap( RDBTargetTableMeta tableMeta, CountSelfPooledPageDictCache cache, RDBMapQuerier querier ) { this.mDataMapper = (RangedRDBQuerierDataManipulator) tableMeta.getDataManipulator(); this.mTableMeta = tableMeta; this.mCache = cache; this.mQuerier = querier; } public RangedRDBCachedMap( RDBTargetTableMeta tableMeta, CountSelfPooledPageDictCache cache ) { this( tableMeta, cache, new RDBMapQuerier<>( tableMeta, cache ) ); } @Override public long megaSize() { return this.mQuerier.size(); } @Override public int size() { return (int)this.megaSize(); } @Override public boolean isEmpty() { return this.size() == 0; } @Override public void clear() { this.mQuerier.clear(); } @Override public boolean containsKey( Object key ) { return this.mQuerier.containsKey( key ); } @Override public boolean hasOwnProperty( Object key ) { return this.containsKey( key ); } @Override public boolean containsValue( Object value ) { return this.mQuerier.containsValue( value ); } @Override public V get( Object key ) { return this.mQuerier.get( key ); } @Override public Entry getEntryByKey( Object compatibleKey ) { return this.getEntryCopyByKey( compatibleKey ); } @Override @SuppressWarnings( "unchecked" ) public Entry getEntryCopyByKey( Object compatibleKey ) { return new KeyValue<>( (K)compatibleKey, this.get( compatibleKey ) ); } @Override public V put( K key, V value ) { return this.mQuerier.insert( key, value ); } @Override public V putIfAbsent( K key, V value ) { return this.mQuerier.insertIfAbsent( key, value ); } @Override public void putAll( Map m ) { for( Map.Entry kv : m.entrySet() ){ this.put( kv.getKey(), kv.getValue() ); } } @Override public V remove( Object key ) { return this.mQuerier.erase( key ); } @Override public Set keySet() { return new BufferedRDBKeySet(); } @Override public Set > entrySet() { return new BufferedRDBEntrySet(); } @Override public Collection values() { return new BufferedRDBValCollection(); } @Override public String toString() { return this.toJSONString(); } @Override public String toJSONString() { return JSON.stringify( this ); } protected abstract class BufferedRDBIterator { protected Comparable mMax; 
protected Comparable mMin; protected RangedRDBQuerierDataManipulator mDataMapper; protected RDBTargetTableMeta mTableMeta; protected BatchPageSourceRetriever mRetriever; protected long mnPageCapacity; protected String mszRangeKey; protected RangedDictCachePage mCurrentPage; protected Iterator mCurrentIter; @SuppressWarnings( "unchecked" ) public BufferedRDBIterator() { this.mDataMapper = RangedRDBCachedMap.this.mDataMapper; this.mTableMeta = RangedRDBCachedMap.this.mTableMeta; this.mRetriever = (BatchPageSourceRetriever ) RangedRDBCachedMap.this.mCache.getSourceRetriever(); // Specifically required `BatchPageSourceRetriever`. this.mszRangeKey = this.mRetriever.getRangeKey(); this.mMax = (Comparable) this.mDataMapper.getMaximumRangeVal( this.mTableMeta, this.mszRangeKey ); this.mMin = (Comparable) this.mDataMapper.getMinimumRangeVal( this.mTableMeta, this.mszRangeKey ); this.mnPageCapacity = this.mRetriever.getBatchSize(); this.mCurrentPage = this.mRetriever.retrieves( this.mMin ); if( !( this.mCurrentPage instanceof IterableDictCachePage ) ) { throw new IllegalArgumentException( "DictCachePage is not iterable." ); } IterableDictCachePage page = ( IterableDictCachePage ) this.mCurrentPage; this.mCurrentIter = page.iterator(); } @SuppressWarnings( "unchecked" ) public boolean hasNext() { if( this.mCurrentIter.hasNext() ) { return true; } this.mCurrentPage = this.mRetriever.retrieves( this.mMin, new MonoKeyQueryRange( this.mCurrentPage.getRange().getMax(), this.mRetriever.nextRangeMax( this.mCurrentPage.getRange().getMax() ), this.mszRangeKey ) ); IterableDictCachePage page = ( IterableDictCachePage ) this.mCurrentPage; this.mCurrentIter = page.iterator(); return this.mCurrentIter.hasNext(); } @SuppressWarnings( "unchecked" ) protected Map.Entry nextNode() { if ( !this.hasNext() ) { throw new NoSuchElementException(); } Object next = this.mCurrentIter.next(); if( next instanceof Map.Entry ) { return (Map.Entry) next; } throw new IllegalArgumentException( "Iterable object is not `Map.Entry`." 
); } public void remove() { this.mCurrentIter.remove(); } } protected final class BufferedRDBEntryIterator extends BufferedRDBIterator implements Iterator > { public final Map.Entry next() { return this.nextNode(); } } protected class BufferedRDBEntrySet extends AbstractSet > { public final int size() { return RangedRDBCachedMap.this.size(); } public final void clear() { RangedRDBCachedMap.this.clear(); } public final Iterator > iterator() { return new BufferedRDBEntryIterator(); } public final boolean contains( Object o ) { if ( !(o instanceof Map.Entry) ) { return false; } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); Object v = RangedRDBCachedMap.this.get(key); return v != null && v.equals(e.getValue()); } public final boolean remove( Object o ) { if ( this.contains(o) ) { Map.Entry e = (Map.Entry) o; Object key = e.getKey(); return RangedRDBCachedMap.this.remove(key) != null ; } return false; } } protected final class BufferedRDBKeyIterator extends BufferedRDBIterator implements Iterator { public final K next() { return this.nextNode().getKey(); } } protected class BufferedRDBKeySet extends AbstractSet { public final int size() { return RangedRDBCachedMap.this.size(); } public final void clear() { RangedRDBCachedMap.this.clear(); } public final Iterator iterator() { return new BufferedRDBKeyIterator(); } public final boolean contains( Object o ) { return RangedRDBCachedMap.this.containsKey( o ); } public final boolean remove( Object o ) { return RangedRDBCachedMap.this.remove( o ) != null; } } protected final class BufferedRDBValueIterator extends BufferedRDBIterator implements Iterator { public final V next() { return this.nextNode().getValue(); } } protected class BufferedRDBValCollection extends AbstractCollection { public final int size() { return RangedRDBCachedMap.this.size(); } public final void clear() { RangedRDBCachedMap.this.clear(); } public final Iterator iterator() { return new BufferedRDBValueIterator(); } } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/meta/TableIndex64Meta.java ================================================ package com.pinecone.slime.meta; public class TableIndex64Meta implements TableIndexMeta { private long mnMinId; private long mnMaxId; public TableIndex64Meta( long nMinId, long nMaxId ) { this.mnMinId = nMinId; this.mnMaxId = nMaxId; } @Override public long getMinId() { return this.mnMinId; } @Override public long getMaxId() { return this.mnMaxId; } public void setMaxId( long nMaxId ) { this.mnMaxId = nMaxId; } public void setMinId( long nMinId ) { this.mnMinId = nMinId; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/meta/TableIndexMeta.java ================================================ package com.pinecone.slime.meta; import com.pinecone.framework.system.prototype.Pinenut; public interface TableIndexMeta extends Pinenut { long getMaxId(); long getMinId(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/query/GenericPageQuery.java ================================================ package com.pinecone.slime.query; import com.pinecone.framework.system.Nullable; public class GenericPageQuery implements PageQuery { @Nullable private String key; private E value; private long offset; private long pageSize; public GenericPageQuery( String key, E value, long offset, long pageSize ) { this.key = key; this.value = value; this.offset = offset; this.pageSize = pageSize; } 
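// Convenience constructor below: `key` remains null (the field is @Nullable), for page queries addressed by value, offset and page size only.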
public GenericPageQuery( E value, long offset, long pageSize ) { this.value = value; this.offset = offset; this.pageSize = pageSize; } @Override public String getKey() { return this.key; } @Override public void setKey( String key ) { this.key = key; } @Override public E getValue() { return this.value; } @Override public void setValue( E value ) { this.value = value; } @Override public long getOffset() { return this.offset; } @Override public void setOffset( long offset ) { this.offset = offset; } @Override public long getPageSize() { return pageSize; } @Override public void setPageSize( long pageSize ) { this.pageSize = pageSize; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/query/PageQuery.java ================================================ package com.pinecone.slime.query; import com.pinecone.framework.system.prototype.Pinenut; public interface PageQuery extends Pinenut { String getKey(); void setKey( String key ); E getValue(); void setValue( E value ); long getOffset(); void setOffset( long offset ); long getPageSize(); void setPageSize( long pageSize ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/ArchQueryScopeMeta.java ================================================ package com.pinecone.slime.source; import java.util.Set; public abstract class ArchQueryScopeMeta implements UniformQueryScopeMeta { private String mszScopeName; private String mszPrimaryKey; private String mszIndexKey; private Class mValueType; private Set mValueMetaKeys; private ResultConverter mResultConverter; protected ArchQueryScopeMeta( String namespace, String primaryKey, String indexKey, Class valueType, Set valueMetaKeys ) { this.mszScopeName = namespace; this.mszPrimaryKey = primaryKey; this.mszIndexKey = indexKey; this.mValueType = valueType; this.mValueMetaKeys = valueMetaKeys; } @Override public String getScopeNS() { return this.mszScopeName; } @Override public UniformQueryScopeMeta setScopeNS( String namespace ) { this.mszScopeName = namespace; return this; } @Override public String getPrimaryKey() { return this.mszPrimaryKey; } @Override public UniformQueryScopeMeta setPrimaryKey( String primaryKey ) { this.mszPrimaryKey = primaryKey; return this; } @Override public String getIndexKey() { return this.mszIndexKey; } @Override public UniformQueryScopeMeta setIndexKey( String indexKey ) { this.mszIndexKey = indexKey; return this; } @Override public Class getValueType() { return this.mValueType; } @Override public UniformQueryScopeMeta setValueType( Class valueType ) { this.mValueType = valueType; return this; } @Override public Set getValueMetaKeys(){ return this.mValueMetaKeys; } @Override public UniformQueryScopeMeta setValueMetaKeys( Set keys ){ this.mValueMetaKeys = keys; return this; } @Override public UniformQueryScopeMeta addValueMetaKey( String key ) { this.getValueMetaKeys().add( key ); return this; } @Override public UniformQueryScopeMeta removeValueMetaKey( String key ) { this.getValueMetaKeys().remove( key ); return this; } @Override public UniformQueryScopeMeta clone() { try { return (UniformQueryScopeMeta) super.clone(); // Refers inner pointer. 
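// NOTE: Object.clone() makes a shallow copy, so the clone shares the mValueMetaKeys set and the mResultConverter with the original; the "inner pointers" are not duplicated.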
} catch ( CloneNotSupportedException e ) { // this shouldn't happen, since we are Cloneable throw new InternalError(e); } @Override @SuppressWarnings( "unchecked" ) public ResultConverter getResultConverter() { return this.mResultConverter; } @Override public UniformQueryScopeMeta setResultConverter( ResultConverter converter ) { this.mResultConverter = converter; return this; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/DAOScanner.java ================================================ package com.pinecone.slime.source; import com.pinecone.framework.util.lang.ClassScanner; public interface DAOScanner extends ClassScanner { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/DataAccessObject.java ================================================ package com.pinecone.slime.source; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.annotation.ElementType; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface DataAccessObject { String value() default ""; // Which database or data-manipulator this DAO has affinity to. // For multi-database scenarios. String scope() default ""; } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/GenericResultConverter.java ================================================ package com.pinecone.slime.source; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.slime.source.ResultConverter; import java.beans.IntrospectionException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Set; import java.util.Map; import java.text.ParseException; public class GenericResultConverter implements ResultConverter { private Class mValueType; private Set mValueMetaKeys; public GenericResultConverter( Class valueType, Set valueMetaKeys ) { this.mValueType = valueType; this.mValueMetaKeys = valueMetaKeys; } @Override @SuppressWarnings("unchecked") public V convert( Object val ) { if ( val instanceof Map ) { Map map = (Map) val; // Handling single value scenario for primitive or String if ( this.mValueMetaKeys.size() == 1 ) { Object singleValue = map.get( this.mValueMetaKeys.iterator().next() ); if ( ResultConverter.isPrimitiveOrSpecialType( this.mValueType ) ) { return (V) this.convertToType( singleValue, this.mValueType ); } } // Handling Map scenarios if ( Map.class.isAssignableFrom( this.mValueType ) ) { if ( val instanceof LinkedTreeMap && this.mValueType.isAssignableFrom( LinkedTreeMap.class ) ) { return (V) map; } else { try { Map targetMap = ( Map ) this.mValueType.getDeclaredConstructor().newInstance(); targetMap.putAll( map ); return (V) targetMap; } catch ( NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException e ) { if( this.mValueType.isAssignableFrom( val.getClass() ) ) { return (V) map; } throw new ProxyProvokeHandleException( "Error converting to target Map type.", e ); } } } // Handling Bean scenarios try { Constructor constructor = this.mValueType.getDeclaredConstructor(); constructor.setAccessible( true ); // [NOTICE] Set the constructor accessible
V bean = constructor.newInstance(); for ( Map.Entry entry : map.entrySet() ) { try{ String property = entry.getKey(); Object value = entry.getValue(); this.setBeanProperty( bean, property, value ); } catch ( IntrospectionException | InvocationTargetException | IllegalAccessException e ) { e.printStackTrace(); // continue } } return bean; } catch ( NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException e ) { throw new ProxyProvokeHandleException("Error converting to target Bean type", e); } } else if( val != null && ResultConverter.isPrimitiveOrSpecialType( val.getClass() ) ){ if ( ResultConverter.isPrimitiveOrSpecialType( this.mValueType ) ) { return (V) this.convertToType( val, this.mValueType ); } if( this.mValueType.equals( Object.class ) ){ return (V) val; } } else if( this.mValueType.equals( Object.class ) ){ return (V) val; } throw new IllegalArgumentException( "Unsupported conversion from value: " + val ); } private Object convertToType( Object value, Class type ) { if ( value == null ) { return null; } if ( type.isInstance( value ) ) { return type.cast( value ); } if ( type == String.class ) { return value.toString(); } else if ( type == int.class || type == Integer.class ) { if (value instanceof Number) { return ((Number) value).intValue(); } else { return Integer.parseInt(value.toString()); } } else if ( type == long.class || type == Long.class ) { if (value instanceof Number) { return ((Number) value).longValue(); } else { return Long.parseLong(value.toString()); } } else if ( type == double.class || type == Double.class ) { if (value instanceof Number) { return ((Number) value).doubleValue(); } else { return Double.parseDouble(value.toString()); } } else if ( type == boolean.class || type == Boolean.class ) { if (value instanceof Boolean) { return value; } else { return Boolean.parseBoolean(value.toString()); } } else if ( type == byte.class || type == Byte.class ) { if (value instanceof Number) { return ((Number) value).byteValue(); } else { return Byte.parseByte(value.toString()); } } else if ( type == short.class || type == Short.class ) { if (value instanceof Number) { return ((Number) value).shortValue(); } else { return Short.parseShort(value.toString()); } } else if ( type == float.class || type == Float.class ) { if (value instanceof Number) { return ((Number) value).floatValue(); } else { return Float.parseFloat(value.toString()); } } else if ( type == char.class || type == Character.class ) { return value.toString().charAt(0); } else if ( type == Date.class ) { if ( value instanceof Date ) { return value; } else { try { return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(value.toString()); } catch (ParseException e) { throw new IllegalArgumentException("Cannot parse date: " + value, e); } } } else if ( type.isEnum() ) { return Enum.valueOf((Class) type, value.toString()); } else if ( type == byte[].class ) { if ( value instanceof byte[] ) { return value; } else { return value.toString().getBytes(); } } else { throw new IllegalArgumentException( "Cannot convert value to type: " + type ); } } private void setBeanProperty( Object bean, String property, Object value ) throws IntrospectionException, InvocationTargetException, IllegalAccessException { java.beans.PropertyDescriptor propertyDescriptor = new java.beans.PropertyDescriptor( property, bean.getClass() ); Method writeMethod = propertyDescriptor.getWriteMethod(); if ( writeMethod != null ) { writeMethod.setAccessible( true ); writeMethod.invoke( bean, value ); } } } 
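A minimal usage sketch for the converter above. It is not part of the repository: the row fixture and the UserBean type are hypothetical, and the generic signature is assumed to be GenericResultConverter<V>( Class<V> valueType, Set<String> valueMetaKeys ) as reconstructed from the stripped source. With the full key set and a bean target, convert() instantiates the bean reflectively and writes each column through its setter; with a single value-meta key and a primitive/special target type, it unwraps and coerces that one column.

import com.pinecone.slime.source.GenericResultConverter;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

public class ResultConverterSketch {
    /** Hypothetical row bean; property names match the fixture columns. */
    public static class UserBean {
        private long   id;
        private String name;
        public long   getId()   { return this.id;   }
        public String getName() { return this.name; }
        public void setId( long id )       { this.id = id;     }
        public void setName( String name ) { this.name = name; }
    }

    public static void main( String[] args ) {
        // A row as a data manipulator might deliver it (hypothetical fixture).
        Map<String, Object> row = new LinkedHashMap<>();
        row.put( "id",   42 );
        row.put( "name", "slime" );

        // Bean scenario: each entry is written via its PropertyDescriptor setter.
        GenericResultConverter<UserBean> beanConverter = new GenericResultConverter<>( UserBean.class, row.keySet() );
        UserBean bean = beanConverter.convert( row );

        // Single-key scenario: one value-meta key plus a primitive/special target
        // unwraps the column and coerces it via convertToType (Integer 42 -> Long 42L).
        Set<String> keys = new TreeSet<>();
        keys.add( "id" );
        GenericResultConverter<Long> idConverter = new GenericResultConverter<>( Long.class, keys );
        Long id = idConverter.convert( Collections.singletonMap( "id", (Object) 42 ) );

        System.out.println( bean.getName() + " / " + id ); // slime / 42
    }
}

Note that the bean path relies on a no-arg constructor; target types without one fall through to ProxyProvokeHandleException.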
================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/ResultConverter.java ================================================ package com.pinecone.slime.source; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Date; public interface ResultConverter extends Pinenut { V convert( Object val ); static boolean isPrimitiveOrSpecialType(Class type) { return type.isPrimitive() || type == String.class || Number.class.isAssignableFrom(type) || type == Boolean.class || type == Character.class || type == Date.class || type.isEnum() || type == byte[].class; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/UniformQueryScopeMeta.java ================================================ package com.pinecone.slime.source; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Set; public interface UniformQueryScopeMeta extends Pinenut, Cloneable { String getScopeNS(); UniformQueryScopeMeta setScopeNS( String namespace ); String getPrimaryKey(); UniformQueryScopeMeta setPrimaryKey( String primaryKey ); String getIndexKey(); UniformQueryScopeMeta setIndexKey( String indexKey ); Class getValueType(); UniformQueryScopeMeta setValueType( Class valueType ); Set getValueMetaKeys(); UniformQueryScopeMeta setValueMetaKeys( Set keys ); UniformQueryScopeMeta addValueMetaKey( String key ); UniformQueryScopeMeta removeValueMetaKey( String key ); ResultConverter getResultConverter(); UniformQueryScopeMeta setResultConverter( ResultConverter converter ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/XMLResourceScanner.java ================================================ package com.pinecone.slime.source; import com.pinecone.framework.util.lang.ObjectScanner; public interface XMLResourceScanner extends ObjectScanner { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/GenericIndexKeySourceRetriever.java ================================================ package com.pinecone.slime.source.indexable; import com.pinecone.slime.cache.query.SourceRetriever; public class GenericIndexKeySourceRetriever implements SourceRetriever { private IndexableDataManipulator mManipulator; protected IndexableTargetScopeMeta mIndexMeta; public GenericIndexKeySourceRetriever( IndexableTargetScopeMeta meta ) { this.mIndexMeta = meta; this.mManipulator = (IndexableDataManipulator) meta.getDataManipulator(); } @Override public V retrieve( Object key ) { return this.mManipulator.selectByKey( this.mIndexMeta, key ); } @Override public long countsKey( Object key ) { return this.mManipulator.countsByNS( this.mIndexMeta, this.mIndexMeta.getIndexKey(), key ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/GenericIndexableTargetScopeMeta.java ================================================ package com.pinecone.slime.source.indexable; import com.pinecone.slime.source.ArchQueryScopeMeta; import com.pinecone.slime.source.ResultConverter; import java.util.Set; import java.util.TreeSet; public class GenericIndexableTargetScopeMeta extends ArchQueryScopeMeta implements IndexableTargetScopeMeta { private IndexableDataManipulator mDataManipulator; public GenericIndexableTargetScopeMeta( String scopeName, String primaryKey, String indexKey, Class valueType, IndexableDataManipulator manipulator, 
Set valueMetaKeys ) { super( scopeName, primaryKey, indexKey, valueType, valueMetaKeys ); this.mDataManipulator = manipulator; } public GenericIndexableTargetScopeMeta( String scopeName, String indexKey, Class valueType, IndexableDataManipulator manipulator, Set valueMetaKeys ) { this( scopeName, indexKey, indexKey, valueType, manipulator, valueMetaKeys ); } public GenericIndexableTargetScopeMeta( String scopeName, String indexKey, Class valueType, IndexableDataManipulator manipulator ) { this( scopeName, indexKey, valueType, manipulator, new TreeSet<>() ); } public GenericIndexableTargetScopeMeta( String scopeName, String indexKey, Class valueType ) { this( scopeName, indexKey, valueType, null ); } @Override public IndexableTargetScopeMeta setScopeNS( String namespace ) { super.setScopeNS( namespace ); return this; } @Override @SuppressWarnings( "unchecked" ) public IndexableDataManipulator getDataManipulator() { return this.mDataManipulator; } @Override public IndexableTargetScopeMeta setDataManipulator( IndexableDataManipulator manipulator ){ this.mDataManipulator = manipulator; return this; } @Override public IndexableTargetScopeMeta setPrimaryKey( String primaryKey ) { super.setPrimaryKey( primaryKey ); return this; } @Override public IndexableTargetScopeMeta setIndexKey( String indexKey ) { super.setIndexKey( indexKey ); return this; } @Override public IndexableTargetScopeMeta setValueType( Class valueType ) { super.setValueType( valueType ); return this; } @Override public IndexableTargetScopeMeta setValueMetaKeys( Set keys ){ super.setValueMetaKeys( keys ); return this; } @Override public IndexableTargetScopeMeta addValueMetaKey( String key ) { super.addValueMetaKey( key ); return this; } @Override public IndexableTargetScopeMeta removeValueMetaKey( String key ) { super.removeValueMetaKey( key ); return this; } @Override public IndexableTargetScopeMeta setResultConverter( ResultConverter converter ) { super.setResultConverter( converter ); return this; } @Override public GenericIndexableTargetScopeMeta clone() { return (GenericIndexableTargetScopeMeta) super.clone(); // Refers inner pointer. 
} } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/IndexableDataManipulator.java ================================================ package com.pinecone.slime.source.indexable; import com.pinecone.framework.system.prototype.Pinenut; import java.util.Collection; import java.util.List; public interface IndexableDataManipulator extends Pinenut { // Counting: szScopeKey long counts ( IndexableTargetScopeMeta meta, String szScopeKey ); // Counting: Namespace::key long countsByNS ( IndexableTargetScopeMeta meta, String szNamespace, Object key ); // Counting: Namespace long countsNS ( IndexableTargetScopeMeta meta, String szNamespace ); List query ( IndexableTargetScopeMeta meta, String szStatement ); List queryVal ( IndexableTargetScopeMeta meta, String szStatement ); Object selectAllByNS ( IndexableTargetScopeMeta meta, String szNamespace, Object key ); List selectsByNS ( IndexableTargetScopeMeta meta, String szNamespace, Object key ); V selectByNS ( IndexableTargetScopeMeta meta, String szNamespace, Object key ); V selectByKey ( IndexableTargetScopeMeta meta, Object key ); void insertByNS ( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity ); void insert ( IndexableTargetScopeMeta meta, K key, V entity ); void insert ( IndexableTargetScopeMeta meta, K key, V entity, long expireMill ); void updateByNS ( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity ); void update ( IndexableTargetScopeMeta meta, K key, V entity ); void deleteByNS ( IndexableTargetScopeMeta meta, String szNamespace, Object key ); void deleteByKey ( IndexableTargetScopeMeta meta, Object key ); void purge ( IndexableTargetScopeMeta meta ); void purgeByNS ( IndexableTargetScopeMeta meta, String szNamespace ); void commit (); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/IndexableIterableManipulator.java ================================================ package com.pinecone.slime.source.indexable; import java.util.Iterator; import java.util.Map; public interface IndexableIterableManipulator extends IndexableDataManipulator { Iterator keysIterator( IndexableTargetScopeMeta meta ); Iterator > iterator( IndexableTargetScopeMeta meta ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/IndexableTargetScopeMeta.java ================================================ package com.pinecone.slime.source.indexable; import com.pinecone.slime.source.ResultConverter; import com.pinecone.slime.source.UniformQueryScopeMeta; import java.util.Set; public interface IndexableTargetScopeMeta extends UniformQueryScopeMeta { @Override IndexableTargetScopeMeta setPrimaryKey( String primaryKey ); @Override IndexableTargetScopeMeta setIndexKey( String indexKey ); @Override IndexableTargetScopeMeta setValueType( Class valueType ); IndexableDataManipulator getDataManipulator(); IndexableTargetScopeMeta setDataManipulator( IndexableDataManipulator manipulator ); @Override Set getValueMetaKeys(); @Override IndexableTargetScopeMeta setValueMetaKeys( Set keys ); @Override IndexableTargetScopeMeta addValueMetaKey( String key ); @Override IndexableTargetScopeMeta removeValueMetaKey( String key ); @Override IndexableTargetScopeMeta setResultConverter( ResultConverter converter ); } ================================================ FILE: 
Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/ArchRelationalDatabase.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.framework.util.json.homotype.MapStructure; import java.util.regex.Matcher; import java.util.regex.Pattern; public abstract class ArchRelationalDatabase implements RelationalDatabase { @MapStructure( "host" ) protected String mHost; @MapStructure( "username" ) protected String mUsername; @MapStructure( "password" ) protected String mPassword; @MapStructure( "database" ) protected String mDatabase; @MapStructure( "port" ) protected int mPort; @MapStructure( "charset" ) protected String mCharset = "utf8"; @MapStructure( "tablePrefix" ) protected String mTablePrefix; @MapStructure( "dbType" ) protected String mDBType; @MapStructure( "Enable" ) protected boolean mEnable = true; @Override public String getHost() { return this.mHost; } @Override public void setHost( String host ) { this.mHost = host; } @Override public String getUsername() { return this.mUsername; } @Override public void setUsername( String username ) { this.mUsername = username; } @Override public String getPassword() { return this.mPassword; } @Override public void setPassword( String password ) { this.mPassword = password; } @Override public String getDatabase() { return this.mDatabase; } @Override public void setDatabase( String database ) { this.mDatabase = database; } @Override public int getPort() { return this.mPort; } @Override public void setPort( int port ) { this.mPort = port; } @Override public String getCharset() { return this.mCharset; } @Override public void setCharset( String charset ) { this.mCharset = charset; } @Override public String getTablePrefix() { return this.mTablePrefix; } @Override public void setTablePrefix( String tablePrefix ) { this.mTablePrefix = tablePrefix; } @Override public boolean isEnabled() { return this.mEnable; } @Override public void setEnabled( boolean enabled ) { this.mEnable = enabled; } @Override public String getDBType() { return this.mDBType; } @Override public void setDBType( String dbType ) { this.mDBType = dbType; } @Override public String getJDBCURL() { String url = "jdbc:" + this.mDBType + "://" + this.mHost + ":" + this.mPort + "/" + this.mDatabase; if( this.mCharset.toLowerCase().startsWith( "utf" ) ) { // utf-8, utf8, etc... 
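// e.g. with illustrative values (mysql, 127.0.0.1, 3306, hydra, utf8): jdbc:mysql://127.0.0.1:3306/hydra?useUnicode=true&characterEncoding=utf8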
url = url +"?useUnicode=true&characterEncoding=" + this.mCharset; } else { url = url +"?characterEncoding=" + this.mCharset; } return url; } @Override public void fromJDBCURL( String jdbcUrl ) { Pattern pattern = Pattern.compile( "jdbc:(\\w+):\\/\\/(.+):(\\d+)\\/(.+)\\?useUnicode=true&characterEncoding=(\\w+)" ); Matcher matcher = pattern.matcher( jdbcUrl ); boolean bMatched = false; if ( matcher.matches() ) { bMatched = true; } else { pattern = Pattern.compile( "jdbc:(\\w+):\\/\\/(.+):(\\d+)\\/(.+)\\?characterEncoding=(\\w+)" ); matcher = pattern.matcher( jdbcUrl ); bMatched = matcher.matches(); } if ( bMatched ) { this.mDBType = matcher.group(1); this.mHost = matcher.group(2); this.mPort = Integer.parseInt(matcher.group(3)); this.mDatabase = matcher.group(4); this.mCharset = matcher.group(5); } else { throw new IllegalArgumentException( "Invalid JDBC URL format: " + jdbcUrl ); } } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/ContiguousNumIndexBatchPageSourceRetriever.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.unit.MapDictium; import com.pinecone.slime.cache.query.RangedDictCachePage; import com.pinecone.slime.cache.query.pool.BatchPageSourceRetriever; import com.pinecone.slime.cache.query.pool.LocalRangedDictCachePage; import com.pinecone.slime.map.MonoKeyQueryRange; import com.pinecone.slime.map.QueryRange; import com.pinecone.slime.unitization.PartialRange; import java.util.Map; public class ContiguousNumIndexBatchPageSourceRetriever, V > implements BatchPageSourceRetriever { private RangedRDBQuerierDataManipulator mDataMapper; protected RDBTargetTableMeta mTableMeta; protected int mnPageCapacity; protected String mszRangeKey; public ContiguousNumIndexBatchPageSourceRetriever( RDBTargetTableMeta tableMeta, int nPageCapacity, String szRangeKey ) { this.mTableMeta = tableMeta; this.mDataMapper = (RangedRDBQuerierDataManipulator) tableMeta.getDataManipulator(); this.mnPageCapacity = nPageCapacity; this.mszRangeKey = szRangeKey; } @Override public long getBatchSize() { return this.mnPageCapacity; } @Override @SuppressWarnings( "unchecked" ) public > T nextRangeMax( T key ) { if ( key instanceof Integer ) { return (T)(Integer)( (Integer) key + this.mnPageCapacity ); } else if ( key instanceof Long ) { return (T)(Long)( (Long) key + this.mnPageCapacity ); } else if ( key instanceof Short ) { return (T)(Short)( (Integer)( (Short) key + this.mnPageCapacity ) ).shortValue(); } else if ( key instanceof Double ) { return (T)(Double)( (Double) key + this.mnPageCapacity ); } else if ( key instanceof Float ) { return (T)(Float)( (Float) key + this.mnPageCapacity ); } else if ( key instanceof Byte ) { return (T)(Byte)( (Integer)( (Byte) key + this.mnPageCapacity ) ).byteValue(); } else { throw new IllegalArgumentException( "Unsupported number type." 
); } } @SuppressWarnings( "unchecked" ) public QueryRange queryRangeOnly( Object key ) { if ( key instanceof Integer ) { return (QueryRange) this.calculateRangeForInteger( (Integer) key ); } else if ( key instanceof Long ) { return (QueryRange) this.calculateRangeForLong((Long) key); } else if ( key instanceof Short ) { return (QueryRange) this.calculateRangeForShort((Short) key); } else if ( key instanceof Double ) { return (QueryRange) this.calculateRangeForDouble((Double) key); } else if ( key instanceof Float ) { return (QueryRange) this.calculateRangeForFloat((Float) key); } else if ( key instanceof Byte ) { return (QueryRange) this.calculateRangeForByte((Byte) key); } else if ( key instanceof QueryRange ) { return (QueryRange) key; } else { throw new IllegalArgumentException( "Unsupported number type." ); } } protected QueryRange calculateRangeForLong( Long key ) { long start = (key / this.mnPageCapacity) * this.mnPageCapacity; long end = start + this.mnPageCapacity; return new MonoKeyQueryRange<>( start, end, this.mszRangeKey ); } protected QueryRange calculateRangeForInteger(Integer key ) { int start = (key / this.mnPageCapacity) * this.mnPageCapacity; int end = start + this.mnPageCapacity; return new MonoKeyQueryRange<>( start, end, this.mszRangeKey ); } protected QueryRange calculateRangeForShort( Short key ) { short start = (short) ((key / this.mnPageCapacity) * this.mnPageCapacity); short end = (short) (start + this.mnPageCapacity); return new MonoKeyQueryRange<>( start, end, this.mszRangeKey ); } protected QueryRange calculateRangeForDouble( Double key ) { double start = Math.floor(key / this.mnPageCapacity) * this.mnPageCapacity; double end = start + this.mnPageCapacity; return new MonoKeyQueryRange<>( start, end, this.mszRangeKey ); } protected QueryRange calculateRangeForFloat( Float key ) { float start = (float) Math.floor(key / this.mnPageCapacity) * this.mnPageCapacity; float end = start + this.mnPageCapacity; return new MonoKeyQueryRange<>( start, end, this.mszRangeKey ); } protected QueryRange calculateRangeForByte( Byte key ) { byte start = (byte) ((key / this.mnPageCapacity) * this.mnPageCapacity); byte end = (byte) (start + this.mnPageCapacity); return new MonoKeyQueryRange<>( start, end, this.mszRangeKey ); } @Override public String getRangeKey() { return this.mszRangeKey; } @Override public V retrieve( Object key ) { return this.mDataMapper.selectByKey( this.mTableMeta, key ); } @Override @SuppressWarnings( "unchecked" ) public > RangedDictCachePage retrieves( Object key, @Nullable PartialRange range ) { QueryRange queryRange; if( range == null ) { queryRange = ( QueryRange )this.queryRangeOnly( key ); } else { queryRange = new MonoKeyQueryRange<>( range.getMin(), range.getMax(), this.mszRangeKey ); } Map map = this.mDataMapper.selectMappedByRange( this.mTableMeta, queryRange ); return new LocalRangedDictCachePage<>( -1, this.mnPageCapacity, new MapDictium<>( map ), (PartialRange)queryRange ); } @Override public RangedDictCachePage retrieves( Object key ) { return this.retrieves( key, null ); } @Override public > long counts( PartialRange range ) { QueryRange queryRange; if( range instanceof QueryRange ) { queryRange = ( QueryRange ) range; } else { queryRange = new MonoKeyQueryRange<>( range.getMin(), range.getMax(), this.mszRangeKey ); } return this.mDataMapper.countsByRange( this.mTableMeta, queryRange ); } @Override @SuppressWarnings( "unchecked" ) public long countsKey( Object key ) { if( key instanceof Comparable ) { return this.mDataMapper.countsByRange( 
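// Degenerate range [key, key]: counts only the rows whose range key equals `key` exactly.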
this.mTableMeta, new MonoKeyQueryRange<>( (Comparable)key, (Comparable)key, this.mszRangeKey ) ); } throw new IllegalArgumentException( "Key should be comparable." ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/GenericRDBTargetTableMeta.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.slime.source.ArchQueryScopeMeta; import com.pinecone.slime.source.ResultConverter; import java.util.Set; import java.util.TreeSet; public class GenericRDBTargetTableMeta extends ArchQueryScopeMeta implements RDBTargetTableMeta { private RDBQuerierDataManipulator mDataManipulator; public GenericRDBTargetTableMeta( String tableName, String primaryKey, String indexKey, Class valueType, RDBQuerierDataManipulator manipulator, Set valueMetaKeys ) { super( tableName, primaryKey, indexKey, valueType, valueMetaKeys ); this.mDataManipulator = manipulator; } public GenericRDBTargetTableMeta( String tableName, String indexKey, Class valueType, RDBQuerierDataManipulator manipulator, Set valueMetaKeys ) { this( tableName, indexKey, indexKey, valueType, manipulator, valueMetaKeys ); } public GenericRDBTargetTableMeta( String tableName, String indexKey, Class valueType, RDBQuerierDataManipulator manipulator ) { this( tableName, indexKey, valueType, manipulator, new TreeSet<>() ); } public GenericRDBTargetTableMeta( String tableName, String indexKey, Class valueType ) { this( tableName, indexKey, valueType, null ); } @Override public RDBTargetTableMeta setScopeNS( String namespace ) { super.setScopeNS( namespace ); return this; } @Override public String getTableName() { return this.getScopeNS(); } @Override public RDBTargetTableMeta setTableName( String tableName ) { return this.setScopeNS( tableName ); } @Override @SuppressWarnings( "unchecked" ) public RDBQuerierDataManipulator getDataManipulator() { return this.mDataManipulator; } @Override public RDBTargetTableMeta setDataManipulator( RDBQuerierDataManipulator manipulator ){ this.mDataManipulator = manipulator; return this; } @Override public RDBTargetTableMeta setPrimaryKey( String primaryKey ) { super.setPrimaryKey( primaryKey ); return this; } @Override public RDBTargetTableMeta setIndexKey( String indexKey ) { super.setIndexKey( indexKey ); return this; } @Override public RDBTargetTableMeta setValueType( Class valueType ) { super.setValueType( valueType ); return this; } @Override public RDBTargetTableMeta setValueMetaKeys( Set keys ){ super.setValueMetaKeys( keys ); return this; } @Override public RDBTargetTableMeta addValueMetaKey( String key ) { super.addValueMetaKey( key ); return this; } @Override public RDBTargetTableMeta removeValueMetaKey( String key ) { super.removeValueMetaKey( key ); return this; } @Override public RDBTargetTableMeta setResultConverter( ResultConverter converter ) { super.setResultConverter( converter ); return this; } @Override public GenericRDBTargetTableMeta clone() { return (GenericRDBTargetTableMeta) super.clone(); // Refers inner pointer. 
} } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/GenericSingleKeySourceRetriever.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.slime.cache.query.SourceRetriever; public class GenericSingleKeySourceRetriever implements SourceRetriever { private RangedRDBQuerierDataManipulator mDataMapper; protected RDBTargetTableMeta mTableMeta; public GenericSingleKeySourceRetriever( RDBTargetTableMeta tableMeta ) { this.mTableMeta = tableMeta; this.mDataMapper = (RangedRDBQuerierDataManipulator) tableMeta.getDataManipulator(); } @Override public V retrieve( Object key ) { return this.mDataMapper.selectByKey( this.mTableMeta, key ); } @Override public long countsKey( Object key ) { return this.mDataMapper.countsByColumn( this.mTableMeta, this.mTableMeta.getIndexKey(), key ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RDBClient.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.slime.source.DAOScanner; import com.pinecone.slime.source.DataAccessObject; import java.lang.annotation.Annotation; import java.util.List; public interface RDBClient extends Pinenut { String getInstanceName(); void close(); boolean isTerminated(); DAOScanner getDataAccessObjectScanner(); List > addDataAccessObjectScope( String szPacketName ); List > addDataAccessObjectScope( String szPacketName, boolean bIgnoreOwnedChecked ); List > addDataAccessObjectScopeNoneSync( String szPacketName, boolean bIgnoreOwnedChecked ); default boolean hasOwnDataAccessObject( Class clazz ) { Annotation[] annotations = clazz.getAnnotations(); for( Annotation annotation : annotations ) { if( annotation instanceof DataAccessObject ) { String s = ((DataAccessObject) annotation).scope(); if( s.isEmpty() || s.equals( this.getInstanceName() ) ){ return true; } } } return false; } String getJDBCURL(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RDBQuerierDataManipulator.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.framework.system.prototype.Pinenut; import java.util.List; public interface RDBQuerierDataManipulator extends Pinenut { long counts ( RDBTargetTableMeta meta, String szExSafeSQL ); long countsByColumn ( RDBTargetTableMeta meta, String szSpecificColumnKeyName, Object key ); List selectList ( RDBTargetTableMeta meta, String szExSafeSQL ); List query ( RDBTargetTableMeta meta, String szStatementSQL ); List queryVal ( RDBTargetTableMeta meta, String szStatementSQL ); List selectListByColumn ( RDBTargetTableMeta meta, String szSpecificColumnKeyName, Object key ); V selectByKey ( RDBTargetTableMeta meta, Object key ); void insert ( RDBTargetTableMeta meta, K key, V entity ); void update ( RDBTargetTableMeta meta, K key, V entity ); void deleteByKey ( RDBTargetTableMeta meta, Object key ); void truncate ( RDBTargetTableMeta meta ); void commit (); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RDBTargetTableMeta.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.slime.source.ResultConverter; import com.pinecone.slime.source.UniformQueryScopeMeta; import 
java.util.Set; public interface RDBTargetTableMeta extends UniformQueryScopeMeta { @Override default String getScopeNS() { return this.getTableName(); } @Override default UniformQueryScopeMeta setScopeNS( String namespace ) { return this.setTableName( namespace ); } String getTableName(); RDBTargetTableMeta setTableName( String tableName ); @Override RDBTargetTableMeta setPrimaryKey( String primaryKey ); @Override RDBTargetTableMeta setIndexKey( String indexKey ); @Override RDBTargetTableMeta setValueType( Class valueType ); RDBQuerierDataManipulator getDataManipulator(); RDBTargetTableMeta setDataManipulator( RDBQuerierDataManipulator manipulator ); /** * ValueMetaKeys * if set is empty => SELECT * FROM => map / bean * if set has one => SELECT set[0] FROM => map / bean / primitive * if set has more => SELECT ...set FROM => map / bean */ @Override Set getValueMetaKeys(); @Override RDBTargetTableMeta setValueMetaKeys( Set keys ); @Override RDBTargetTableMeta addValueMetaKey( String key ); @Override RDBTargetTableMeta removeValueMetaKey( String key ); @Override RDBTargetTableMeta setResultConverter( ResultConverter converter ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RangedRDBQuerierDataManipulator.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.slime.map.QueryRange; import java.util.List; import java.util.Map; public interface RangedRDBQuerierDataManipulator extends RDBQuerierDataManipulator { long countsByRange ( RDBTargetTableMeta meta, QueryRange range ); List selectListByRange ( RDBTargetTableMeta meta, QueryRange range ); Map selectMappedByRange ( RDBTargetTableMeta meta, QueryRange range ); Object getMaximumRangeVal ( RDBTargetTableMeta meta, String szRangeKeyName ); Object getMinimumRangeVal ( RDBTargetTableMeta meta, String szRangeKeyName ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RelationalDatabase.java ================================================ package com.pinecone.slime.source.rdb; import com.pinecone.framework.system.prototype.Pinenut; public interface RelationalDatabase extends Pinenut { String getHost(); void setHost( String host ); String getUsername(); void setUsername( String username ); String getPassword(); void setPassword( String password ); String getDatabase(); void setDatabase( String database ); int getPort(); void setPort( int port ); String getCharset(); void setCharset( String charset ); String getTablePrefix(); void setTablePrefix( String tablePrefix ); boolean isEnabled(); void setEnabled( boolean enabled ); String getDBType(); void setDBType( String dbType ); String getJDBCURL(); void fromJDBCURL( String jdbcUrl ); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/IntervalRangeComparator.java ================================================ package com.pinecone.slime.unitization; import java.util.Comparator; public class IntervalRangeComparator implements Comparator { @Override @SuppressWarnings( "unchecked" ) public int compare( Object o1, Object o2 ) { if( o1 instanceof PartialRange && o2 instanceof PartialRange ) { return ((PartialRange) o1).compareTo( (PartialRange)o2 ); } else if ( o1 instanceof PartialRange && o2 instanceof Comparable ) { PartialRange range = (PartialRange) o1; return range.compareTo( (Comparable)o2 ); } else if ( o1 instanceof Comparable && o2 
instanceof PartialRange ) { return -this.compare( o2, o1 ); } else { throw new IllegalArgumentException( "Objects are not of type PartialRange or Comparable" ); } } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/LinerRange.java ================================================ package com.pinecone.slime.unitization; public interface LinerRange extends Range { Precision getPrimePrecision(); Number span(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/MinMaxRange.java ================================================ package com.pinecone.slime.unitization; import java.math.BigDecimal; import java.math.BigInteger; public interface MinMaxRange extends LinerRange, Comparable { Number getMin(); Number getMax(); void setRange( Number min, Number max ); void setMin ( Number min ); void setMax ( Number max ); @Override default String toJSONString() { return String.format( "{\"class\":\"%s\",\"min\":%s,\"max\":%s}", this.className(), this.getMin(), this.getMax() ); } @Override default int compareTo( MinMaxRange o ) { if ( this == o ) { return 0; } if ( o == null ) { return 1; } Number min = this.getMin(); if( min instanceof Double || min instanceof Float ) { int minCompare = Double.compare( this.getMin().doubleValue(), o.getMin().doubleValue() ); if ( minCompare != 0 ) { return minCompare; } return Double.compare( this.getMax().doubleValue(), o.getMax().doubleValue() ); } else if( min instanceof Integer || min instanceof Long || min instanceof Short || min instanceof Byte ) { int minCompare = Long.compare( this.getMin().longValue(), o.getMin().longValue() ); if ( minCompare != 0 ) { return minCompare; } return Long.compare( this.getMax().longValue(), o.getMax().longValue() ); } else if( min instanceof BigInteger ) { int minCompare = ( (BigInteger)this.getMin() ).compareTo( (BigInteger)o.getMin() ); if ( minCompare != 0 ) { return minCompare; } return ( (BigInteger)this.getMax() ).compareTo( (BigInteger)o.getMax() ); } else if( min instanceof BigDecimal ) { int minCompare = ( (BigDecimal)this.getMin() ).compareTo( (BigDecimal)o.getMin() ); if ( minCompare != 0 ) { return minCompare; } return ( (BigDecimal)this.getMax() ).compareTo( (BigDecimal)o.getMax() ); } throw new IllegalArgumentException( "Unknown number to compare." ); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/MinMaxRange64.java ================================================ package com.pinecone.slime.unitization; public class MinMaxRange64 implements MinMaxRange { protected long mnMin; protected long mnMax; protected Precision mPrimePrecision; // Precision(1)=> for the array-like structure. 
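// Usage sketch (illustrative): new MinMaxRange64( 0L, 100L ).contains( 42L ) == true; span() == 100L.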
public MinMaxRange64( long nMin, long nMax, Precision primePrecision ) { this.mnMin = nMin; this.mnMax = nMax; this.mPrimePrecision = primePrecision; } public MinMaxRange64( long nMin, long nMax ) { this( nMin, nMax, NumPrecision.PRECISION_64_1 ); } @Override public Long getMin() { return this.mnMin; } @Override public Long getMax() { return this.mnMax; } @Override public void setRange( Number min, Number max ){ this.setMin( min ); this.setMax( max ); } @Override public void setMin ( Number min ){ this.mnMin = min.longValue(); } @Override public void setMax ( Number max ) { this.mnMax = max.longValue(); } @Override public Long span() { return this.mnMax - this.mnMin; } @Override public Precision getPrimePrecision() { return this.mPrimePrecision; } @Override public boolean contains( Range that ) { MinMaxRange range = (MinMaxRange) that; return this.mnMin <= range.getMin().longValue() && this.mnMax >= range.getMax().longValue(); } @Override public boolean contains( Object elm ) { long e = ( (Number) elm ).longValue(); return this.mnMin <= e && this.mnMax >= e; } @Override public int compareTo( MinMaxRange o ) { if ( this == o ) { return 0; } if ( o == null ) { return 1; } int minCompare = Long.compare( this.mnMin, o.getMin().longValue() ); if ( minCompare != 0 ) { return minCompare; } return Long.compare( this.mnMax, o.getMax().longValue() ); } @Override public boolean equals( Object obj ) { if( super.equals( obj ) ) { return true; } if( obj instanceof MinMaxRange ) { return this.getMin().equals( ((MinMaxRange) obj).getMin().longValue() ) && this.getMax().equals( ((MinMaxRange) obj).getMax().longValue() ); } return false; } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/NumPrecision.java ================================================ package com.pinecone.slime.unitization; public interface NumPrecision extends Precision { NumPrecision PRECISION_64_1 = new Precision64(1); Number numericValue(); long longValue(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/PartialOrderRange.java ================================================ package com.pinecone.slime.unitization; public class PartialOrderRange > implements PartialRange { protected T mMin; protected T mMax; public PartialOrderRange( T min, T max ) { this.mMin = min; this.mMax = max; } @Override public T getMin(){ return this.mMin; } @Override public T getMax(){ return this.mMax; } @Override public void setRange( T min, T max ){ this.mMin = min; this.mMax = max; } @Override public void setMin ( T min ){ this.mMin = min; } @Override public void setMax ( T max ){ this.mMax = max; } @Override @SuppressWarnings( "unchecked" ) public boolean contains( Range that ) { if ( !( that instanceof PartialOrderRange ) ) { throw new ClassCastException("Range is not a PartialOrderRange."); } PartialOrderRange range = (PartialOrderRange) that; return ( range.getMin().compareTo( this.mMin ) >= 0) && ( range.getMax().compareTo( this.mMax ) <= 0 ); } @Override @SuppressWarnings( "unchecked" ) public boolean equals( Object obj ) { if( super.equals( obj ) ) { return true; } if( obj instanceof PartialOrderRange ) { PartialOrderRange range = (PartialOrderRange) obj; return ( range.getMin().compareTo( this.mMin ) == 0) && ( range.getMax().compareTo( this.mMax ) == 0 ); } return false; } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: 
Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/PartialRange.java ================================================ package com.pinecone.slime.unitization; public interface PartialRange > extends Range, Comparable > { T getMin(); T getMax(); void setRange( T min, T max ); void setMin ( T min ); void setMax ( T max ); @Override default String toJSONString() { return String.format( "{\"class\":\"%s\",\"min\":%s,\"max\":%s}", this.className(), this.getMin(), this.getMax() ); } @Override default boolean contains( Object elm ) { if ( elm == null ) { return false; } if ( !( elm instanceof Comparable ) ) { throw new ClassCastException( "Element is not comparable." ); } @SuppressWarnings( "unchecked" ) Comparable comparableElm = (Comparable) elm; return ( comparableElm.compareTo((T) this.getMin() ) >= 0 ) && ( comparableElm.compareTo((T) this.getMax()) <= 0 ); // [min, max] } @Override default int compareTo( PartialRange o ) { int minCompare = this.getMin().compareTo( o.getMin() ); if ( minCompare != 0 ) { return minCompare; } return this.getMax().compareTo( o.getMax() ); } default int compareTo( T that ) { if( this.contains( that ) ) { return 0; } else if ( this.getMin().compareTo( that ) > 0 ) { // this > that return 1; } else if ( this.getMax().compareTo( that ) < 0 ) { // this < that return -1; } else { return 0; // Unreachable for a well-formed [min, max] range; treated as contained. } } IntervalRangeComparator DefaultIntervalRangeComparator = new IntervalRangeComparator(); } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/Precision.java ================================================ package com.pinecone.slime.unitization; import com.pinecone.framework.system.prototype.Pinenut; public interface Precision extends Pinenut { } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/Precision64.java ================================================ package com.pinecone.slime.unitization; public class Precision64 extends Number implements NumPrecision { protected long precision; public Precision64( long precision ) { this.precision = precision; } @Override public Number numericValue() { return this.precision; } @Override public long longValue() { return this.precision; } @Override public int intValue() { return (int) this.precision; } @Override public float floatValue() { return (float) this.precision; } @Override public double doubleValue() { return (double) this.precision; } @Override public String toString() { return Long.toString( this.precision ); } @Override public String toJSONString() { return this.toString(); } } ================================================ FILE: Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/Range.java ================================================ package com.pinecone.slime.unitization; import com.pinecone.framework.system.prototype.Pinenut; public interface Range extends Pinenut { boolean contains( Range that ); boolean contains( Object elm ); } ================================================ FILE: Pinecones/Slime/src/test/java/com/cache/TestCache.java ================================================ package com.cache; import com.pinecone.Pinecone; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.unit.Mapnut; import com.pinecone.framework.unit.MultiValueMapper; import com.pinecone.framework.unit.multi.MultiSetMaptron; import com.pinecone.framework.unit.top.*; import
com.pinecone.framework.util.Debug; import com.pinecone.slime.unitization.PartialOrderRange; import com.pinecone.slime.unitization.PartialRange; import java.util.*; public class TestCache { public static void testRange() { PartialRange partialOrderRange = new PartialOrderRange<>( "A", "F" ); Debug.trace( partialOrderRange.contains( "C" ) ); Debug.trace( partialOrderRange.contains( "G" ) ); } public static void testRangeMap() { Map, Long > map = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator ); Debug.trace( (new PartialOrderRange<>( 10L, 20L )).compareTo( 10L ) ); Debug.trace( "8".compareTo( "9" ) ); map.put( new PartialOrderRange<>( 0L, 10L ), 10L ); map.put( new PartialOrderRange<>( 10L, 20L ), 20L ); map.put( new PartialOrderRange<>( 20L, 30L ), 30L ); map.put( new PartialOrderRange<>( 40L, 50L ), 50L ); map.put( new PartialOrderRange<>( 90L, 100L ), 100L ); map.put( new PartialOrderRange<>( 60L, 70L ), 70L ); map.put( new PartialOrderRange<>( 80L, 90L ), 90L ); Debug.trace( map ); Debug.trace( ((LinkedTreeMap, Long>) map).treeEntrySet() ); for ( int i = 0; i < 100; i++ ) { Debug.trace( i, map.containsKey( (long)i ) ); } } public static void testMultiValueEntity(){ MultiValueMapper maptron = new MultiSetMaptron<>(); //MultiValueMapper maptron = new MultiListMaptron<>(); maptron.add( 1, 10 ); maptron.add( 2, 20 ); maptron.add( 2, 22 ); maptron.add( 2, 21 ); maptron.add( 2, 22 ); Debug.trace( maptron, maptron.collection(), maptron.collectionValues() ); } public static void testMultiTreeToptron(){ MultiTreeToptron toptron = new MultiTreeToptron<>( 8 ); toptron = new LinkedMultiTreeToptron<>( 8 ); toptron.add( 1, 10 ); toptron.add( 1, 11 ); toptron.add( 4, 40 ); toptron.add( 1, 12 ); toptron.add( 2, 20 ); toptron.add( 3, 30 ); toptron.add( 3, 31 ); toptron.add( 5, 50 ); toptron.add( 6, 60 ); Debug.trace( toptron.getMap(), toptron.topEntrySet(), toptron.bottomEntrySet(), toptron.collection() ); toptron.update( 1, 16, 12 ); Debug.trace( toptron.getMap() ); //toptron.setTopmostSize( 4 ); //Debug.trace( toptron.getMap() ); } public static void testTreeToptron(){ //TreeToptron toptron = new TreeToptron<>( 3 ); LinkedTreeToptron toptron = new LinkedTreeToptron<>( 3 ); toptron.put( 1, 10 ); toptron.put( 5, 50 ); toptron.put( 1, 11 ); toptron.put( 1, 12 ); toptron.put( 2, 20 ); toptron.put( 3, 30 ); toptron.put( 3, 30 ); toptron.put( 4, 40 ); toptron.put( 6, 60 ); Debug.trace( toptron.getMap(), toptron.topEntrySet(), toptron.bottomEntrySet() ); } public static void testTopper(){ Topper > heapTopper = new HeapTopper<>(4, new Comparator>() { @Override public int compare( KeyValue o1, KeyValue o2 ) { return o1.getKey().compareTo( o2.getKey() ); } }); heapTopper.add( new KeyValue<>( 1,10 ) ); heapTopper.add( new KeyValue<>( 5,50 ) ); heapTopper.add( new KeyValue<>( 2,20 ) ); heapTopper.add( new KeyValue<>( 9,90 ) ); heapTopper.add( new KeyValue<>( 4,40 ) ); heapTopper.add( new KeyValue<>( 6,60 ) ); heapTopper.add( new KeyValue<>( 3,30 ) ); Debug.trace( heapTopper, heapTopper.nextEviction() ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestCache.testRange(); //TestCache.testRangeMap(); //TestCache.testMultiValueEntity(); //TestCache.testMultiTreeToptron(); //TestCache.testTreeToptron(); TestCache.testTopper(); Mapnut map = new LinkedTreeMap<>(); map.put( 4, 40L ); map.put( 3, 30L ); Debug.trace( map ); map.getEntryByKey( 4 ).setValue( 41L ); Debug.trace( map ); 
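// The second trace reflects the in-place mutation above: getEntryByKey( 4 ).setValue( 41L ) rewrote the live entry, so key 4 now maps to 41L.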
return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Slime/src/test/java/com/chunk/TestChunk.java ================================================ package com.chunk; import com.pinecone.slime.chunk.marshaling.*; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.slime.chunk.Chunk; import com.pinecone.slime.chunk.ContiguousPage; import com.pinecone.slime.chunk.RangedChunk64; import com.pinecone.slime.chunk.RangedPage64; import com.pinecone.slime.chunk.scheduler.DirectPagePool; import com.pinecone.slime.chunk.scheduler.FixedPageDivider64; import com.pinecone.slime.chunk.scheduler.LocalBatchActivePageScheduler64; import java.util.List; public class TestChunk { public static void testPool() { RangedPage64 page64 = new RangedPage64( 0, 850,0 ); DirectPagePool pagePool = new DirectPagePool( RangedPage64.class ); FixedPageDivider64 divider64 = new FixedPageDivider64( page64, pagePool, 100 ); Debug.trace( divider64.getMaxAllocations() ); for ( int i = 0; i < divider64.getMaxAllocations(); i++ ) { RangedPage64 page = (RangedPage64) divider64.allocate(); Debug.trace( page.getRange(), ( (RangedPage64)page.parent() ).getRange(), page.getId(), page.size() ); } } public static void testPartition() { RangedPage64 page64 = new RangedPage64( 0, 850,0 ); DirectPagePool pagePool = new DirectPagePool( RangedPage64.class ); PreparedPageDividerPartition64 partition64 = new PreparedPageDividerPartition64( page64, 0, 100 ); FixedPageDivider64 divider64 = new FixedPageDivider64( partition64, pagePool ); Debug.trace( divider64.getMaxAllocations() ); for ( int i = 0; i < divider64.getMaxAllocations(); i++ ) { RangedPage64 page = (RangedPage64) divider64.allocate(); Debug.trace( page.getRange(), ( (RangedChunk64)page.parent() ).getRange(), page.getId(), page.size() ); } Debug.trace( partition64 ); } public static void testPartitioner() { RangedPage64 page64 = new RangedPage64( 0, 1000,0 ); PreparedEvenSeqPagePartitioner64 partitioner64 = new PreparedEvenSeqPagePartitioner64( page64, 5 ); SequentialPagePartitionGroup64 group64 = partitioner64.partition(); List l = group64.getSequentialChunks(); Debug.trace( ( (PreparedPageDividerPartition64)l.get(0)).eachPerPage() ); BuddyPrepPartitionDividerStrategy64 strategy64 = new BuddyPrepPartitionDividerStrategy64( 100, 2, 1 ); strategy64.assignment( group64 ); for ( int i = 0; i < l.size();++ i ) { Debug.trace( ((PreparedPageDividerPartition64)l.get(i)).eachPerPage() ); } } public static void testPartitionablePageDivider() { RangedPage64 page64 = new RangedPage64( 0, 1000,0 ); PreparedEvenSeqPagePartitioner64 partitioner64 = new PreparedEvenSeqPagePartitioner64( page64, 6 ); SequentialPagePartitionGroup64 group64 = partitioner64.partition(); List l = group64.getSequentialChunks(); Debug.trace( ( (PreparedPageDividerPartition64)l.get(0)).eachPerPage() ); BuddyPrepPartitionDividerStrategy64 strategy64 = new BuddyPrepPartitionDividerStrategy64( 100, 2, 1 ); strategy64.assignment( group64 ); DirectPagePool pagePool = new DirectPagePool( RangedPage64.class ); PartitionablePageDivider64 divider64 = new PartitionablePageDivider64( page64, pagePool, group64 ); Debug.trace( divider64.getMaxAllocations() ); for ( int i = 0; i < divider64.getMaxAllocations(); i++ ) { RangedPage64 page = (RangedPage64) divider64.allocate(); Debug.trace( page.getRange(), page.getId(), page.size() ); } } public static void testSimpleScheduler() { RangedPage64 page64 = new RangedPage64( 0, 850,0 ); DirectPagePool 
pagePool = new DirectPagePool( RangedPage64.class ); LocalBatchActivePageScheduler64 scheduler64 = new LocalBatchActivePageScheduler64( new FixedPageDivider64( page64, pagePool, 100 ), page64.getId() + 1, 4 ); ContiguousPage[] pages = scheduler64.activates(); Debug.trace( scheduler64.getDivider().getMaxAllocations() ); for ( int i = 0; i < pages.length; i++ ) { Debug.trace( pages[i].getRange(), pages[i].getId() ); } //scheduler64.deactivate( pages[1] ); scheduler64.deactivate( pages ); Debug.hhf(); pages = scheduler64.activates(); for ( int i = 0; i < pages.length; i++ ) { Debug.trace( pages[i].getRange(), pages[i].getId() ); } scheduler64.deactivate( pages ); Debug.hhf(); pages = scheduler64.activates(); for ( int i = 0; i < pages.length; i++ ) { Debug.trace( pages[i].getRange(), pages[i].getId() ); } } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestBasicTransaction.testSequential(); //TestBasicTransaction.testParallel(); //TestChunk.testPool(); TestChunk.testSimpleScheduler(); //TestChunk.testPartition(); //TestChunk.testPartitioner(); //TestChunk.testPartitionablePageDivider(); // double factor = 0.2; // int page = 1000; // int stratum = 2; // int start = 0; // int end = 10000; // // splitInterval(start, end, factor, page, stratum); return 0; }, (Object[]) args ); } public static void splitInterval(int start, int end, double factor, int page, int stratum) { int totalRange = end - start; int subIntervalSize = (int) (totalRange * factor); int numSubIntervals = (int) (1 / factor); for (int i = 0; i < numSubIntervals; i++) { int subStart = start + i * subIntervalSize; int subEnd = Math.min(subStart + subIntervalSize, end); splitSubInterval(subStart, subEnd, page, stratum); page = Math.max(10, page / stratum); // Update the page size for the next interval } } public static void splitSubInterval(int start, int end, int page, int stratum) { int currentStart = start; while (currentStart < end) { int currentEnd = Math.min(currentStart + page, end); System.out.println(currentStart + ", " + currentEnd); currentStart = currentEnd; } } } ================================================ FILE: Pinecones/Springram/pom.xml ================================================ pinecones com.pinecones 2.5.1 4.0.0 com.pinecone.summer.springram springram 2.1.0 com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile org.springframework.boot spring-boot-starter-web ================================================ FILE: Pinecones/Springram/src/main/java/com/pinecone/summer/spring/SpringKernel.java ================================================ package com.pinecone.summer.spring; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.summer.spring.util.ConfigUtils; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.ConfigurableApplicationContext; import java.util.Map; import com.pinecone.framework.system.functions.Executor; @SpringBootApplication public class SpringKernel implements Pinenut { private ConfigurableApplicationContext mContext; private SpringApplication mSpringApplication; private Springram mSpringram; private Class mPrimarySources = SpringKernel.class; private Executor mInitializer; void setSpringram( Springram springram ) { 
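/* A note on execute() below: the Springram JSON/JSON5 config is flattened into Spring
 * "default properties", which sit at the lowest precedence of Spring's Environment, so an
 * explicit application.yaml or a command-line property still overrides them. A hedged
 * sketch of the equivalent plain Spring Boot wiring (the port value and the empty
 * "spring.config.location" mirror the commented-out block below; both are illustrative):
 *
 *     SpringApplication app = new SpringApplication( SpringKernel.class );
 *     Map<String, Object> defaults = new HashMap<>();
 *     defaults.put( "spring.config.location", "" ); // suppress external config lookup
 *     defaults.put( "server.port", "3912" );
 *     app.setDefaultProperties( defaults );
 *     ConfigurableApplicationContext ctx = app.run( args );
 */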
this.mSpringram = springram; } public void setPrimarySources( Class primarySources ) { this.mPrimarySources = primarySources; } public void execute( String... args ) { this.mSpringApplication = new SpringApplication( this.mPrimarySources ); PatriarchalConfig jo = this.mSpringram.getConfig(); if( jo instanceof JSONConfig ) { Map confs = ConfigUtils.recursionMapToPropertiesMap( (JSONConfig)jo ); if( !confs.containsKey( "spring.config.location" ) ){ confs.put( "spring.config.location", "" ); } // this.mSpringApplication.setDefaultProperties(Map.of( // //"spring.config.location", "./system/setup/application.yaml" // "spring.config.location", "", // "server.port", "3912" // )); this.mSpringApplication.setDefaultProperties( confs ); } // Otherwise, using default config `application.yaml`. if( this.mInitializer != null ) { try{ this.mInitializer.execute(); } catch ( Exception ignore ) { // Ignore } } this.mContext = this.mSpringApplication.run( args ); } public void terminate() { if ( this.mContext != null ) { this.mContext.close(); } } public ConfigurableApplicationContext getContext() { return this.mContext; } public SpringApplication getSpringApplication() { return this.mSpringApplication; } public void setInitializer( Executor initializer ) { this.mInitializer = initializer; } } ================================================ FILE: Pinecones/Springram/src/main/java/com/pinecone/summer/spring/Springram.java ================================================ package com.pinecone.summer.spring; import com.pinecone.framework.system.functions.Executor; import com.pinecone.hydra.servgram.Servgram; import org.springframework.boot.SpringApplication; import org.springframework.context.ConfigurableApplicationContext; public interface Springram extends Servgram { void execute() throws Exception; void join() throws InterruptedException; void join( long millis ) throws InterruptedException; ConfigurableApplicationContext getContext(); Springram setPrimarySources( Class primarySources ) ; SpringApplication getSpringApplication(); void setInitializer( Executor initializer ); } ================================================ FILE: Pinecones/Springram/src/main/java/com/pinecone/summer/spring/Springron.java ================================================ package com.pinecone.summer.spring; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.functions.Executor; import com.pinecone.hydra.servgram.ArchServgramium; import org.springframework.boot.SpringApplication; import org.springframework.context.ConfigurableApplicationContext; import java.util.concurrent.TimeoutException; public class Springron extends ArchServgramium implements Springram { protected String[] mSpringbootArgs; protected Thread mSpringPrimaryThread; protected SpringKernel mSpringKernel; public Springron( String szName, Processum parent, String[] springbootArgs ) { super( szName, parent ); this.mSpringbootArgs = springbootArgs; this.mSpringKernel = new SpringKernel(); this.mSpringKernel.setSpringram( this ); this.mSpringPrimaryThread = new Thread(new Runnable() { @Override public void run() { Springron.this.infoLifecycle( "VitalizingSubsystem", "Start" ); Springron.this.mSpringKernel.execute( Springron.this.mSpringbootArgs ); Springron.this.infoLifecycle( "VitalizingSubsystem", "Subsystem readied" ); while ( Springron.this.mSpringKernel.getContext().isActive() ) { try { Thread.sleep( 100 ); } catch ( InterruptedException e ) { 
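// On interrupt, close the Spring context and restore the interrupt flag; terminate()
// further down also polls getContext().isActive() with a 5000 ms timeout before joining
// this primary thread, so shutdown is bounded rather than open-ended.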
Springron.this.mSpringKernel.terminate(); Thread.currentThread().interrupt(); break; } } Springron.this.infoLifecycle( "SubsystemTermination", "Subsystem terminated" ); } }); this.mSpringPrimaryThread.setName( this.getName() + "Primary" + this.mSpringPrimaryThread.getName() ); this.setThreadAffinity( this.mSpringPrimaryThread ); } public Springron( String szName, Processum parent ) { this( szName, parent, new String[0] ); } @Override public Springram setPrimarySources( Class primarySources ) { this.mSpringKernel.setPrimarySources( primarySources ); return this; } @Override public void join() throws InterruptedException { this.mSpringPrimaryThread.join(); } @Override public void join( long millis ) throws InterruptedException { this.mSpringPrimaryThread.join( millis ); } @Override public void execute() throws Exception { this.mSpringPrimaryThread.start(); } @Override public ConfigurableApplicationContext getContext() { return this.mSpringKernel.getContext(); } @Override public SpringApplication getSpringApplication() { return this.mSpringKernel.getSpringApplication(); } @Override public void setInitializer( Executor initializer ) { this.mSpringKernel.setInitializer(initializer); } @Override public void terminate() { this.mSpringKernel.terminate(); long nStart = System.currentTimeMillis(); try{ while ( this.mSpringKernel.getContext().isActive() ){ Thread.sleep( 50 ); if( System.currentTimeMillis() - nStart > 5000 ) { throw new TimeoutException( "Terminating springboot timeout." ); } } this.mSpringPrimaryThread.join(); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); } catch ( TimeoutException e1 ) { throw new ProxyProvokeHandleException( e1 ); } } @Override public void interrupt() { super.interrupt(); } @Override public void apoptosis() { this.terminate(); } @Override public void kill() { this.terminate(); super.kill(); } } ================================================ FILE: Pinecones/Springram/src/main/java/com/pinecone/summer/spring/util/ConfigUtils.java ================================================ package com.pinecone.summer.spring.util; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.unit.Units; import com.pinecone.framework.unit.tabulate.FamilyEntryNameEncoder; import com.pinecone.framework.unit.tabulate.GenericNamespaceFamilyEntryNameEncoder; import com.pinecone.framework.unit.tabulate.RecursiveFamilyIterator; import com.pinecone.framework.unit.tabulate.UnitFamilyNode; import java.util.Collection; import java.util.Map; public final class ConfigUtils { /** * Convert JSON formatted or recursion map to spring-properties map. * So spring can using json or json5. * e.g. { server : { port : 1234 } } => { server.port : 1234 }. * @return Spring Properties Map */ public static Map recursionMapToPropertiesMap( Map recursionMap ){ RecursiveFamilyIterator iterator = new RecursiveFamilyIterator<>( recursionMap, true ); FamilyEntryNameEncoder entryNameEncoder = new GenericNamespaceFamilyEntryNameEncoder( ".", true ); Map neo = Units.spawnExtendParent( recursionMap ); while( iterator.hasNext() ) { UnitFamilyNode node = iterator.next(); String k = entryNameEncoder.encode( node ); k = k.substring( 1 ); // Skip '.' 
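// The encoder above yields keys such as ".server.port" (hence the substring( 1 )); the
// net effect is { "server": { "port": 1234 } } => { "server.port": 1234 }, matching the
// javadoc example, so a nested JSON/JSON5 document binds as flat Spring properties.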
neo.put( k, node.getEntry().getValue() ); } return neo; } } ================================================ FILE: Pinecones/Summer/pom.xml ================================================ pinecones com.pinecones 2.5.1 4.0.0 com.pinecone.summer summer 2.1.0 com.pinecone pinecone 2.5.1 compile com.pinecone.ulf ulfhedinn 1.2.1 compile commons-fileupload commons-fileupload 1.3.1 ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchConnectDispatcher.java ================================================ package com.pinecone.summer; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSONException; import com.pinecone.summer.http.HttpEntityParser; import com.pinecone.summer.prototype.ConnectDispatcher; import com.pinecone.summer.prototype.Pagesion; import com.pinecone.summer.prototype.Wizard; import com.pinecone.Pinecone; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.lang.reflect.InvocationTargetException; /** * Pinecone For Java SystemDispatcher [ Bean Nuts Pinecone PineconeJava Summer SystemDispatcher ] * Copyright © 2008 - 2024 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Mr.A.R.B / WJH] * Tip: * ***************************************************************************************** * For the Pinecone Net Family, we adopted the PHP style as the function paradigm: Pinecone CPP * has followed PHP-style conventions throughout its long history, and since almost no inappropriate * scenarios have been found, each Pinecone implementation in other languages inherits the Pinecone CPP conventions. * For the following cases we define: * => $_GET : Parsed query-string key-values JSON object. * => $_POST : Key-values JSON object, taken from either a form or a multipart body. * => $_GPC : The Java parameter map, exposed in JSON object format. * => $_FILES : A files map, like PHP's $_FILES. * => $_REQUEST : The current session-global HTTP request. * => $_RESPONSE : The current session-global HTTP response.
* ***************************************************************************************** */ public class ArchConnectDispatcher implements ConnectDispatcher { protected ArchHostSystem mArchHostSystem; protected RouterType mRouterType = RouterType.QueryString; protected String mszURI = ""; protected String[] mURIParts = new String[0]; protected String mszDomainHref = ""; protected ArchWizardSummoner mWizardSummoner = null; protected HttpEntityParser mHttpEntityParser = null ; protected String mszWizardCommand = null; protected String mszModelCommand = null; protected String mszControlCommand = null; protected ArchConnection mConnection = null; public ArchConnectDispatcher( ArchHostSystem system, RouterType routerType ){ this.mArchHostSystem = system; this.mHttpEntityParser = this.mArchHostSystem.mHttpEntityParser; this.mRouterType = routerType; } public ArchHostSystem getHostSystem(){ return this.mArchHostSystem; } public HttpEntityParser getHttpEntityParser(){ return this.mHttpEntityParser; } public ArchWizardSummoner getWizardSummoner() { return this.mWizardSummoner; } public ArchConnection getConnection(){ return this.mConnection; } public String getWizardCommand() { return this.mszWizardCommand; } public String getModelCommand() { return this.mszModelCommand; } public String getControlCommand() { return this.mszControlCommand; } @Override public void afterConnectionAccepted( Connectiom connectiom ) throws ServletException, IOException { connectiom.response.setCharacterEncoding( this.mArchHostSystem.getServerCharset() ); } /** Http Method Handler **/ @Override public void handleGet( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); this.mConnection = new GetConnection( this, connectiom ); this.invokeDispatchBus(); } @Override public void handlePost( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); this.mConnection = new PostConnection( this, connectiom ); this.invokeDispatchBus(); } @Override public void handleHead( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); } @Override public void handleOptions( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); } @Override public void handlePut( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); } @Override public void handlePatch( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); } @Override public void handleDelete( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); } @Override public void handleTrace( Connectiom connectiom ) throws ServletException, IOException { this.afterConnectionAccepted(connectiom); } @Override public void stop() throws RuntimeException { throw new TerminateSessionException("This session or sequence has been terminated."); } public void jspRenderPage( String szDispatcherPath ) throws IOException, ServletException { this.mConnection.getRequest().getRequestDispatcher( szDispatcherPath ).forward(this.mConnection.getRequest(), this.mConnection.getResponse()); } public void jspTPLRenderPage( String szTemplatePath ) throws IOException, ServletException { this.mConnection.getRequest().getRequestDispatcher( this.mArchHostSystem.getRealTemplatePath() + szTemplatePath ).forward(this.mConnection.getRequest(), this.mConnection.getResponse()); } @Override 
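/* A hedged usage sketch of the PHP-style accessors documented in the class javadoc, as
 * they might appear inside a Wizard handler; the request GET /?do=Demo&id=42 and every
 * name below are illustrative assumptions, not additions to the project API:
 *
 *     String id = this.getConnection().$_GET().getString( "id" );   // "42"
 *     this.getConnection().$_RESPONSE().setContentType( "application/json" );
 *     this.getConnection().writer().print( "{\"id\":" + id + "}" );
 */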
public void traceSystemErrorMsg( String szTitle, String szErrorMsg ) throws IOException, ServletException { this.mConnection.getRequest().setAttribute("pineVersion", Pinecone.VERSION); this.mConnection.getRequest().setAttribute("pineReleaseDate", Pinecone.RELEASE_DATE); this.mConnection.getRequest().setAttribute("javaVersion", System.getProperty("java.version")); this.mConnection.getRequest().setAttribute("pageTitle", szTitle); this.mConnection.getRequest().setAttribute("errorMsg", szErrorMsg); this.jspRenderPage( this.mArchHostSystem.getDefaultErrorPagePath() ); } @Override public void traceSystemErrorMsg( int nErrorID, String szTitle, String szErrorMsg ) throws IOException, ServletException { this.mConnection.getResponse().setStatus(nErrorID); this.traceSystemErrorMsg( szTitle,szErrorMsg ); } @Override public void traceSystem404Error() throws IOException, ServletException { this.traceSystem404Error("You are trying to access an undefined file !" ); } @Override public void traceSystem404Error( String szErrorMsg ) throws IOException, ServletException { this.traceSystemErrorMsg( 404,"SERVER 404 ERROR",szErrorMsg ); } @Override public void traceSystem500Error( String szErrorMsg ) throws IOException, ServletException { this.traceSystemErrorMsg( 500,"SERVER 500 ERROR",szErrorMsg ); } public void echoIndexPage() throws IOException, ServletException { this.traceSystemErrorMsg( "WELCOME TO PINECONE JAVA" ,"Everything should be fine.
" ); } protected void beforeDispath() throws ServletException, IOException { } protected void afterDispatch() throws ServletException, IOException { } @Override public void invokeDispatchBus() throws ServletException, IOException { this.requestReceived(); this.dispatch(); } @Override public void dispatch() throws IOException, ServletException { this.beforeDispath(); this.profileURL(); this.toSummon(); this.afterDispatch(); } @Override public void requestReceived() throws ServletException, IOException { try { this.mszWizardCommand = this.mConnection.$_GET().getString(this.mArchHostSystem.getWizardParameter()); } catch (JSONException e){ this.mszWizardCommand = ""; } try { this.mszModelCommand = this.mConnection.$_GET().getString(this.mArchHostSystem.getModelParameter()); } catch (JSONException e){ this.mszModelCommand = ""; } try { this.mszControlCommand = this.mConnection.$_GET().getString(this.mArchHostSystem.getControlParameter()); } catch (JSONException e){ this.mszControlCommand = ""; } this.mWizardSummoner = SystemSpawner.spawnWizardSummoner( this.mArchHostSystem.getWizardSummonerConfig(), this.mConnection ); } protected void profileURL() throws ServletException { HttpServletRequest request = this.mConnection.getRequest(); StringBuffer sbRequestURL = request.getRequestURL(); String szRequestURI = request.getRequestURI(); if( szRequestURI.equals( "/" ) ){ if( sbRequestURL.charAt( sbRequestURL.length() - 1 ) == '/' ){ this.mszDomainHref = sbRequestURL.deleteCharAt( sbRequestURL.length() - 1 ).toString(); } else { this.mszDomainHref = sbRequestURL.toString(); } } else { String szRequestURL = sbRequestURL.toString(); String[] debris = szRequestURL.split( szRequestURI ); if( debris.length >= 1 ) { this.mszDomainHref = debris[0]; } else { throw new ServletException( "Illegal URL given '" + szRequestURL + "'." 
); } } this.mszURI = szRequestURI; this.mURIParts = StringUtils.trimEmptyElement( this.mszURI.split( "/" ) ); //Debug.trace( this.mURIParts, this.mszURI ); } protected void summonByQueryString() throws ServletException, IOException { switch ( this.mszWizardCommand ){ case "":{ this.echoIndexPage(); break; } default:{ this.mWizardSummoner.summonAndExecute( this.mszWizardCommand ); break; } } } protected void summonByRouterPath() throws ServletException, IOException { Object routum = this.mArchHostSystem.getPrimeRouterDispatcher().queryRoutum( this.mszURI ); if( routum != null ) { ArchRouterDispatcher.RouterClass routerClass = null; ArchRouterDispatcher.RouterMethod routerMethod = null; if( routum instanceof ArchRouterDispatcher.RouterClass ) { routerClass = (ArchRouterDispatcher.RouterClass) routum; } else if( routum instanceof ArchRouterDispatcher.RouterMethod ) { routerMethod = (ArchRouterDispatcher.RouterMethod) routum; routerClass = routerMethod.parent; } if( routerClass != null ) { this.mszWizardCommand = routerClass.antetype.getSuperclass().getSimpleName(); Wizard wizard = this.mWizardSummoner.summonIfExist( this.mszWizardCommand ); if( routerMethod != null ) { Pagesion pagesion = (Pagesion) wizard; pagesion.setRenderum( routerMethod.antetype ); try{ routerMethod.antetype.invoke( pagesion ); } catch ( IllegalAccessException | InvocationTargetException e ){ e.printStackTrace(); } pagesion.render(); } } } String szClass = ""; if( this.mURIParts.length > 0 ) { szClass = this.mURIParts[0]; } switch ( szClass ){ case "":{ this.echoIndexPage(); break; } default:{ this.mszWizardCommand = szClass; this.mWizardSummoner.summonAndExecute( szClass ); break; } } } protected void toSummon() throws ServletException, IOException { switch ( this.mRouterType ) { case QueryString:{ this.summonByQueryString(); break; } case PathRouter:{ this.summonByRouterPath(); break; } default:{ break; } } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchConnection.java ================================================ package com.pinecone.summer; import com.pinecone.framework.unit.LinkedMultiValueMap; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.summer.multiparts.MultipartFile; import com.pinecone.summer.multiparts.commons.CommonsMultipartFiles; import com.pinecone.summer.http.HttpEntityParser; import com.pinecone.summer.http.HttpMethod; import com.pinecone.summer.prototype.Connectson; import javax.servlet.ServletOutputStream; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import java.util.Map; import java.util.TreeMap; public abstract class ArchConnection implements Connectson { protected ArchConnectDispatcher mDispatcher; protected ArchHostSystem mHostSystem; protected Connectiom mConnectiom; protected HttpServletRequest mCurrentMultipartRequest; protected JSONObject mGlobalParameterContainer = null; /** GPC **/ protected JSONObject mGETMapContainer = null; /** GET **/ protected JSONObject mPOSTMapContainer = null; /** POST **/ protected HttpMethod mCurrentHttpMethod = HttpMethod.GET; protected CommonsMultipartFiles mMultipartFilesMaker = null ; protected HttpEntityParser mHttpEntityParser = null ; protected Map mFilesMapContainer = new LinkedMultiValueMap() ; protected Map mCookiesContainer = new TreeMap<>(); public ArchConnection( ArchConnectDispatcher 
dispatcher ) { this.mDispatcher = dispatcher; this.mHostSystem = this.mDispatcher.getHostSystem(); this.mHttpEntityParser = this.mDispatcher.getHttpEntityParser(); } protected ArchConnection(ArchConnectDispatcher dispatcher, Connectiom connectiom ) { this( dispatcher ); this.apply(connectiom); } protected ArchConnection apply( Connectiom connectiom ) { this.mConnectiom = connectiom; this.mMultipartFilesMaker = new CommonsMultipartFiles( this ); this.mConnectiom.afterConnectionRipe( this ); return this; } @Override public ArchConnectDispatcher getDispatcher(){ return this.mDispatcher; } @Override public ArchHostSystem getHostSystem() { return this.mHostSystem; } @Override public HttpServletRequest getRequest() { return this.mConnectiom.request; } @Override public HttpServletResponse getResponse() { return this.mConnectiom.response; } @Override public HttpServlet getServlet() { return this.mConnectiom.servlet; } @Override public HttpServletRequest getMultipartRequest() { return this.mCurrentMultipartRequest; } @Override public boolean isMultipartRequest() { return this.mMultipartFilesMaker.isMultipart(); } @Override public JSONObject $_GPC(){ return this.mGlobalParameterContainer; } @Override public JSONObject $_GET(){ return this.mGETMapContainer; } @Override public JSONObject $_POST(){ return this.mPOSTMapContainer; } @Override public PrintWriter writer() throws IOException { return this.getResponse().getWriter(); } @Override public ServletOutputStream out() throws IOException { return this.getResponse().getOutputStream(); } @Override public HttpServletRequest $_REQUEST(){ return this.$_REQUEST( false ); } @Override public HttpServletRequest $_REQUEST ( boolean bUsingMultipart ){ if ( bUsingMultipart && this.isMultipartRequest() ){ return this.mCurrentMultipartRequest; } return this.mConnectiom.request; } @Override public HttpServletResponse $_RESPONSE(){ return this.mConnectiom.response; } @Override public Map $_FILES() { return this.mFilesMapContainer; } @Override public Map $_COOKIE() { return this.mCookiesContainer; } @Override public HttpMethod currentHttpMethod(){ return this.mCurrentHttpMethod; } public CommonsMultipartFiles getMultipartFilesMaker() { return this.mMultipartFilesMaker; } public Connectiom getConnectiom() { return this.mConnectiom; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchHostSystem.java ================================================ package com.pinecone.summer; import com.pinecone.framework.util.io.FileUtils; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.summer.http.CommonHttpEntityParser; import com.pinecone.summer.http.HttpEntityParser; import com.pinecone.summer.prototype.HostSystem; import javax.servlet.ServletContext; import javax.servlet.ServletException; import java.io.IOException; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.Date; public class ArchHostSystem implements HostSystem { public static SystemServlet G_SystemServlet; protected SystemSpawner mSystemSpawner = null; protected SystemServlet mSystemServlet = null; protected JSONObject mGlobalConfig = null; protected JSONObject mSystemConfig = null; protected JSONObject mPublicWizardConfig = null; protected ServletContext mServletContext = null ; protected String mszModelParameter = "action" ; protected String mszControlParameter = "control" ; protected String mszModelClassSuffix = "Model" ; protected String 
mszControlClassSuffix = "Control" ; protected String mszWizardParameter = "do" ; protected String mszServerCharset = "UTF-8" ; protected String mszTemplatePath ; protected String mszRealTemplatePath ; protected String mszConfigPath ; protected String mszSimpResourcesPath ; protected String mszJavaWebInfoSuffix ; protected String mszRootClassPath ; protected String mszSystemPath = null ; protected String mszResourcesPath = null ; protected HttpEntityParser mHttpEntityParser = null ; protected ArchRouterDispatcher mRouterDispatcher = null ; /** Class Function **/ public String readFileContentAll( String szPath ) throws IOException { return FileUtils.readAll( szPath, Charset.forName( this.getServerCharset() ) ); } private void parseConfig() throws IOException { this.mGlobalConfig = new JSONMaptron( this.readFileContentAll( this.mszConfigPath ) ); } private void construct() throws IOException { if( this.mServletContext != null ){ this.mszServerCharset = this.mServletContext.getInitParameter("encoding"); } this.parseConfig(); if( this.mGlobalConfig != null ){ this.mSystemConfig = this.mGlobalConfig.getJSONObject("SummerSystem"); this.mszWizardParameter = this.mSystemConfig.getString("WizardParameter"); this.mszModelParameter = this.mSystemConfig.getString("ModelParameter"); this.mszControlParameter = this.mSystemConfig.getString("ControlParameter"); this.mPublicWizardConfig = this.mSystemConfig.getJSONObject("PublicWizardConfig"); this.mszTemplatePath = this.mSystemConfig.getString("TemplatePath"); this.mszSimpResourcesPath = this.mSystemConfig.getString("ResourcesPath"); this.mszJavaWebInfoSuffix = this.mSystemConfig.getString("JavaWebInfoSuffix"); this.mszRealTemplatePath = this.mszJavaWebInfoSuffix + this.mszTemplatePath; this.mszModelClassSuffix = this.mSystemConfig.getString("ModelClassSuffix"); this.mszControlClassSuffix = this.mSystemConfig.getString("ControlClassSuffix"); } this.mSystemSpawner = new SystemSpawner(); if ( this.mSystemServlet != null ) { this.registerRootClassPath( this.mSystemServlet.getClassPath() ); } this.mHttpEntityParser = new CommonHttpEntityParser( this.getServerCharset() ); this.mRouterDispatcher = new ArchRouterDispatcher( this ); } public ArchHostSystem( String szResourcesPath, String szConfigFileName ) throws IOException { this.mszResourcesPath = szResourcesPath; this.mszConfigPath = szResourcesPath + szConfigFileName; this.construct(); } public ArchHostSystem( SystemServlet servlet ) throws IOException { this.mSystemServlet = servlet; this.mServletContext = this.mSystemServlet.getServletContext(); this.mszConfigPath = ArchHostSystem.getSystemConfigPath( this.mSystemServlet.getClassPath(), this.getServletContext().getInitParameter("IlluminationConfigLocation") ); this.construct(); } public HttpEntityParser getHttpEntityParser(){ return this.mHttpEntityParser; } public JSONObject getGlobalConfig() { return this.mGlobalConfig; } public JSONObject getSystemConfig() { return this.mSystemConfig; } public JSONObject getPublicWizardConfig() { return this.mPublicWizardConfig; } public String getControlParameter() { return this.mszControlParameter; } public String getWizardParameter() { return this.mszWizardParameter; } public String getModelParameter() { return this.mszModelParameter; } public String getModelClassSuffix() { return this.mszModelClassSuffix; } public String getControlClassSuffix() { return this.mszControlClassSuffix; } public String getTemplatePath() { return this.mszTemplatePath; } public String getRealTemplatePath() { return this.mszRealTemplatePath; 
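/* A hedged sketch of the "SummerSystem" config block parsed in construct() above; the
 * keys mirror the getString()/getJSONObject() reads, the first five values match the
 * field defaults declared above, and the remaining values are illustrative assumptions:
 *
 *     { "SummerSystem" : {
 *         "WizardParameter"    : "do",
 *         "ModelParameter"     : "action",
 *         "ControlParameter"   : "control",
 *         "ModelClassSuffix"   : "Model",
 *         "ControlClassSuffix" : "Control",
 *         "TemplatePath"       : "/template/",
 *         "ResourcesPath"      : "resources",
 *         "JavaWebInfoSuffix"  : "WEB-INF/classes/"
 *     } }
 */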
} public String getServerCharset() { return this.mszServerCharset; } public ServletContext getServletContext() { return this.mServletContext; } public String getDefaultErrorPagePath() { return this.mSystemConfig.getString("DefaultErrorPageTpl"); } public JSONObject getHosts() { return this.mSystemConfig.getJSONObject("Hosts"); } public String getResourcesPath() { if( this.mszResourcesPath == null ){ this.mszResourcesPath = this.mSystemServlet.getClassPath() + this.mszSimpResourcesPath + "/"; } return this.mszResourcesPath; } public void savageSetResourcesPath( String szUncheckedResourcesPath ){ this.mszResourcesPath = szUncheckedResourcesPath; } /** Upload Function **/ public JSONObject getUploadConfig() { return this.mSystemConfig.getJSONObject("UploadConfig"); } public long getSingleFileSizeMax() { return this.getUploadConfig().getLong("SingleFileSizeMax"); } public long getSumFileSizeMax() { return this.getUploadConfig().getLong("SumFileSizeMax"); } public String getUploadEncode() { return this.getUploadConfig().getString("UploadEncode"); } public String getUploadTempDir() { return this.getUploadConfig().getString("UploadTempDir"); } /** System Class **/ @Override public String getWizardSummonerConfig() { return this.getSystemConfig().getString("WizardSummoner"); } @Override public String getWizardPackageName(){ return "Wizard"; } public SystemSpawner getSystemSpawner() { return this.mSystemSpawner; } @Override public ArchConnectDispatcher handleByDispatcher(RouterType routerType ) { return new ArchConnectDispatcher( this, routerType ); } @Override public ArchRouterDispatcher getPrimeRouterDispatcher() { return this.mRouterDispatcher; } public String getSystemPath() { if( this.mszRootClassPath != null && this.mszJavaWebInfoSuffix != null ){ if( this.mszSystemPath == null ) { String[] szPathChip = this.mszRootClassPath.split( this.mszJavaWebInfoSuffix ); if( szPathChip.length > 0 ){ this.mszSystemPath = szPathChip[0]; } } } return this.mszSystemPath; } public String getRootClassPath() { return this.mszRootClassPath; } protected void registerRootClassPath( String szRootClassPath ) { this.mszRootClassPath = szRootClassPath; } public SystemServlet getSystemServlet(){ return this.mSystemServlet; } public void init() throws ServletException { System.err.println( "----------------------------------------------" ); System.err.println( "Bean Nuts Pinecone PineconeJava Summer Has Been Initiated" ); System.err.println( "Time: " + ( new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") ).format(new Date()) ); System.err.println( "----------------------------------------------" ); } protected static String getSystemConfigPath( String szClassPath , String szIlluminationConfigLocation ) { if( szIlluminationConfigLocation.startsWith("classpath:") ){ szIlluminationConfigLocation = szIlluminationConfigLocation.replaceFirst( "classpath:", szClassPath ); } return szIlluminationConfigLocation; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchPageson.java ================================================ package com.pinecone.summer; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.summer.prototype.ModelEnchanter; import com.pinecone.summer.prototype.Pagesion; import com.pinecone.summer.prototype.Pageson; import com.pinecone.summer.prototype.Wizard; import javax.servlet.ServletException; import java.io.IOException; import java.lang.annotation.Annotation; import 
java.lang.reflect.Method; public abstract class ArchPageson extends ArchWizardum implements Pageson { protected JSONObject mPageData = null ; protected boolean mbGlobalEnchanter = false ; public ArchPageson( ArchConnection session ) { super( session ); this.mPageData = new JSONMaptron(); if( this instanceof Pagesion ){ this.mbGlobalEnchanter = this.hasEnchanterTrait(); } this.appendDefaultPageDate(); } protected boolean hasEnchanterTrait() { Annotation[] annotations = this.getClass().getAnnotations(); for( Annotation annotation : annotations ){ if( annotation instanceof ModelEnchanter ){ return ((ModelEnchanter) annotation).value(); } } return false; } protected void appendDefaultPageDate(){ this.mPageData.put( "PrototypeName", this.prototypeName() ); this.mPageData.put( "szMainTitle", ((Wizard)this).getTitle() ); this.mPageData.put( "szWizardRole", ((Wizard)this).getModularRole() ); } public void forward ( ArchPageson that ) { this.mPageData = that.mPageData; } public JSONObject getPageData(){ return this.mPageData; } public String toJSONString(){ return this.mPageData.toString(); } public void setEnchanterRole( boolean bRole ){ this.mbGlobalEnchanter = bRole; } public boolean isEnchanter() { return this.mbGlobalEnchanter; } public void setRenderum( Method fnRenderum ) { } public void render() throws ServletException, IOException { if( this instanceof Pagesion && this.mbGlobalEnchanter ){ this.writer().print( ((Pagesion)this).toJSONString() ); } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchRouterDispatcher.java ================================================ package com.pinecone.summer; import com.pinecone.framework.util.lang.ClassNameFetcher; import com.pinecone.framework.util.lang.NamespaceCollector; import com.pinecone.summer.prototype.Controller; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.ClassUtils; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.summer.prototype.*; import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; public class ArchRouterDispatcher implements RouterDispatcher { public class RouterClass { public boolean isPagesionController = false; public boolean isSimpleController = false; public RouterMapping routerMapping = null; public Class antetype = null; } public class RouterMethod { public RouterClass parent = null; public RouterMapping routerMapping = null; public Method antetype = null; } protected ArchHostSystem mArchHostSystem ; protected JSONArray mRoutumPackageNSs ; protected Map mDesignatedRouterClassMap = new LinkedHashMap<>(); protected Map mDesignatedRouterMethodMap = new LinkedHashMap<>(); protected NamespaceCollector mClassLoader = new ClassNameFetcher(); protected static String urlNormalize( String url ) { if( !url.startsWith( "/" ) ) { url = "/" + url; } return url; } protected static String[] fetchPaths( String[] urls, String szDefaultName ) { String[] paths = urls; if( paths.length == 0 ) { paths = new String[] { ArchRouterDispatcher.urlNormalize( szDefaultName ) }; } else { String[] norPaths = new String[paths.length]; for ( int i = 0; i < paths.length; i++ ) { String path = paths[i]; norPaths[i] = ArchRouterDispatcher.urlNormalize( path ); } paths = norPaths; } return paths; } protected void fetchRouterClass( String szRoutumPackageNS ) { List 
classNames = this.mClassLoader.fetch( szRoutumPackageNS ); if ( classNames != null ) { for ( String className : classNames ) { try { className = className.substring( className.indexOf( szRoutumPackageNS ) ); Class antetype = Thread.currentThread().getContextClassLoader().loadClass( className ); Annotation[] annotations = antetype.getAnnotations(); RouterClass routerClass = new RouterClass(); routerClass.isPagesionController = ClassUtils.isAssignable( Pagesion.class, antetype ); routerClass.isSimpleController = false; routerClass.antetype = antetype; for( Annotation annotation : annotations ) { if( annotation instanceof Controller) { routerClass.isSimpleController = true; } else if( annotation instanceof RouterMapping ) { routerClass.routerMapping = (RouterMapping)annotation; } } if( routerClass.isPagesionController || routerClass.isSimpleController ) { String[] paths = null; if( routerClass.routerMapping != null ) { RouterMapping routerMapping = routerClass.routerMapping; String szDefaultName = antetype.getSimpleName(); if ( routerClass.isPagesionController ) { szDefaultName = antetype.getSuperclass().getSimpleName(); } paths = ArchRouterDispatcher.fetchPaths( routerMapping.value(), szDefaultName ); for ( String path : paths ) { this.mDesignatedRouterClassMap.put( path, routerClass ); } } this.fetchRouterMethod( routerClass, paths, antetype ); } } catch ( ClassNotFoundException e ) { e.printStackTrace(); } } } } protected void fetchRouterMethod( RouterClass routerClass, String[] parentPaths, Class that ) { Method[] methods = that.getDeclaredMethods(); for ( Method method : methods ) { Annotation[] annotations = method.getAnnotations(); RouterMapping routerMapping = null; for( Annotation annotation : annotations ) { if( annotation instanceof RouterMapping ) { routerMapping = (RouterMapping)annotation; break; } } if( routerMapping != null ) { String szDefaultName = method.getName(); String[] paths = ArchRouterDispatcher.fetchPaths( routerMapping.value(), szDefaultName ); String[] finalPath; int nParentLen = 0; if( parentPaths != null ){ nParentLen = parentPaths.length; finalPath = new String[ parentPaths.length * paths.length ]; } else { finalPath = new String[ paths.length ]; } RouterMethod routerMethod = new RouterMethod(); routerMethod.antetype = method; routerMethod.parent = routerClass; routerMethod.routerMapping = routerMapping; int k = 0; nParentLen = nParentLen == 0 ? 1 : nParentLen; // 1 for only children loop. 
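// The nested loops below register the cross product of parent (class-level) paths and
// method paths whenever routerMapping.relative() is true; otherwise each method path is
// registered as-is. A hedged illustration (the annotation values are assumptions):
//
//     @RouterMapping( { "/user" } )                          // on the Pagesion class
//     @RouterMapping( value = { "/info" }, relative = true ) // on the handler method
//     => mDesignatedRouterMethodMap gains the key "/user/info"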
for ( int i = 0; i < nParentLen; i++ ) { for ( int j = 0; j < paths.length; j++ ) { if( routerMapping.relative() && parentPaths != null ) { finalPath[ k ] = parentPaths[i] + paths[j]; } else { finalPath[ k ] = paths[j]; } this.mDesignatedRouterMethodMap.put( finalPath[k], routerMethod ); ++k; } } Debug.trace( finalPath ); } } } protected void fetchRouterMaps() { for( Object obj : this.mRoutumPackageNSs ) { String szNS = (String) obj; this.fetchRouterClass( szNS ); } } public ArchRouterDispatcher( ArchHostSystem system ){ this.mArchHostSystem = system; this.mRoutumPackageNSs = this.mArchHostSystem.getSystemConfig().optJSONArray( "RoutumPackageNSs" ); this.fetchRouterMaps(); } public JSONArray getRoutumPackageNSs() { return this.mRoutumPackageNSs; } public Object queryRoutum( String szURI ){ RouterClass routerClass = this.mDesignatedRouterClassMap.get( szURI ); if( routerClass == null ) { return this.mDesignatedRouterMethodMap.get( szURI ); } return routerClass; } public Pagesion spawnPagesion( RouterClass routerClass, ArchConnection connection ) { Pagesion obj = null; try { try{ Constructor constructor = routerClass.antetype.getConstructor( ArchConnection.class ); obj = (Pagesion) constructor.newInstance( connection ); } catch ( NoSuchMethodException | InvocationTargetException e1 ){ e1.printStackTrace(); } } catch ( IllegalAccessException | InstantiationException e ){ System.err.println( "Summon Compromised: [" + e.toString() + "]" ); } return obj; } public Object spawnController( RouterClass routerClass ) { Object obj = null; try { try{ Constructor constructor = routerClass.antetype.getConstructor(); obj = constructor.newInstance(); } catch ( NoSuchMethodException | InvocationTargetException e1 ){ e1.printStackTrace(); } } catch ( IllegalAccessException | InstantiationException e ){ System.err.println( "Summon Compromised: [" + e.toString() + "]" ); } return obj; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchWizard.java ================================================ package com.pinecone.summer; import com.pinecone.summer.prototype.JSONBasedControl; import com.pinecone.summer.prototype.Pagesion; import com.pinecone.summer.prototype.Wizard; import javax.servlet.ServletException; import java.io.IOException; public abstract class ArchWizard implements Wizard { protected ArchConnection mConnection = null ; protected ArchHostSystem mParentSystem = null ; protected ArchConnectDispatcher mDispatcher = null ; private Pagesion mYokedModel = null ; private JSONBasedControl mYokedControl = null ; public ArchWizard ( ArchConnection session ) { this.mConnection = session; this.mDispatcher = this.mConnection.getDispatcher(); this.mParentSystem = this.mDispatcher.getHostSystem(); } @Override public ArchConnection getConnection() { return this.mConnection; } @Override public ArchHostSystem getHostSystem() { return this.mParentSystem; } @Override public ArchConnectDispatcher getDispatcher(){ return this.mDispatcher; } public void soulBound(Pagesion model, JSONBasedControl control ){ this.mYokedModel = model; this.mYokedControl = control; } public Pagesion revealYokedModel(){ return this.mYokedModel; } public JSONBasedControl revealYokedControl(){ return this.mYokedControl; } public void beforeSummon() { } public void summoning() throws ServletException, IOException { try{ if( this.mYokedControl != null ){ this.mYokedControl.beforeDispatch(); this.mYokedControl.dispatch(); this.mYokedControl.afterDispatch(); } if( this.mYokedModel != null 
){ this.mYokedModel.beforeDispatch(); this.mYokedModel.dispatch(); this.mYokedModel.render(); this.mYokedModel.afterDispatch(); } } catch ( TerminateSessionException e ){ System.out.println( "Wizard: One of caught session or sequence has been terminated." ); } } public void afterSummon() {} } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchWizardSummoner.java ================================================ package com.pinecone.summer; import com.pinecone.summer.prototype.*; import javax.servlet.ServletException; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; public class ArchWizardSummoner implements WizardSummoner { protected HostSystem mParentSystem; protected ArchConnection mConnection; protected Wizard mLastWizard = null ; public ArchWizardSummoner(ArchConnection connection ) { this.mParentSystem = connection.getHostSystem(); this.mConnection = connection; } public HostSystem getSystem() { return this.mParentSystem; } public Wizard getLastSummoned(){ return this.mLastWizard; } @Override public String queryNamespace( String szNickName ){ return this.mParentSystem.getWizardPackageName() + "." + szNickName; } protected String spawnFullModelPrototypeName( String szNickName ){ return this.queryNamespace(szNickName) + "." + szNickName + this.mParentSystem.getModelClassSuffix(); } protected String spawnFullControlPrototypeName( String szNickName ){ return this.queryNamespace(szNickName) + "." + szNickName + this.mParentSystem.getControlClassSuffix(); } protected Pagesion spawnWizardModelByCallHisName(String szClassName ){ Pagesion obj = null; try { Class pVoid = Class.forName( szClassName ); try{ Constructor constructor = pVoid.getConstructor( ArchConnection.class ); obj = (Pagesion) constructor.newInstance( this.mConnection ); } catch (NoSuchMethodException | InvocationTargetException e1){ e1.printStackTrace(); } } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e){ System.err.println( "Summon Compromised: [" + e.toString() + "]" ); //e.printStackTrace(); } return obj; } protected JSONBasedControl spawnWizardControlByCallHisName( String szClassName ){ JSONBasedControl obj = null; try { Class pVoid = Class.forName( szClassName ); try{ Constructor constructor = pVoid.getConstructor( ArchConnection.class ); obj = (JSONBasedControl) constructor.newInstance( this.mConnection ); } catch (NoSuchMethodException | InvocationTargetException e1){ e1.printStackTrace(); } } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e){ System.err.println( "Summon Compromised: [" + e.toString() + "]" ); //e.printStackTrace(); } return obj; } public Pagesion spawnWizardModel(String szNickName ) { return this.spawnWizardModelByCallHisName( this.spawnFullModelPrototypeName( szNickName ) ); } public JSONBasedControl spawnWizardControl( String szNickName ) { return this.spawnWizardControlByCallHisName( this.spawnFullControlPrototypeName( szNickName ) ); } @Override public Wizard summon( String szNickName, Object... 
args ) throws ServletException, IOException { JSONBasedControl hControl = this.spawnWizardControl( szNickName ); Pagesion hModel = this.spawnWizardModel( szNickName ); ArchWizard hArchetype = this.revealArchetype( hModel ,hControl ); if( hArchetype == null ){ return null; } this.beforeSummon( hModel, hControl ); this.soulBound( hModel, hControl ); this.mLastWizard = (Wizard) hArchetype; return this.mLastWizard; } @Override public void executeAfterSummonSequence() throws ServletException, IOException { if( this.mLastWizard != null ){ ((ArchWizard)this.mLastWizard).summoning(); ((ArchWizard)this.mLastWizard).afterSummon(); } } @Override public Wizard summonIfExist( String szNickName ) throws ServletException, IOException { Wizard wizard = this.summon( szNickName ); if( wizard == null ){ this.mConnection.getDispatcher().traceSystem404Error(); } return wizard; } @Override public Wizard summonAndExecute( String szNickName ) throws ServletException, IOException { Wizard wizard = this.summonIfExist( szNickName ); this.executeAfterSummonSequence(); return wizard; } public ArchWizard revealArchetype( Pagesion hModel, JSONBasedControl hControl ){ if( hModel instanceof ArchWizard){ return (ArchWizard)hModel; } else if( hControl instanceof ArchWizard){ return (ArchWizard)hControl; } return null; } public void beforeSummon(Pagesion hModel, JSONBasedControl hControl ){ if( hModel instanceof ArchWizard){ ( (ArchWizard) hModel ).beforeSummon(); } if( hControl instanceof ArchWizard){ ( (ArchWizard) hControl ).beforeSummon(); } } public void soulBound(Pagesion hModel, JSONBasedControl hControl ){ if( hModel instanceof ArchWizard){ ( (ArchWizard) hModel ).soulBound( hModel, hControl ); } if( hControl instanceof ArchWizard){ ( (ArchWizard) hControl ).soulBound( hModel, hControl ); } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/ArchWizardum.java ================================================ package com.pinecone.summer; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.summer.multiparts.MultipartFile; import com.pinecone.summer.prototype.Wizardum; import javax.servlet.ServletException; import javax.servlet.ServletOutputStream; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import java.util.Map; public abstract class ArchWizardum extends ArchWizard implements Wizardum { protected PrintWriter mConnectWriter = null; protected ServletOutputStream mConnectOut = null; public ArchWizardum( ArchConnection session ) { super( session ); } public ArchHostSystem system(){ return this.mParentSystem; } public ArchConnectDispatcher getSystemDispatcher() { return this.mDispatcher; } public JSONObject $_GPC(){ return this.getConnection().$_GPC(); } public JSONObject $_GET(){ return this.getConnection().$_GET(); } public JSONObject $_POST(){ return this.getConnection().$_POST(); } @Override public PrintWriter writer() { try{ if( this.mConnectWriter == null ) { this.mConnectWriter = this.getConnection().writer(); } return this.mConnectWriter; } catch ( IOException e ) { throw new IllegalStateException( "Illegal invoke writer.", e ); } } @Override public ServletOutputStream out() { try{ if( this.mConnectOut == null ) { this.mConnectOut = this.getConnection().out(); } return this.mConnectOut; } catch ( IOException e ) { throw new IllegalStateException( "Illegal invoke out.", e ); } } public HttpServletRequest 
$_REQUEST(){ return this.getConnection().$_REQUEST(); } public HttpServletRequest getCurrentMultipartRequest() { return this.getConnection().getMultipartRequest(); } public HttpServletResponse $_RESPONSE(){ return this.getConnection().$_RESPONSE(); } @Override public Map $_COOKIE() { return this.getConnection().$_COOKIE(); } @Override public Map $_FILES() { throw new IllegalStateException("Notice: $_FILES() is abstract."); } public void beforeDispatch() throws IOException, ServletException {} public void afterDispatch() throws IOException, ServletException {} public void stop() throws RuntimeException { this.mDispatcher.stop(); } public String getWizardCommand() { return this.mDispatcher.getWizardCommand(); } public String getModelCommand() { return this.mDispatcher.getModelCommand(); } public String getControlCommand() { return this.mDispatcher.getControlCommand(); } public void redirect( String szURL ) throws IOException { this.$_RESPONSE().sendRedirect( szURL ); } public String spawnWizardQuerySpell( String szPrototype ){ return "?" + this.system().getWizardParameter() + "=" + szPrototype; } public String spawnActionQuerySpell( String szActionFnName ){ return this.spawnActionControlSpell( szActionFnName, null ); } public String spawnControlQuerySpell( String szControlFnName ) { return this.spawnActionControlSpell( null, szControlFnName ); } public String spawnActionControlSpell( String szActionFnName, String szControlFnName ) { String szQueryString = "?" + this.system().getWizardParameter() + "=" + this.prototypeName(); if( szActionFnName != null && !szActionFnName.isEmpty() ){ szQueryString += "&" + this.system().getModelParameter() + "=" + szActionFnName; } if( szControlFnName != null && !szControlFnName.isEmpty() ){ szQueryString += "&" + this.system().getControlParameter() + "=" + szControlFnName; } return szQueryString; } public Object summonNormalGenieByCallHisName(String szGenieName) throws NaughtyGenieInvokedException { throw new IllegalStateException("Notice: summonNormalGenieByCallHisName() is abstract."); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/Connectiom.java ================================================ package com.pinecone.summer; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; public class Connectiom { protected ArchConnection connection; public HttpServletRequest request; public HttpServletResponse response; public HttpServlet servlet; public Connectiom( HttpServletRequest request, HttpServletResponse response, HttpServlet servlet ) { this.request = request; this.response = response; this.servlet = servlet; } protected void afterConnectionRipe( ArchConnection connection ) { this.connection = connection; } public ArchConnection getConnection() { return this.connection; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/GetConnection.java ================================================ package com.pinecone.summer; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.summer.http.HttpMethod; public class GetConnection extends ArchConnection { public GetConnection(ArchConnectDispatcher dispatcher ) { super( dispatcher ); } protected GetConnection(ArchConnectDispatcher dispatcher, Connectiom connectiom ) { super( dispatcher, connectiom ); } @Override protected ArchConnection apply(Connectiom connectiom) { super.apply(connectiom); 
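// GET connections parse only the query string: $_POST stays an empty JSONMaptron and no
// multipart interception happens here, in contrast to PostConnection.apply below, which
// sifts $_POST from either the raw request or the multipart-wrapped request.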
this.mCurrentHttpMethod = HttpMethod.GET; this.mGETMapContainer = this.mHttpEntityParser.parseQueryString( this.mConnectiom.request.getQueryString(), false ); this.mPOSTMapContainer = new JSONMaptron(); this.mGlobalParameterContainer = this.mHttpEntityParser.requestMapJsonify( this.mConnectiom.request, false ); this.mCookiesContainer = this.mHttpEntityParser.cookiesMapify( this.mCookiesContainer, this.mConnectiom.request ); this.mMultipartFilesMaker.refresh(); return this; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/NaughtyGenieInvokedException.java ================================================ package com.pinecone.summer; public class NaughtyGenieInvokedException extends ReflectiveOperationException { public enum NaughtyGenieType { N_NAUGHTY, N_GHOST, N_HETEROGENEOUS, N_ILLEGAL } private NaughtyGenieType mNaughtyGenieType = NaughtyGenieType.N_NAUGHTY; public NaughtyGenieInvokedException() { super(); } public NaughtyGenieInvokedException(String s) { super(s); } public NaughtyGenieInvokedException( String s, NaughtyGenieType naughtyGenieType ) { super(s); this.mNaughtyGenieType = naughtyGenieType; } public NaughtyGenieInvokedException( String s, NaughtyGenieType naughtyGenieType, Throwable cause ) { super( s, cause ); this.mNaughtyGenieType = naughtyGenieType; } public NaughtyGenieType getType() { return this.mNaughtyGenieType; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/PostConnection.java ================================================ package com.pinecone.summer; import com.pinecone.summer.http.HttpMethod; public class PostConnection extends ArchConnection { public PostConnection( ArchConnectDispatcher dispatcher ) { super( dispatcher ); } protected PostConnection(ArchConnectDispatcher dispatcher, Connectiom connectiom ) { super( dispatcher, connectiom); } @Override protected ArchConnection apply( Connectiom connectiom ) { super.apply(connectiom); this.mCurrentHttpMethod = HttpMethod.POST; this.mGETMapContainer = this.mHttpEntityParser.parseQueryString ( this.mConnectiom.request.getQueryString(), false ); this.mCookiesContainer = this.mHttpEntityParser.cookiesMapify( this.mCookiesContainer, this.mConnectiom.request ); this.mMultipartFilesMaker.refresh(); /* Notice: 2020-12-25 * Java Servlet abandoned multipart POST parsing, * so Pinecone was forced to redefine $_POST.
* **/ if( this.mMultipartFilesMaker.isMultipart() ){ this.mMultipartFilesMaker.interceptMultipartFiles(); this.mFilesMapContainer = this.mMultipartFilesMaker.getCurrentFilesMap(); this.mCurrentMultipartRequest = this.mMultipartFilesMaker.getCurrentMultipartRequest(); this.mPOSTMapContainer = this.mHttpEntityParser.siftPostFromParameterMap ( this.mCurrentMultipartRequest, false ); this.mGlobalParameterContainer = this.mHttpEntityParser.requestMapJsonify ( this.mCurrentMultipartRequest,false ); } else { this.mPOSTMapContainer = this.mHttpEntityParser.siftPostFromParameterMap ( this.mConnectiom.request, false ); this.mGlobalParameterContainer = this.mHttpEntityParser.requestMapJsonify ( this.mConnectiom.request,false ); } return this; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/RouterType.java ================================================ package com.pinecone.summer; public enum RouterType { QueryString, PathRouter } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/SystemRoutlet.java ================================================ package com.pinecone.summer; import com.pinecone.summer.prototype.Servletson; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; /** * Pinecone System Router Servlet * Like the Spring framework, it maps and dispatches all path-based routes. */ public class SystemRoutlet extends HttpServlet implements Servletson { private ArchHostSystem mSystem = null ; public ArchHostSystem getHostSystem(){ return this.mSystem; } @Override public void init() throws ServletException { this.mSystem = ArchHostSystem.G_SystemServlet.getHostSystem(); } @Override protected void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { this.mSystem.handleByDispatcher( RouterType.PathRouter ).handleGet( new Connectiom( request, response, this ) ); } @Override protected void doPost( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { this.mSystem.handleByDispatcher( RouterType.PathRouter ).handlePost( new Connectiom( request, response, this ) ); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/SystemServlet.java ================================================ package com.pinecone.summer; import com.pinecone.summer.prototype.Servletson; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; /** * Pinecone System Servlet * The system's primary servlet router (xxx:/host/); it maps and dispatches all query-string based routes.
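* The target wizard prototype is selected by the wizard query parameter, and the model/control functions by their respective parameters (see ArchConnection.spawnActionControlSpell for how such URLs are assembled).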
*/ public class SystemServlet extends HttpServlet implements Servletson { private ArchHostSystem mSystem = null ; private String mszClassPath ; private String mszArchSystemClassName ; public String getClassPath() { return this.mszClassPath; } public String getServletMatrixConfig() { return this.mszArchSystemClassName; } public ArchHostSystem getHostSystem(){ return this.mSystem; } @Override public void init() throws ServletException { this.mszClassPath = ArchHostSystem.class.getProtectionDomain().getCodeSource().getLocation().getPath(); this.mszArchSystemClassName = this.getServletContext().getInitParameter("HostSystem"); this.mSystem = SystemSpawner.spawnSystem( this.mszArchSystemClassName, this ); this.mSystem.init(); ArchHostSystem.G_SystemServlet = this; } @Override protected void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { this.mSystem.handleByDispatcher().handleGet( new Connectiom( request, response, this ) ); } @Override protected void doPost( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { this.mSystem.handleByDispatcher().handlePost( new Connectiom( request, response, this ) ); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/SystemSpawner.java ================================================ package com.pinecone.summer; import com.pinecone.framework.system.prototype.Factory; import com.pinecone.framework.system.prototype.Pinenut; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; public class SystemSpawner implements Pinenut { public static ArchHostSystem spawnSystem ( String szMatrixPrototypeName, SystemServlet servlet ){ ArchHostSystem hMatrix = null; try { Class pVoid = Class.forName( szMatrixPrototypeName ); try{ Constructor constructor = pVoid.getConstructor( SystemServlet.class ); hMatrix = (ArchHostSystem) constructor.newInstance( servlet ); } catch (NoSuchMethodException | InvocationTargetException e1){ System.err.println( "Critical Error: the host system class is incorrectly defined." ); e1.printStackTrace(); } } catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){ System.err.println( "Critical Error: check 'web.xml' [HostSystem] to find out what happened. " ); e.printStackTrace(); } return hMatrix; } public static ArchConnectDispatcher spawnDispatcher (String szDispatcherPrototypeName, ArchHostSystem system, RouterType routerType ){ ArchConnectDispatcher archControlDispatcher = null; try { Class pVoid = Class.forName( szDispatcherPrototypeName ); try{ Constructor constructor = pVoid.getConstructor( ArchHostSystem.class, RouterType.class ); archControlDispatcher = (ArchConnectDispatcher) constructor.newInstance( system, routerType ); } catch (NoSuchMethodException | InvocationTargetException e1){ System.err.println( "Critical Error: the system dispatcher class is incorrectly defined." ); e1.printStackTrace(); } } catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){ System.err.println( "Critical Error: check 'web.xml' [SystemDispatcher] to find out what happened.
" ); e.printStackTrace(); } return archControlDispatcher; } public static ArchWizardSummoner spawnWizardSummoner (String szSummonerPrototypeName, ArchConnection connection ) { ArchWizardSummoner summoner = null; try { Class pVoid = Class.forName( szSummonerPrototypeName ); try{ Constructor constructor = pVoid.getConstructor( ArchConnection.class ); summoner = (ArchWizardSummoner) constructor.newInstance( connection ); } catch (NoSuchMethodException | InvocationTargetException e1){ e1.printStackTrace(); } } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e){ e.printStackTrace(); } return summoner; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/TerminateSessionException.java ================================================ package com.pinecone.summer; public class TerminateSessionException extends RuntimeException { public TerminateSessionException() { } public TerminateSessionException(String message) { super(message); } public TerminateSessionException(String message, Throwable rootCause) { super(message, rootCause); } public TerminateSessionException(Throwable rootCause) { super(rootCause); } public Throwable getRootCause() { return this.getCause(); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/WizardGeniesInvoker.java ================================================ package com.pinecone.summer; import com.pinecone.framework.system.prototype.Prototype; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONArraytron; import com.pinecone.framework.util.json.JSONException; import com.pinecone.summer.prototype.GenieBottle; import com.pinecone.summer.prototype.JSONBasedControl; import com.pinecone.summer.prototype.Pagesion; import com.pinecone.summer.prototype.Wizard; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; import java.util.HashSet; public class WizardGeniesInvoker { private HashSet mNaughtyGeniesReel = null ; private HashSet mPrivateGeniesReel = null ; private Object mWizardProto = null ; private boolean mbSiftNaughtyGenies = false ; public WizardGeniesInvoker( Object wizardProto ){ this.mWizardProto = wizardProto; this.siftNaughtyGenies( null ); this.siftPrivateGenies(); } public WizardGeniesInvoker( Object wizardProto, boolean bSiftNaughtyGenies ){ this.mWizardProto = wizardProto; this.mbSiftNaughtyGenies = bSiftNaughtyGenies; this.siftNaughtyGenies(null ); this.siftPrivateGenies(); } public WizardGeniesInvoker( Object wizardProto, ArchHostSystem matrix ){ this.mWizardProto = wizardProto; this.mbSiftNaughtyGenies = matrix.getPublicWizardConfig().optBoolean("NaughtyGeniesSifted"); this.siftNaughtyGenies( matrix ); this.siftPrivateGenies(); } private void siftFromJSONArray( JSONArray jsonArray ){ if( jsonArray != null ){ for (int i = 0; i < jsonArray.length(); i++) { this.mNaughtyGeniesReel.add( jsonArray.optString( i ) ); } } } private void siftNaughtyGenies( ArchHostSystem matrix ){ if( this.mbSiftNaughtyGenies ){ this.prospectReel(); Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, GenieBottle.class ); Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, Pagesion.class ); Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, JSONBasedControl.class ); if( matrix != null ){ JSONArray otherNaughtyGenies = matrix.getPublicWizardConfig().optJSONArray("OtherNaughtyGenies"); this.siftFromJSONArray( 
otherNaughtyGenies ); } if( this.mWizardProto instanceof Wizard){ try{ JSONArray myNaughtyGenies = ( (Wizard)(this.mWizardProto) ).getMyNaughtyGenies(); this.siftFromJSONArray( myNaughtyGenies ); } catch (JSONException e){ e.printStackTrace(); } } } } private void siftPrivateGenies(){ if( this.mbSiftNaughtyGenies ){ if( this.mPrivateGeniesReel == null ){ this.mPrivateGeniesReel = new HashSet<>(); } Prototype.getDeclaredMethodsNameSet( this.mPrivateGeniesReel, this.mWizardProto.getClass() ); } } private void prospectReel(){ if( this.mNaughtyGeniesReel == null ){ this.mNaughtyGeniesReel = new HashSet<>(); } } public HashSet getNaughtyGeniesReel(){ return this.mNaughtyGeniesReel; } public HashSet getPrivateGeniesReel(){ return this.mPrivateGeniesReel; } public boolean willSiftNaughtyGenies(){ return this.mbSiftNaughtyGenies; } public Object invokeNormalGenieByCallHisName(String szGenieName) throws NaughtyGenieInvokedException { if( this.mNaughtyGeniesReel != null && this.mNaughtyGeniesReel.contains( szGenieName ) ){ throw new NaughtyGenieInvokedException( "Naughty genie has been invoked." ); } if( this.mPrivateGeniesReel != null && !this.mPrivateGeniesReel.contains( szGenieName ) ){ throw new NaughtyGenieInvokedException( "Naughty genie has been invoked." ); } try{ return Prototype.invokeNoParameterMethod( this.mWizardProto, szGenieName ); } catch ( NoSuchMethodException e1 ){ throw new NaughtyGenieInvokedException( "Ghost genie has been invoked.", NaughtyGenieInvokedException.NaughtyGenieType.N_GHOST ); } catch ( InvocationTargetException e2 ){ if( e2.getCause() instanceof TerminateSessionException ){ throw (TerminateSessionException) e2.getCause(); } throw new NaughtyGenieInvokedException( "Heterogeneous genie has been invoked.", NaughtyGenieInvokedException.NaughtyGenieType.N_HETEROGENEOUS, e2 ); } catch ( IllegalAccessException e3 ){ throw new NaughtyGenieInvokedException( "Illegal genie has been invoked.", NaughtyGenieInvokedException.NaughtyGenieType.N_ILLEGAL,e3 ); } } public void removeNaughtyGenie( String szGenieName ){ if( this.mNaughtyGeniesReel != null ){ this.mNaughtyGeniesReel.remove( szGenieName ); } } public void removeNaughtyGenie( String[] genieNames ){ if( this.mNaughtyGeniesReel != null ){ this.mNaughtyGeniesReel.removeAll( Arrays.asList(genieNames) ); } } public void removeNaughtyGenie( JSONArray genieNames ){ if( this.mNaughtyGeniesReel != null ){ for (int i = 0; i < genieNames.length(); i++) { this.mNaughtyGeniesReel.remove( genieNames.optString( i ) ); } } } public void removeNaughtyGenie( Method[] genies ){ if( this.mNaughtyGeniesReel != null ) { for (Method row : genies) { this.mNaughtyGeniesReel.remove(row.getName()); } } } public void removeNaughtyGenie( Class wizard ){ if( this.mNaughtyGeniesReel != null ) { for (Method row : wizard.getDeclaredMethods()) { this.mNaughtyGeniesReel.remove(row.getName()); } } } public void removeNaughtyGenie( Object wizard ){ this.prospectReel(); for (Method row : wizard.getClass().getDeclaredMethods()) { this.mNaughtyGeniesReel.remove(row.getName()); } } public void removeNaughtyGeniesByJSON( String szGeniesJSON ){ this.removeNaughtyGenie( new JSONArraytron( szGeniesJSON ) ); } public void addNaughtyGenie( String szGenieName ){ this.prospectReel(); this.mNaughtyGeniesReel.add( szGenieName ); } public void addNaughtyGenie( String[] genieNames ){ this.prospectReel(); this.mNaughtyGeniesReel.addAll( Arrays.asList(genieNames) ); } public void addNaughtyGenie( JSONArray genieNames ){ this.prospectReel(); if( genieNames != null ){ for 
(int i = 0; i < genieNames.length(); i++) { this.mNaughtyGeniesReel.add( genieNames.optString( i ) ); } } } public void addNaughtyGenie( Method[] genies ){ this.prospectReel(); for ( Method row : genies ) { this.mNaughtyGeniesReel.add( row.getName() ); } } public void addNaughtyGenie( Class wizard ){ this.prospectReel(); Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, wizard ); } public void addNaughtyGenie( Object wizard ){ this.prospectReel(); Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, wizard ); } public void addNaughtyGeniesByJSON( String szGeniesJSON ){ this.addNaughtyGenie( new JSONArraytron( szGeniesJSON ) ); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/context/ServletContextAware.java ================================================ package com.pinecone.summer.context; import javax.servlet.ServletContext; public interface ServletContextAware { void setServletContext(ServletContext var1); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/http/CommonHttpEntityParser.java ================================================ package com.pinecone.summer.http; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONArraytron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.JSONMaptron; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import java.io.UnsupportedEncodingException; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Map; /** * Pinecone For Java HttpEntityParser * Copyright © Bean Nuts Foundation All rights reserved. [Mr.A.R.B / WJH] * Tip: * ***************************************************************************************** * The Java Servlet API merges $_GET and $_POST into a single ParameterMap * and provides no method to separate them again, yet many scenarios require * exactly that separation. This parser therefore reconstructs each side itself. * ***************************************************************************************** */ public class CommonHttpEntityParser implements HttpEntityParser { protected String mszCharset = "UTF-8"; public CommonHttpEntityParser( String szCharset ){ if( szCharset != null ){ this.mszCharset = szCharset; } } protected String getCharset(){ return this.mszCharset; } private Object realValue( Object value, boolean bSafe ){ if( value instanceof String ){ String szValue = (String) value; return bSafe ?
this.valueSafety( szValue ) : szValue; } return value; } private LinkedHashSet parameterMapStrings2Set( String[] strings ) { if( strings != null ){ return new LinkedHashSet<>( Arrays.asList(strings) ); } return null; } private Object linkedHashSet2JSONValue( LinkedHashSet set, boolean bSafe ) { JSONArray jsonArray = new JSONArraytron(); if( set != null ){ int nSetSize = set.size(); for ( Object row : set ) { row = this.realValue( row, bSafe ); if( nSetSize != 1 ){ jsonArray.put( row ); } else { return row; } } } return jsonArray; } private Object parameterMapValue2JSONValue( Object parameterMapValue, boolean bSafe ){ Object jsonValue; if( null == parameterMapValue ){ jsonValue = null; } else if ( parameterMapValue instanceof String[] ) { String[] strings = (String[]) parameterMapValue; jsonValue = new JSONArraytron(); if( strings.length == 1 ){ jsonValue = this.realValue( strings[0], bSafe ); } else { for ( String str : strings ) { ((JSONArray) jsonValue).put( this.realValue( str, bSafe ) ); } } } else { jsonValue = this.realValue( parameterMapValue, bSafe ); } return jsonValue; } /** Java has no out-parameters, so sanitization returns a new value rather than mutating in place. **/ public Object valueSafety( Object value ) { if( value instanceof String ){ return StringUtils.addSlashes( (String) value ); } return value; } public JSONObject parseQueryString ( String szQueryString, boolean bSafe ){ int nParseAt = 0; JSONObject hObject = new JSONMaptron(); if( szQueryString != null && !szQueryString.isEmpty() ){ while ( nParseAt < szQueryString.length() ) { StringBuilder hKeyBuf = new StringBuilder(); StringBuilder hValueBuf = new StringBuilder(); while ( nParseAt < szQueryString.length() ) { if( szQueryString.charAt(nParseAt) == '&' ){ break; } if( szQueryString.charAt(nParseAt) == '=' ){ nParseAt++; break; } hKeyBuf.append( szQueryString.charAt(nParseAt) ); nParseAt++; } while ( nParseAt < szQueryString.length() ) { if( szQueryString.charAt(nParseAt) == '&' ){ nParseAt++; break; } hValueBuf.append( szQueryString.charAt(nParseAt) ); nParseAt++; } try { String szDecodedValueBuf = HttpURLParser.decode( hValueBuf.toString(), this.getCharset() ); hValueBuf = new StringBuilder( bSafe ? (String) this.valueSafety(szDecodedValueBuf) : szDecodedValueBuf ) ; } catch ( UnsupportedEncodingException e ){ e.printStackTrace(); } if( hKeyBuf.length() != 0 ){ try { hKeyBuf = new StringBuilder( HttpURLParser.decode(hKeyBuf.toString(), this.getCharset()) ); } catch ( UnsupportedEncodingException e ){ e.printStackTrace(); } if( hKeyBuf.length() >= 2 && hKeyBuf.charAt( hKeyBuf.length() - 2 ) == '[' && hKeyBuf.charAt( hKeyBuf.length() - 1 ) == ']' ){ /** Notice: the Java Servlet API treats array-value keys differently from PHP
{ Such as 'key[]' would not trim to 'key' }**/ /*hKeyBuf.replace( hKeyBuf.length() - 1,hKeyBuf.length(), "" ); hKeyBuf.replace( hKeyBuf.length() - 1,hKeyBuf.length(), "" );*/ String szKeyBuf = hKeyBuf.toString(); Object rRow = hObject.opt( szKeyBuf ); if( !(rRow instanceof JSONArray) ){ hObject.remove( szKeyBuf ); hObject.put( szKeyBuf, new JSONArraytron() ); } hObject.optJSONArray( szKeyBuf ).put( hValueBuf.toString() ); } else{ hObject.put( hKeyBuf.toString(), hValueBuf.toString() ); } } } } return hObject; } public JSONObject parseFormData ( HttpServletRequest request, boolean bSafe ){ return this.siftPostFromParameterMap( request, bSafe ); } public JSONObject requestMapJsonify ( HttpServletRequest request, boolean bSafe ) { Map properties = request.getParameterMap(); JSONObject jsonObject = new JSONMaptron(); Iterator entries = properties.entrySet().iterator(); Map.Entry entry; while ( entries.hasNext() ) { entry = (Map.Entry) entries.next(); String szKey = (String) entry.getKey(); jsonObject.put( szKey, this.parameterMapValue2JSONValue( entry.getValue(), bSafe ) ); } return jsonObject; } public JSONObject siftPostFromParameterMap( HttpServletRequest request, boolean bSafe ){ JSONObject queryMap = this.parseQueryString( request.getQueryString(), false ); JSONObject postMap = new JSONMaptron(); Map unionMap = request.getParameterMap(); Iterator entries = unionMap.entrySet().iterator(); Map.Entry entry; while ( entries.hasNext() ) { entry = (Map.Entry) entries.next(); String szKey = (String) entry.getKey(); Object parameterMapValue = entry.getValue(); Object queryMapValue = queryMap.opt( szKey ); if( queryMapValue != null ){ if( parameterMapValue instanceof String[] ){ LinkedHashSet hashSet = this.parameterMapStrings2Set( (String[])parameterMapValue ); if( queryMapValue instanceof String ){ hashSet.remove( (String) queryMapValue ); } else if( queryMapValue instanceof JSONArray ){ for( Object row : ( (JSONArray)queryMapValue ).getArray() ){ hashSet.remove( (String) row ); } } if( !hashSet.isEmpty() ){ postMap.put( szKey, this.linkedHashSet2JSONValue( hashSet, bSafe ) ); } } else if( parameterMapValue instanceof String ){ boolean bQualified = true; if( queryMapValue instanceof String && queryMap == parameterMapValue ){ bQualified = false; } else if( queryMapValue instanceof JSONArray ){ for( Object row : ( (JSONArray)queryMapValue ).getArray() ){ if( row == parameterMapValue ){ bQualified = false; break; } } } if( bQualified ){ postMap.put( szKey, this.realValue( queryMapValue, bSafe ) ); } } } else { postMap.put( szKey, this.parameterMapValue2JSONValue( parameterMapValue, bSafe ) ); } } return postMap; } public Map cookiesMapify ( Map map, HttpServletRequest request ) { Cookie[] cookies = request.getCookies(); map.clear(); if( cookies != null ) { for ( Cookie cookie : cookies ) { map.put( cookie.getName(), cookie ); } } return map; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpEntityParser.java ================================================ package com.pinecone.summer.http; import com.pinecone.framework.util.json.JSONObject; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import java.util.Map; public interface HttpEntityParser { Object valueSafety( Object value ); JSONObject parseQueryString (String szQueryString, boolean bSafe ); JSONObject parseFormData (HttpServletRequest request, boolean bSafe ); JSONObject requestMapJsonify ( HttpServletRequest request, boolean bSafe ); 
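/** Recovers the POST-only fields by subtracting the parsed query-string entries from the servlet's merged ParameterMap; see CommonHttpEntityParser for the reference implementation. **/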
JSONObject siftPostFromParameterMap( HttpServletRequest request, boolean bSafe ); Map cookiesMapify ( Map map, HttpServletRequest request ); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpHeaders.java ================================================ package com.pinecone.summer.http; import com.pinecone.framework.unit.AbstractMultiValueMap; import com.pinecone.framework.util.Assert; import com.pinecone.framework.unit.LinkedCaseInsensitiveMap; import com.pinecone.framework.unit.MultiValueMap; import com.pinecone.framework.util.StringUtils; import java.io.Serializable; import java.net.URI; import java.nio.charset.Charset; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.EnumSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TimeZone; public class HttpHeaders extends AbstractMultiValueMap implements MultiValueMap, Serializable { private static final long serialVersionUID = -8578554704772377436L; public static final String ACCEPT = "Accept"; public static final String ACCEPT_CHARSET = "Accept-Charset"; public static final String ACCEPT_ENCODING = "Accept-Encoding"; public static final String ACCEPT_LANGUAGE = "Accept-Language"; public static final String ACCEPT_RANGES = "Accept-Ranges"; public static final String AGE = "Age"; public static final String ALLOW = "Allow"; public static final String AUTHORIZATION = "Authorization"; public static final String CACHE_CONTROL = "Cache-Control"; public static final String CONNECTION = "Connection"; public static final String CONTENT_ENCODING = "Content-Encoding"; public static final String CONTENT_DISPOSITION = "Content-Disposition"; public static final String CONTENT_LANGUAGE = "Content-Language"; public static final String CONTENT_LENGTH = "Content-Length"; public static final String CONTENT_LOCATION = "Content-Location"; public static final String CONTENT_RANGE = "Content-Range"; public static final String CONTENT_TYPE = "Content-Type"; public static final String COOKIE = "Cookie"; public static final String DATE = "Date"; public static final String ETAG = "ETag"; public static final String EXPECT = "Expect"; public static final String EXPIRES = "Expires"; public static final String FROM = "From"; public static final String HOST = "Host"; public static final String IF_MATCH = "If-Match"; public static final String IF_MODIFIED_SINCE = "If-Modified-Since"; public static final String IF_NONE_MATCH = "If-None-Match"; public static final String IF_RANGE = "If-Range"; public static final String IF_UNMODIFIED_SINCE = "If-Unmodified-Since"; public static final String LAST_MODIFIED = "Last-Modified"; public static final String LINK = "Link"; public static final String LOCATION = "Location"; public static final String MAX_FORWARDS = "Max-Forwards"; public static final String ORIGIN = "Origin"; public static final String PRAGMA = "Pragma"; public static final String PROXY_AUTHENTICATE = "Proxy-Authenticate"; public static final String PROXY_AUTHORIZATION = "Proxy-Authorization"; public static final String RANGE = "Range"; public static final String REFERER = "Referer"; public static final String RETRY_AFTER = "Retry-After"; public static final String SERVER = "Server"; public static final String 
SET_COOKIE = "Set-Cookie"; public static final String SET_COOKIE2 = "Set-Cookie2"; public static final String TE = "TE"; public static final String TRAILER = "Trailer"; public static final String TRANSFER_ENCODING = "Transfer-Encoding"; public static final String UPGRADE = "Upgrade"; public static final String USER_AGENT = "User-Agent"; public static final String VARY = "Vary"; public static final String VIA = "Via"; public static final String WARNING = "Warning"; public static final String WWW_AUTHENTICATE = "WWW-Authenticate"; private static final String[] DATE_FORMATS = new String[]{"EEE, dd MMM yyyy HH:mm:ss zzz", "EEE, dd-MMM-yy HH:mm:ss zzz", "EEE MMM dd HH:mm:ss yyyy"}; private static TimeZone GMT = TimeZone.getTimeZone("GMT"); private final Map> headers; public HttpHeaders() { this(new LinkedCaseInsensitiveMap(8, Locale.ENGLISH), false); } private HttpHeaders(Map> headers, boolean readOnly) { Assert.notNull(headers, "'headers' must not be null"); if (readOnly) { Map> map = new LinkedCaseInsensitiveMap(headers.size(), Locale.ENGLISH); Iterator var4 = headers.entrySet().iterator(); while(var4.hasNext()) { Map.Entry> entry = (Map.Entry)var4.next(); List values = Collections.unmodifiableList((List)entry.getValue()); map.put(entry.getKey(), values); } this.headers = Collections.unmodifiableMap(map); } else { this.headers = headers; } } public void setAccept(List acceptableMediaTypes) { this.set("Accept", MediaType.toString(acceptableMediaTypes)); } public List getAccept() { String value = this.getFirst("Accept"); List result = value != null ? MediaType.parseMediaTypes(value) : Collections.emptyList(); if (result.size() == 1) { List acceptHeader = this.get("Accept"); if (acceptHeader.size() > 1) { value = StringUtils.collectionToCommaDelimitedString(acceptHeader); result = MediaType.parseMediaTypes(value); } } return result; } public void setAcceptCharset(List acceptableCharsets) { StringBuilder builder = new StringBuilder(); Iterator iterator = acceptableCharsets.iterator(); while(iterator.hasNext()) { Charset charset = (Charset)iterator.next(); builder.append(charset.name().toLowerCase(Locale.ENGLISH)); if (iterator.hasNext()) { builder.append(", "); } } this.set("Accept-Charset", builder.toString()); } public List getAcceptCharset() { List result = new ArrayList(); String value = this.getFirst("Accept-Charset"); if (value != null) { String[] tokens = value.split(",\\s*"); String[] var4 = tokens; int var5 = tokens.length; for(int var6 = 0; var6 < var5; ++var6) { String token = var4[var6]; int paramIdx = token.indexOf(59); String charsetName; if (paramIdx == -1) { charsetName = token; } else { charsetName = token.substring(0, paramIdx); } if (!charsetName.equals("*")) { result.add(Charset.forName(charsetName)); } } } return result; } public void setAllow(Set allowedMethods) { this.set("Allow", StringUtils.collectionToCommaDelimitedString(allowedMethods)); } public Set getAllow() { String value = this.getFirst("Allow"); if (StringUtils.isEmpty(value)) { return EnumSet.noneOf(HttpMethod.class); } else { List allowedMethod = new ArrayList(5); String[] tokens = value.split(",\\s*"); String[] var4 = tokens; int var5 = tokens.length; for(int var6 = 0; var6 < var5; ++var6) { String token = var4[var6]; allowedMethod.add(HttpMethod.valueOf(token)); } return EnumSet.copyOf(allowedMethod); } } public void setCacheControl(String cacheControl) { this.set("Cache-Control", cacheControl); } public String getCacheControl() { return this.getFirst("Cache-Control"); } public void setConnection(String 
connection) { this.set("Connection", connection); } public void setConnection(List connection) { this.set("Connection", this.toCommaDelimitedString(connection)); } public List getConnection() { return this.getFirstValueAsList("Connection"); } public void setContentDispositionFormData(String name, String filename) { Assert.notNull(name, "'name' must not be null"); StringBuilder builder = new StringBuilder("form-data; name=\""); builder.append(name).append('"'); if (filename != null) { builder.append("; filename=\""); builder.append(filename).append('"'); } this.set("Content-Disposition", builder.toString()); } public void setContentLength(long contentLength) { this.set("Content-Length", Long.toString(contentLength)); } public long getContentLength() { String value = this.getFirst("Content-Length"); return value != null ? Long.parseLong(value) : -1L; } public void setContentType(MediaType mediaType) { Assert.isTrue(!mediaType.isWildcardType(), "'Content-Type' cannot contain wildcard type '*'"); Assert.isTrue(!mediaType.isWildcardSubtype(), "'Content-Type' cannot contain wildcard subtype '*'"); this.set("Content-Type", mediaType.toString()); } public MediaType getContentType() { String value = this.getFirst("Content-Type"); return StringUtils.hasLength(value) ? MediaType.parseMediaType(value) : null; } public void setDate(long date) { this.setDate("Date", date); } public long getDate() { return this.getFirstDate("Date"); } public void setETag(String eTag) { if (eTag != null) { Assert.isTrue(eTag.startsWith("\"") || eTag.startsWith("W/"), "Invalid eTag, does not start with W/ or \""); Assert.isTrue(eTag.endsWith("\""), "Invalid eTag, does not end with \""); } this.set("ETag", eTag); } public String getETag() { return this.getFirst("ETag"); } public void setExpires(long expires) { this.setDate("Expires", expires); } public long getExpires() { try { return this.getFirstDate("Expires"); } catch (IllegalArgumentException var2) { return -1L; } } public void setIfModifiedSince(long ifModifiedSince) { this.setDate("If-Modified-Since", ifModifiedSince); } /** @deprecated */ @Deprecated public long getIfNotModifiedSince() { return this.getIfModifiedSince(); } public long getIfModifiedSince() { return this.getFirstDate("If-Modified-Since"); } public void setIfNoneMatch(String ifNoneMatch) { this.set("If-None-Match", ifNoneMatch); } public void setIfNoneMatch(List ifNoneMatchList) { this.set("If-None-Match", this.toCommaDelimitedString(ifNoneMatchList)); } protected String toCommaDelimitedString(List list) { StringBuilder builder = new StringBuilder(); Iterator iterator = list.iterator(); while(iterator.hasNext()) { String ifNoneMatch = (String)iterator.next(); builder.append(ifNoneMatch); if (iterator.hasNext()) { builder.append(", "); } } return builder.toString(); } public List getIfNoneMatch() { return this.getFirstValueAsList("If-None-Match"); } protected List getFirstValueAsList(String header) { List result = new ArrayList(); String value = this.getFirst(header); if (value != null) { String[] tokens = value.split(",\\s*"); String[] var5 = tokens; int var6 = tokens.length; for(int var7 = 0; var7 < var6; ++var7) { String token = var5[var7]; result.add(token); } } return result; } public void setLastModified(long lastModified) { this.setDate("Last-Modified", lastModified); } public long getLastModified() { return this.getFirstDate("Last-Modified"); } public void setLocation(URI location) { this.set("Location", location.toASCIIString()); } public URI getLocation() { String value = 
this.getFirst("Location"); return value != null ? URI.create(value) : null; } public void setOrigin(String origin) { this.set("Origin", origin); } public String getOrigin() { return this.getFirst("Origin"); } public void setPragma(String pragma) { this.set("Pragma", pragma); } public String getPragma() { return this.getFirst("Pragma"); } public void setUpgrade(String upgrade) { this.set("Upgrade", upgrade); } public String getUpgrade() { return this.getFirst("Upgrade"); } public long getFirstDate(String headerName) { String headerValue = this.getFirst(headerName); if (headerValue == null) { return -1L; } else { String[] var3 = DATE_FORMATS; int var4 = var3.length; int var5 = 0; while(var5 < var4) { String dateFormat = var3[var5]; SimpleDateFormat simpleDateFormat = new SimpleDateFormat(dateFormat, Locale.US); simpleDateFormat.setTimeZone(GMT); try { return simpleDateFormat.parse(headerValue).getTime(); } catch (ParseException var9) { ++var5; } } throw new IllegalArgumentException("Cannot parse date value \"" + headerValue + "\" for \"" + headerName + "\" header"); } } public void setDate(String headerName, long date) { SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMATS[0], Locale.US); dateFormat.setTimeZone(GMT); this.set(headerName, dateFormat.format(new Date(date))); } public String getFirst(String headerName) { List headerValues = (List)this.headers.get(headerName); return headerValues != null ? (String)headerValues.get(0) : null; } public String add(String headerName, String headerValue) { List headerValues = (List)this.headers.get(headerName); if (headerValues == null) { headerValues = new LinkedList(); this.headers.put(headerName, headerValues); } ((List)headerValues).add(headerValue); return headerValue; } public String set( String headerName, String headerValue ) { List headerValues = new LinkedList(); headerValues.add(headerValue); this.headers.put(headerName, headerValues); return headerValue; } public void setAll(Map values) { Iterator var2 = values.entrySet().iterator(); while(var2.hasNext()) { Map.Entry entry = (Map.Entry)var2.next(); this.set((String)entry.getKey(), (String)entry.getValue()); } } public Map toSingleValueMap() { LinkedHashMap singleValueMap = new LinkedHashMap(this.headers.size()); Iterator var2 = this.headers.entrySet().iterator(); while(var2.hasNext()) { Map.Entry> entry = (Map.Entry)var2.next(); singleValueMap.put(entry.getKey(), (String) ((List)entry.getValue()).get(0)); } return singleValueMap; } public int size() { return this.headers.size(); } public boolean isEmpty() { return this.headers.isEmpty(); } public boolean containsKey(Object key) { return this.headers.containsKey(key); } public boolean containsValue(Object value) { return this.headers.containsValue(value); } public List get(Object key) { return (List)this.headers.get(key); } public List put(String key, List value) { return (List)this.headers.put(key, value); } public List remove(Object key) { return (List)this.headers.remove(key); } public void putAll(Map> map) { this.headers.putAll(map); } public void clear() { this.headers.clear(); } public Set keySet() { return this.headers.keySet(); } public Collection> values() { return this.headers.values(); } public Set>> entrySet() { return this.headers.entrySet(); } public boolean equals(Object other) { if (this == other) { return true; } else if (!(other instanceof HttpHeaders)) { return false; } else { HttpHeaders otherHeaders = (HttpHeaders)other; return this.headers.equals(otherHeaders.headers); } } public int hashCode() { return 
this.headers.hashCode(); } public String toString() { return this.headers.toString(); } public static HttpHeaders readOnlyHttpHeaders(HttpHeaders headers) { return new HttpHeaders(headers, true); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpMethod.java ================================================ package com.pinecone.summer.http; public enum HttpMethod { GET, POST, HEAD, OPTIONS, PUT, PATCH, DELETE, TRACE; private HttpMethod() { } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpURLParser.java ================================================ package com.pinecone.summer.http; import java.io.CharArrayWriter; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.UnsupportedCharsetException; import java.util.BitSet; public class HttpURLParser { static BitSet dontNeedEncoding; static final int caseDiff = ('a' - 'A'); static String dfltEncName = null; static { /* The list of characters that are not encoded has been * determined as follows: * * RFC 2396 states: * ----- * Data characters that are allowed in a URI but do not have a * reserved purpose are called unreserved. These include upper * and lower case letters, decimal digits, and a limited set of * punctuation marks and symbols. * * unreserved = alphanum | mark * * mark = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")" * * Unreserved characters can be escaped without changing the * semantics of the URI, but this should not be done unless the * URI is being used in a context that does not allow the * unescaped character to appear. * ----- * * It appears that both Netscape and Internet Explorer escape * all special characters from this list with the exception * of "-", "_", ".", "*". While it is not clear why they are * escaping the other characters, perhaps it is safest to * assume that there might be contexts in which the others * are unsafe if not escaped. Therefore, we will use the same * list. It is also noteworthy that this is consistent with * O'Reilly's "HTML: The Definitive Guide" (page 164). * * As a last note, Intenet Explorer does not encode the "@" * character which is clearly not unreserved according to the * RFC. We are being consistent with the RFC in this matter, * as is Netscape. * */ dontNeedEncoding = new BitSet(256); int i; for (i = 'a'; i <= 'z'; i++) { dontNeedEncoding.set(i); } for (i = 'A'; i <= 'Z'; i++) { dontNeedEncoding.set(i); } for (i = '0'; i <= '9'; i++) { dontNeedEncoding.set(i); } dontNeedEncoding.set(' '); /* encoding a space to a + is done * in the encode() method */ dontNeedEncoding.set('-'); dontNeedEncoding.set('_'); dontNeedEncoding.set('.'); dontNeedEncoding.set('*'); dfltEncName = (String) System.getProperties().get("file.encoding"); } public static String decode(String s) { String str = null; try { str = decode(s, dfltEncName); } catch (UnsupportedEncodingException e) { // The system should always have the platform default } return str; } public static String decode(String s, String enc) throws UnsupportedEncodingException { boolean needToChange = false; int numChars = s.length(); StringBuffer sb = new StringBuffer(numChars > 500 ? 
numChars / 2 : numChars); int i = 0; if ( enc.length() == 0 ) { throw new UnsupportedEncodingException ("URLDecoder: empty string enc parameter"); } char c; byte[] bytes = null; while (i < numChars) { c = s.charAt(i); switch (c) { case '+':{ sb.append(' '); i++; needToChange = true; break; } case '%':{ if (bytes == null) { bytes = new byte[(numChars-i)/3]; } int pos = 0; boolean bUsingRaw = false; while ( (( i + 2 ) < numChars) && (c=='%') ) { int v = 0; try { v = Integer.parseInt(s.substring( i + 1,i + 3),16); } catch ( NumberFormatException e ) { //throw new IllegalArgumentException("URLDecoder: Illegal hex characters in escape (%) pattern - " + e.getMessage()); /** Lenient deviation from java.net.URLDecoder: keep the malformed escape verbatim instead of throwing. **/ sb.append( c ); sb.append( s, i + 1, i + 3 ); i+=3; c = s.charAt(i); bUsingRaw = true; continue; } if ( v < 0 ) { throw new IllegalArgumentException("URLDecoder: Illegal hex characters in escape (%) pattern - negative value"); } bytes[pos++] = (byte) v; i+= 3; if ( i < numChars ) { c = s.charAt(i); } } if ( (i < numChars) && (c=='%') ) { //throw new IllegalArgumentException("URLDecoder: Incomplete trailing escape (%) pattern"); /** Lenient deviation: emit the stray '%' as-is instead of throwing. **/ sb.append(c); i++; continue; } if( !bUsingRaw ){ sb.append(new String(bytes, 0, pos, enc)); } needToChange = true; break; } default:{ sb.append(c); i++; break; } } } return (needToChange? sb.toString() : s); } public static String encode(String s) { String str = null; try { str = encode(s, dfltEncName); } catch (UnsupportedEncodingException e) { // The system should always have the platform default } return str; } public static String encode(String s, String enc) throws UnsupportedEncodingException { boolean needToChange = false; StringBuffer out = new StringBuffer(s.length()); Charset charset; CharArrayWriter charArrayWriter = new CharArrayWriter(); if (enc == null) throw new NullPointerException("charsetName"); try { charset = Charset.forName(enc); } catch (IllegalCharsetNameException e) { throw new UnsupportedEncodingException(enc); } catch (UnsupportedCharsetException e) { throw new UnsupportedEncodingException(enc); } for (int i = 0; i < s.length();) { int c = (int) s.charAt(i); //System.out.println("Examining character: " + c); if (dontNeedEncoding.get(c)) { if (c == ' ') { c = '+'; needToChange = true; } //System.out.println("Storing: " + c); out.append((char)c); i++; } else { // convert to external encoding before hex conversion do { charArrayWriter.write(c); /* * If this character represents the start of a Unicode * surrogate pair, then pass in two characters. It's not * clear what should be done if a character reserved in the * surrogate pairs range occurs outside of a legal * surrogate pair. For now, just treat it as if it were * any other character. */ if (c >= 0xD800 && c <= 0xDBFF) { /* System.out.println(Integer.toHexString(c) + " is high surrogate"); */ if ( (i+1) < s.length()) { int d = (int) s.charAt(i+1); /* System.out.println("\tExamining " + Integer.toHexString(d)); */ if (d >= 0xDC00 && d <= 0xDFFF) { /* System.out.println("\t" + Integer.toHexString(d) + " is low surrogate"); */ charArrayWriter.write(d); i++; } } } i++; } while (i < s.length() && !dontNeedEncoding.get((c = (int) s.charAt(i)))); charArrayWriter.flush(); String str = new String(charArrayWriter.toCharArray()); byte[] ba = str.getBytes(charset); for (int j = 0; j < ba.length; j++) { out.append('%'); char ch = Character.forDigit((ba[j] >> 4) & 0xF, 16); // converting to use uppercase letter as part of // the hex value if ch is a letter.
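// Character.forDigit yields lowercase 'a'-'f', so subtracting caseDiff ('a' - 'A') shifts a letter digit to its uppercase form.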
if (Character.isLetter(ch)) { ch -= caseDiff; } out.append(ch); ch = Character.forDigit(ba[j] & 0xF, 16); if (Character.isLetter(ch)) { ch -= caseDiff; } out.append(ch); } charArrayWriter.reset(); needToChange = true; } } return (needToChange? out.toString() : s); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/http/InvalidMediaTypeException.java ================================================ package com.pinecone.summer.http; import com.pinecone.summer.util.InvalidMimeTypeException; public class InvalidMediaTypeException extends IllegalArgumentException { private String mediaType; public InvalidMediaTypeException(String mediaType, String message) { super("Invalid media type \"" + mediaType + "\": " + message); this.mediaType = mediaType; } InvalidMediaTypeException(InvalidMimeTypeException ex) { super(ex.getMessage(), ex); this.mediaType = ex.getMimeType(); } public String getMediaType() { return this.mediaType; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/http/MediaType.java ================================================ package com.pinecone.summer.http; import java.io.Serializable; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.comparator.CompoundComparator; import com.pinecone.framework.util.StringUtils; import com.pinecone.summer.util.InvalidMimeTypeException; import com.pinecone.summer.util.MimeType; import com.pinecone.summer.util.MimeTypeUtils; public class MediaType extends MimeType implements Serializable { private static final long serialVersionUID = 2069937152339670231L; public static final MediaType ALL = valueOf("*/*"); public static final String ALL_VALUE = "*/*"; public static final MediaType APPLICATION_ATOM_XML = valueOf("application/atom+xml"); public static final String APPLICATION_ATOM_XML_VALUE = "application/atom+xml"; public static final MediaType APPLICATION_FORM_URLENCODED = valueOf("application/x-www-form-urlencoded"); public static final String APPLICATION_FORM_URLENCODED_VALUE = "application/x-www-form-urlencoded"; public static final MediaType APPLICATION_JSON = valueOf("application/json"); public static final String APPLICATION_JSON_VALUE = "application/json"; public static final MediaType APPLICATION_OCTET_STREAM = valueOf("application/octet-stream"); public static final String APPLICATION_OCTET_STREAM_VALUE = "application/octet-stream"; public static final MediaType APPLICATION_XHTML_XML = valueOf("application/xhtml+xml"); public static final String APPLICATION_XHTML_XML_VALUE = "application/xhtml+xml"; public static final MediaType APPLICATION_XML = valueOf("application/xml"); public static final String APPLICATION_XML_VALUE = "application/xml"; public static final MediaType IMAGE_GIF = valueOf("image/gif"); public static final String IMAGE_GIF_VALUE = "image/gif"; public static final MediaType IMAGE_JPEG = valueOf("image/jpeg"); public static final String IMAGE_JPEG_VALUE = "image/jpeg"; public static final MediaType IMAGE_PNG = valueOf("image/png"); public static final String IMAGE_PNG_VALUE = "image/png"; public static final MediaType MULTIPART_FORM_DATA = valueOf("multipart/form-data"); public static final String MULTIPART_FORM_DATA_VALUE = "multipart/form-data"; public 
static final MediaType TEXT_HTML = valueOf("text/html"); public static final String TEXT_HTML_VALUE = "text/html"; public static final MediaType TEXT_PLAIN = valueOf("text/plain"); public static final String TEXT_PLAIN_VALUE = "text/plain"; public static final MediaType TEXT_XML = valueOf("text/xml"); public static final String TEXT_XML_VALUE = "text/xml"; private static final String PARAM_QUALITY_FACTOR = "q"; public static final Comparator QUALITY_VALUE_COMPARATOR = new Comparator() { public int compare(MediaType mediaType1, MediaType mediaType2) { double quality1 = mediaType1.getQualityValue(); double quality2 = mediaType2.getQualityValue(); int qualityComparison = Double.compare(quality2, quality1); if (qualityComparison != 0) { return qualityComparison; } else if (mediaType1.isWildcardType() && !mediaType2.isWildcardType()) { return 1; } else if (mediaType2.isWildcardType() && !mediaType1.isWildcardType()) { return -1; } else if (!mediaType1.getType().equals(mediaType2.getType())) { return 0; } else if (mediaType1.isWildcardSubtype() && !mediaType2.isWildcardSubtype()) { return 1; } else if (mediaType2.isWildcardSubtype() && !mediaType1.isWildcardSubtype()) { return -1; } else if (!mediaType1.getSubtype().equals(mediaType2.getSubtype())) { return 0; } else { int paramsSize1 = mediaType1.getParameters().size(); int paramsSize2 = mediaType2.getParameters().size(); return paramsSize2 < paramsSize1 ? -1 : (paramsSize2 == paramsSize1 ? 0 : 1); } } }; public static final Comparator SPECIFICITY_COMPARATOR = new SpecificityComparator() { protected int compareParameters(MediaType mediaType1, MediaType mediaType2) { double quality1 = mediaType1.getQualityValue(); double quality2 = mediaType2.getQualityValue(); int qualityComparison = Double.compare(quality2, quality1); return qualityComparison != 0 ? qualityComparison : super.compareParameters(mediaType1, mediaType2); } }; public MediaType(String type) { super(type); } public MediaType(String type, String subtype) { super(type, subtype, Collections.emptyMap()); } public MediaType(String type, String subtype, Charset charset) { super(type, subtype, charset); } public MediaType(String type, String subtype, double qualityValue) { this(type, subtype, Collections.singletonMap("q", Double.toString(qualityValue))); } public MediaType(MediaType other, Map parameters) { super(other.getType(), other.getSubtype(), parameters); } public MediaType(String type, String subtype, Map parameters) { super(type, subtype, parameters); } protected void checkParameters(String attribute, String value) { super.checkParameters(attribute, value); if ("q".equals(attribute)) { value = this.unquote(value); double d = Double.parseDouble(value); Assert.isTrue(d >= 0.0D && d <= 1.0D, "Invalid quality value \"" + value + "\": should be between 0.0 and 1.0"); } } public double getQualityValue() { String qualityFactory = this.getParameter("q"); return qualityFactory != null ? 
Double.parseDouble(this.unquote(qualityFactory)) : 1.0D; } public boolean includes(MediaType other) { return super.includes(other); } public boolean isCompatibleWith(MediaType other) { return super.isCompatibleWith(other); } public MediaType copyQualityValue(MediaType mediaType) { if (!mediaType.getParameters().containsKey("q")) { return this; } else { Map params = new LinkedHashMap(this.getParameters()); params.put("q", mediaType.getParameters().get("q")); return new MediaType(this, params); } } public MediaType removeQualityValue() { if (!this.getParameters().containsKey("q")) { return this; } else { Map params = new LinkedHashMap(this.getParameters()); params.remove("q"); return new MediaType(this, params); } } public static MediaType valueOf(String value) { return parseMediaType(value); } public static MediaType parseMediaType(String mediaType) { MimeType type; try { type = MimeTypeUtils.parseMimeType(mediaType); } catch (InvalidMimeTypeException var4) { throw new InvalidMediaTypeException(var4); } try { return new MediaType(type.getType(), type.getSubtype(), type.getParameters()); } catch (IllegalArgumentException var3) { throw new InvalidMediaTypeException(mediaType, var3.getMessage()); } } public static List parseMediaTypes(String mediaTypes) { if (!StringUtils.hasLength(mediaTypes)) { return Collections.emptyList(); } else { String[] tokens = mediaTypes.split(",\\s*"); List result = new ArrayList(tokens.length); String[] var3 = tokens; int var4 = tokens.length; for(int var5 = 0; var5 < var4; ++var5) { String token = var3[var5]; result.add(parseMediaType(token)); } return result; } } public static String toString(Collection mediaTypes) { return MimeTypeUtils.toString(mediaTypes); } public static void sortBySpecificity(List mediaTypes) { Assert.notNull(mediaTypes, "'mediaTypes' must not be null"); if (mediaTypes.size() > 1) { Collections.sort(mediaTypes, SPECIFICITY_COMPARATOR); } } public static void sortByQualityValue(List mediaTypes) { Assert.notNull(mediaTypes, "'mediaTypes' must not be null"); if (mediaTypes.size() > 1) { Collections.sort(mediaTypes, QUALITY_VALUE_COMPARATOR); } } public static void sortBySpecificityAndQuality(List mediaTypes) { Assert.notNull(mediaTypes, "'mediaTypes' must not be null"); if (mediaTypes.size() > 1) { Collections.sort(mediaTypes, new CompoundComparator(new Comparator[]{SPECIFICITY_COMPARATOR, QUALITY_VALUE_COMPARATOR})); } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/io/AbstractResource.java ================================================ package com.pinecone.summer.io; import com.pinecone.framework.util.Assert; import com.pinecone.summer.util.ResourceUtils; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; public abstract class AbstractResource implements Resource { public AbstractResource() { } public boolean exists() { try { return this.getFile().exists(); } catch (IOException var4) { try { InputStream is = this.getInputStream(); is.close(); return true; } catch (Throwable var3) { return false; } } } public boolean isReadable() { return true; } public boolean isOpen() { return false; } public URL getURL() throws IOException { throw new FileNotFoundException(this.getDescription() + " cannot be resolved to URL"); } public URI getURI() throws IOException { URL url = this.getURL(); try { return ResourceUtils.toURI(url); } catch 
(URISyntaxException var3) { throw new IOException("Invalid URI [" + url + "]", var3); } } public File getFile() throws IOException { throw new FileNotFoundException(this.getDescription() + " cannot be resolved to absolute file path"); } public long contentLength() throws IOException { InputStream is = this.getInputStream(); Assert.state(is != null, "resource input stream must not be null"); try { long size = 0L; int read; for(byte[] buf = new byte[255]; (read = is.read(buf)) != -1; size += (long)read) { } long var6 = size; return var6; } finally { try { is.close(); } catch (IOException var14) { } } } public long lastModified() throws IOException { long lastModified = this.getFileForLastModifiedCheck().lastModified(); if (lastModified == 0L) { throw new FileNotFoundException(this.getDescription() + " cannot be resolved in the file system for resolving its last-modified timestamp"); } else { return lastModified; } } protected File getFileForLastModifiedCheck() throws IOException { return this.getFile(); } public Resource createRelative(String relativePath) throws IOException { throw new FileNotFoundException("Cannot create a relative resource for " + this.getDescription()); } public String getFilename() { return null; } public String toString() { return this.getDescription(); } public boolean equals(Object obj) { return obj == this || obj instanceof Resource && ((Resource)obj).getDescription().equals(this.getDescription()); } public int hashCode() { return this.getDescription().hashCode(); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/io/InputStreamSource.java ================================================ package com.pinecone.summer.io; import java.io.IOException; import java.io.InputStream; public interface InputStreamSource { InputStream getInputStream() throws IOException; } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/io/PathResource.java ================================================ package com.pinecone.summer.io; import com.pinecone.framework.util.Assert; import java.io.*; import java.net.URI; import java.net.URL; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Path; import java.nio.file.Paths; public class PathResource extends AbstractResource implements WritableResource { private final Path path; public PathResource(Path path) { Assert.notNull(path, "Path must not be null"); this.path = path.normalize(); } public PathResource(String path) { Assert.notNull(path, "Path must not be null"); this.path = Paths.get(path).normalize(); } public PathResource(URI uri) { Assert.notNull(uri, "URI must not be null"); this.path = Paths.get(uri).normalize(); } public final String getPath() { return this.path.toString(); } public boolean exists() { return Files.exists(this.path, new LinkOption[0]); } public boolean isReadable() { return Files.isReadable(this.path) && !Files.isDirectory(this.path, new LinkOption[0]); } public InputStream getInputStream() throws IOException { if (!this.exists()) { throw new FileNotFoundException(this.getPath() + " (no such file or directory)"); } else if (Files.isDirectory(this.path, new LinkOption[0])) { throw new FileNotFoundException(this.getPath() + " (is a directory)"); } else { return Files.newInputStream(this.path); } } public URL getURL() throws IOException { return this.path.toUri().toURL(); } public URI getURI() throws IOException { return this.path.toUri(); } public File getFile() throws 
IOException { try { return this.path.toFile(); } catch (UnsupportedOperationException var2) { throw new FileNotFoundException(this.path + " cannot be resolved to " + "absolute file path"); } } public long contentLength() throws IOException { return Files.size(this.path); } public long lastModified() throws IOException { return Files.getLastModifiedTime(this.path).toMillis(); } public Resource createRelative(String relativePath) throws IOException { return new PathResource(this.path.resolve(relativePath)); } public String getFilename() { return this.path.getFileName().toString(); } public String getDescription() { return "path [" + this.path.toAbsolutePath() + "]"; } public boolean isWritable() { return Files.isWritable(this.path) && !Files.isDirectory(this.path, new LinkOption[0]); } public OutputStream getOutputStream() throws IOException { if (Files.isDirectory(this.path, new LinkOption[0])) { throw new FileNotFoundException(this.getPath() + " (is a directory)"); } else { return Files.newOutputStream(this.path); } } public boolean equals(Object obj) { return this == obj || obj instanceof PathResource && this.path.equals(((PathResource)obj).path); } public int hashCode() { return this.path.hashCode(); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/io/Resource.java ================================================ package com.pinecone.summer.io; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URL; public interface Resource extends InputStreamSource { boolean exists(); boolean isReadable(); boolean isOpen(); URL getURL() throws IOException; URI getURI() throws IOException; File getFile() throws IOException; long contentLength() throws IOException; long lastModified() throws IOException; Resource createRelative(String var1) throws IOException; String getFilename(); String getDescription(); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/io/WritableResource.java ================================================ package com.pinecone.summer.io; import java.io.IOException; import java.io.OutputStream; public interface WritableResource extends Resource { boolean isWritable(); OutputStream getOutputStream() throws IOException; } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MaxUploadSizeExceededException.java ================================================ package com.pinecone.summer.multiparts; public class MaxUploadSizeExceededException extends MultipartException { private final long maxUploadSize; public MaxUploadSizeExceededException(long maxUploadSize) { this(maxUploadSize, (Throwable)null); } public MaxUploadSizeExceededException(long maxUploadSize, Throwable ex) { super("Maximum upload size of " + maxUploadSize + " bytes exceeded", ex); this.maxUploadSize = maxUploadSize; } public long getMaxUploadSize() { return this.maxUploadSize; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartException.java ================================================ package com.pinecone.summer.multiparts; public class MultipartException extends RuntimeException { public MultipartException(String msg) { super(msg); } public MultipartException(String msg, Throwable cause) { super(msg, cause); } } ================================================ FILE: 
Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartFile.java ================================================ package com.pinecone.summer.multiparts; import java.io.File; import java.io.IOException; import java.io.InputStream; public interface MultipartFile { String getName(); String getOriginalFilename(); String getContentType(); boolean isEmpty(); long getSize(); byte[] getBytes() throws IOException; InputStream getInputStream() throws IOException; void transferTo(File dest) throws IOException, IllegalStateException; } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartHttpServletRequest.java ================================================ package com.pinecone.summer.multiparts; import com.pinecone.summer.http.HttpHeaders; import com.pinecone.summer.http.HttpMethod; import javax.servlet.http.HttpServletRequest; public interface MultipartHttpServletRequest extends HttpServletRequest, MultipartRequest { HttpMethod getRequestMethod(); HttpHeaders getRequestHeaders(); HttpHeaders getMultipartHeaders(String var1); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartRequest.java ================================================ package com.pinecone.summer.multiparts; import com.pinecone.framework.unit.MultiValueMap; import java.util.Iterator; import java.util.List; import java.util.Map; public interface MultipartRequest { Iterator getFileNames(); MultipartFile getFile(String szFileFieldName); List getFiles(String szName); Map getFileMap(); MultiValueMap getMultiFileMap(); String getMultipartContentType(String szParamOrFileName); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartResolver.java ================================================ package com.pinecone.summer.multiparts; import javax.servlet.http.HttpServletRequest; public interface MultipartResolver { boolean isMultipart(javax.servlet.http.HttpServletRequest hHttpServletRequest); MultipartHttpServletRequest resolveMultipart(HttpServletRequest hHttpServletRequest) throws MultipartException; void cleanupMultipart(MultipartHttpServletRequest hMultipartHttpServletRequest); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsFileUploadSupport.java ================================================ package com.pinecone.summer.multiparts.commons; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.StringUtils; import com.pinecone.summer.multiparts.MultipartFile; import com.pinecone.summer.http.MediaType; import org.apache.commons.fileupload.FileItem; import org.apache.commons.fileupload.FileItemFactory; import org.apache.commons.fileupload.FileUpload; import org.apache.commons.fileupload.disk.DiskFileItemFactory; import com.pinecone.summer.io.Resource; import com.pinecone.framework.unit.LinkedMultiValueMap; import com.pinecone.framework.unit.MultiValueMap; public abstract class CommonsFileUploadSupport { private final DiskFileItemFactory fileItemFactory = this.newFileItemFactory(); private final FileUpload fileUpload = this.newFileUpload(this.getFileItemFactory()); private boolean 
uploadTempDirSpecified = false;
public CommonsFileUploadSupport() { }
public DiskFileItemFactory getFileItemFactory() { return this.fileItemFactory; }
public FileUpload getFileUpload() { return this.fileUpload; }
public void setMaxUploadSize(long maxUploadSize) { this.fileUpload.setSizeMax(maxUploadSize); }
public void setSingleUploadSize(long maxUploadSize) { this.fileUpload.setFileSizeMax(maxUploadSize); }
public void setMaxInMemorySize(int maxInMemorySize) { this.fileItemFactory.setSizeThreshold(maxInMemorySize); }
public void setDefaultEncoding(String defaultEncoding) { this.fileUpload.setHeaderEncoding(defaultEncoding); }
protected String getDefaultEncoding() { String encoding = this.getFileUpload().getHeaderEncoding(); if (encoding == null) { encoding = "ISO-8859-1"; } return encoding; }
public void setUploadTempDir(Resource uploadTempDir) throws IOException { if (!uploadTempDir.exists() && !uploadTempDir.getFile().mkdirs()) { throw new IllegalArgumentException("Given uploadTempDir [" + uploadTempDir + "] could not be created"); } else { this.fileItemFactory.setRepository(uploadTempDir.getFile()); this.uploadTempDirSpecified = true; } }
protected boolean isUploadTempDirSpecified() { return this.uploadTempDirSpecified; }
protected DiskFileItemFactory newFileItemFactory() { return new DiskFileItemFactory(); }
protected abstract FileUpload newFileUpload(FileItemFactory var1);
protected FileUpload prepareFileUpload(String encoding) { FileUpload fileUpload = this.getFileUpload(); FileUpload actualFileUpload = fileUpload; if (encoding != null && !encoding.equals(fileUpload.getHeaderEncoding())) { actualFileUpload = this.newFileUpload(this.getFileItemFactory()); actualFileUpload.setSizeMax(fileUpload.getSizeMax()); actualFileUpload.setFileSizeMax(fileUpload.getFileSizeMax()); /* carry the per-file limit over alongside the total limit */ actualFileUpload.setHeaderEncoding(encoding); } return actualFileUpload; }
protected CommonsFileUploadSupport.MultipartParsingResult parseFileItems(List fileItems, String encoding) { MultiValueMap multipartFiles = new LinkedMultiValueMap(); Map multipartParameters = new HashMap(); Map multipartParameterContentTypes = new HashMap(); Iterator it = fileItems.iterator(); while (it.hasNext()) { FileItem fileItem = (FileItem)it.next(); if (fileItem.isFormField()) { String partEncoding = this.determineEncoding(fileItem.getContentType(), encoding); String value; if (partEncoding != null) { try { value = fileItem.getString(partEncoding); } catch (UnsupportedEncodingException ex) { System.err.println("Could not decode multipart item '" + fileItem.getFieldName() + "' with encoding '" + partEncoding + "': using platform default"); value = fileItem.getString(); } } else { value = fileItem.getString(); } String[] curParam = (String[])multipartParameters.get(fileItem.getFieldName()); if (curParam == null) { multipartParameters.put(fileItem.getFieldName(), new String[]{value}); } else { String[] newParam = StringUtils.addStringToArray(curParam, value); multipartParameters.put(fileItem.getFieldName(), newParam); } multipartParameterContentTypes.put(fileItem.getFieldName(), fileItem.getContentType()); } else { CommonsMultipartFile file = new CommonsMultipartFile(fileItem); multipartFiles.add(file.getName(), file); Debug.trace( "Found multipart file [" + file.getName() + "] of size " + file.getSize() + " bytes with original filename [" + file.getOriginalFilename() + "], stored " + file.getStorageDescription() ); } } return new CommonsFileUploadSupport.MultipartParsingResult(multipartFiles, multipartParameters, multipartParameterContentTypes); } protected void
cleanupFileItems(MultiValueMap multipartFiles) { Iterator var2 = multipartFiles.values().iterator(); while(var2.hasNext()) { List files = (List)var2.next(); Iterator var4 = files.iterator(); while(var4.hasNext()) { MultipartFile file = (MultipartFile)var4.next(); if (file instanceof CommonsMultipartFile) { CommonsMultipartFile cmf = (CommonsMultipartFile)file; cmf.getFileItem().delete(); Debug.trace("Cleaning up multipart file [" + cmf.getName() + "] with original filename [" + cmf.getOriginalFilename() + "], stored " + cmf.getStorageDescription()); } } } } private String determineEncoding(String contentTypeHeader, String defaultEncoding) { if (!StringUtils.hasText(contentTypeHeader)) { return defaultEncoding; } else { MediaType contentType = MediaType.parseMediaType(contentTypeHeader); Charset charset = contentType.getCharSet(); return charset != null ? charset.name() : defaultEncoding; } } protected static class MultipartParsingResult { private final MultiValueMap multipartFiles; private final Map multipartParameters; private final Map multipartParameterContentTypes; public MultipartParsingResult(MultiValueMap mpFiles, Map mpParams, Map mpParamContentTypes) { this.multipartFiles = mpFiles; this.multipartParameters = mpParams; this.multipartParameterContentTypes = mpParamContentTypes; } public MultiValueMap getMultipartFiles() { return this.multipartFiles; } public Map getMultipartParameters() { return this.multipartParameters; } public Map getMultipartParameterContentTypes() { return this.multipartParameterContentTypes; } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsMultipartFile.java ================================================ package com.pinecone.summer.multiparts.commons; import com.pinecone.framework.util.Debug; import com.pinecone.summer.multiparts.MultipartFile; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import org.apache.commons.fileupload.FileItem; import org.apache.commons.fileupload.FileUploadException; import org.apache.commons.fileupload.disk.DiskFileItem; public class CommonsMultipartFile implements MultipartFile, Serializable { private final FileItem fileItem; private final long size; public CommonsMultipartFile(FileItem fileItem) { this.fileItem = fileItem; this.size = this.fileItem.getSize(); } public final FileItem getFileItem() { return this.fileItem; } public String getName() { return this.fileItem.getFieldName(); } public String getOriginalFilename() { String filename = this.fileItem.getName(); if (filename == null) { return ""; } else { int pos = filename.lastIndexOf("/"); if (pos == -1) { pos = filename.lastIndexOf("\\"); } return pos != -1 ? filename.substring(pos + 1) : filename; } } public String getContentType() { return this.fileItem.getContentType(); } public boolean isEmpty() { return this.size == 0L; } public long getSize() { return this.size; } public byte[] getBytes() { if (!this.isAvailable()) { throw new IllegalStateException("File has been moved - cannot be read again"); } else { byte[] bytes = this.fileItem.get(); return bytes != null ? bytes : new byte[0]; } } public InputStream getInputStream() throws IOException { if (!this.isAvailable()) { throw new IllegalStateException("File has been moved - cannot be read again"); } else { InputStream inputStream = this.fileItem.getInputStream(); return (InputStream)(inputStream != null ? 
inputStream : new ByteArrayInputStream(new byte[0])); } }
public void transferTo(File dest) throws IOException, IllegalStateException { if (!this.isAvailable()) { throw new IllegalStateException("File has already been moved - cannot be transferred again"); } else if (dest.exists() && !dest.delete()) { throw new IOException("Destination file [" + dest.getAbsolutePath() + "] already exists and could not be deleted"); } else { try { this.fileItem.write(dest); String action = "transferred"; if (!this.fileItem.isInMemory()) { action = this.isAvailable() ? "copied" : "moved"; } Debug.trace("Multipart file '" + this.getName() + "' with original filename [" + this.getOriginalFilename() + "], stored " + this.getStorageDescription() + ": " + action + " to [" + dest.getAbsolutePath() + "]"); } catch (FileUploadException e) { throw new IllegalStateException(e.getMessage(), e); } catch (IOException ioException) { throw ioException; } catch (Exception e2) { Debug.trace("Could not transfer to file"); throw new IOException("Could not transfer to file: " + e2.getMessage(), e2); } } }
protected boolean isAvailable() { if (this.fileItem.isInMemory()) { return true; } else if (this.fileItem instanceof DiskFileItem) { return ((DiskFileItem)this.fileItem).getStoreLocation().exists(); } else { return this.fileItem.getSize() == this.size; } }
public String getStorageDescription() { if (this.fileItem.isInMemory()) { return "in memory"; } else { return this.fileItem instanceof DiskFileItem ? "at [" + ((DiskFileItem)this.fileItem).getStoreLocation().getAbsolutePath() + "]" : "on disk"; } }
public String getStoragePath(){ return this.fileItem instanceof DiskFileItem ? ((DiskFileItem)this.fileItem).getStoreLocation().getAbsolutePath() : ""; }
public void finalize() throws Throwable{ super.finalize(); /* Temp-file cleanup is intentionally left to the resolver: see CommonsFileUploadSupport.cleanupFileItems(...). */ } }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsMultipartFiles.java ================================================
package com.pinecone.summer.multiparts.commons; import com.pinecone.framework.unit.LinkedMultiValueMap; import com.pinecone.summer.ArchConnection; import com.pinecone.summer.ArchHostSystem; import com.pinecone.summer.multiparts.MultipartException; import com.pinecone.summer.multiparts.MultipartFile; import com.pinecone.summer.multiparts.MultipartHttpServletRequest; import com.pinecone.summer.ArchConnectDispatcher; import com.pinecone.summer.io.PathResource; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.util.Map;
public class CommonsMultipartFiles { protected ArchConnection mConnection; protected ArchHostSystem mSystem; protected CommonsMultipartResolver mMultipartResolver; protected MultipartHttpServletRequest mCurrentMultipartHttpServletRequest = null; protected Map mFilesMap = null;
public CommonsMultipartFiles( ArchConnection connection ) { this.mConnection = connection; this.mSystem = this.mConnection.getHostSystem(); this.init(); }
public ArchHostSystem getHostSystem(){ return this.mSystem; }
public ArchConnectDispatcher getSystemDispathcher(){ return this.mConnection.getDispatcher(); }
public CommonsMultipartResolver getMultipartResolver(){ return this.mMultipartResolver; }
private void init(){ this.mMultipartResolver = new CommonsMultipartResolver( this.mSystem.getSystemServlet().getServletContext() ); this.mMultipartResolver.setSingleUploadSize( this.mSystem.getSingleFileSizeMax() ); this.mMultipartResolver.setMaxUploadSize( this.mSystem.getSumFileSizeMax() ); this.mMultipartResolver.setDefaultEncoding( this.mSystem.getUploadEncode() ); String szUploadTempDir = this.mSystem.getUploadTempDir(); if( szUploadTempDir != null && !szUploadTempDir.isEmpty() ){ try { this.mMultipartResolver.setUploadTempDir( new PathResource(szUploadTempDir) ); } catch ( IOException e ) { e.printStackTrace(); } } }
private HttpServletRequest getHttpServletRequest(){ return this.mConnection.$_REQUEST(); }
public boolean isMultipart(){ return this.mMultipartResolver.isMultipart( this.getHttpServletRequest() ); }
public void interceptMultipartFiles() throws MultipartException { if ( this.isMultipart() ){ this.mCurrentMultipartHttpServletRequest = this.mMultipartResolver.resolveMultipart( this.getHttpServletRequest() ); this.mFilesMap = this.mCurrentMultipartHttpServletRequest.getFileMap(); } else { this.refresh(); } }
public MultipartHttpServletRequest getCurrentMultipartRequest(){ return this.mCurrentMultipartHttpServletRequest; }
public Map getCurrentFilesMap(){ return this.mFilesMap; }
public void refresh() { if( this.mFilesMap != null ){ if( !this.mFilesMap.isEmpty() ){ this.mFilesMap.clear(); } } else { this.mFilesMap = new LinkedMultiValueMap() ; } } }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsMultipartResolver.java ================================================
package com.pinecone.summer.multiparts.commons; import java.util.List; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import com.pinecone.framework.util.Assert; import com.pinecone.summer.multiparts.MaxUploadSizeExceededException; import com.pinecone.summer.multiparts.MultipartException; import com.pinecone.summer.multiparts.MultipartHttpServletRequest; import
com.pinecone.summer.multiparts.MultipartResolver; import com.pinecone.summer.multiparts.support.DefaultMultipartHttpServletRequest; import com.pinecone.summer.context.ServletContextAware; import com.pinecone.summer.util.WebUtils; import org.apache.commons.fileupload.FileItem; import org.apache.commons.fileupload.FileItemFactory; import org.apache.commons.fileupload.FileUpload; import org.apache.commons.fileupload.FileUploadException; import org.apache.commons.fileupload.FileUploadBase.SizeLimitExceededException; import org.apache.commons.fileupload.servlet.ServletFileUpload; public class CommonsMultipartResolver extends CommonsFileUploadSupport implements MultipartResolver, ServletContextAware { private boolean resolveLazily; public CommonsMultipartResolver() { this.resolveLazily = false; } public CommonsMultipartResolver(ServletContext servletContext) { this(); this.setServletContext(servletContext); } public void setResolveLazily(boolean resolveLazily) { this.resolveLazily = resolveLazily; } protected FileUpload newFileUpload(FileItemFactory fileItemFactory) { return new ServletFileUpload(fileItemFactory); } public void setServletContext(ServletContext servletContext) { if (!this.isUploadTempDirSpecified()) { this.getFileItemFactory().setRepository(WebUtils.getTempDir(servletContext)); } } public boolean isMultipart(HttpServletRequest request) { return request != null && ServletFileUpload.isMultipartContent(request); } public MultipartHttpServletRequest resolveMultipart(final HttpServletRequest request) throws MultipartException { Assert.notNull(request, "Request must not be null"); if (this.resolveLazily) { return new DefaultMultipartHttpServletRequest(request) { protected void initializeMultipart() { MultipartParsingResult parsingResult = CommonsMultipartResolver.this.parseRequest(request); this.setMultipartFiles(parsingResult.getMultipartFiles()); this.setMultipartParameters(parsingResult.getMultipartParameters()); this.setMultipartParameterContentTypes(parsingResult.getMultipartParameterContentTypes()); } }; } else { MultipartParsingResult parsingResult = this.parseRequest(request); return new DefaultMultipartHttpServletRequest(request, parsingResult.getMultipartFiles(), parsingResult.getMultipartParameters(), parsingResult.getMultipartParameterContentTypes()); } } protected MultipartParsingResult parseRequest(HttpServletRequest request) throws MultipartException { String encoding = this.determineEncoding(request); FileUpload fileUpload = this.prepareFileUpload(encoding); try { List fileItems = ((ServletFileUpload)fileUpload).parseRequest(request); return this.parseFileItems(fileItems, encoding); } catch (SizeLimitExceededException var5) { throw new MaxUploadSizeExceededException(fileUpload.getSizeMax(), var5); } catch (FileUploadException var6) { throw new MultipartException("Could not parse multipart servlet request", var6); } } protected String determineEncoding(HttpServletRequest request) { String encoding = request.getCharacterEncoding(); if (encoding == null) { encoding = this.getDefaultEncoding(); } return encoding; } public void cleanupMultipart(MultipartHttpServletRequest request) { if (request != null) { try { this.cleanupFileItems(request.getMultiFileMap()); } catch (Throwable var3) { System.err.println("Failed to perform multipart cleanup for servlet request"); } } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/support/AbstractMultipartHttpServletRequest.java 
================================================ package com.pinecone.summer.multiparts.support; import com.pinecone.framework.unit.LinkedMultiValueMap; import com.pinecone.framework.unit.MultiValueMap; import com.pinecone.summer.multiparts.MultipartFile; import com.pinecone.summer.multiparts.MultipartHttpServletRequest; import com.pinecone.summer.http.HttpHeaders; import com.pinecone.summer.http.HttpMethod; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import java.util.*; public abstract class AbstractMultipartHttpServletRequest extends HttpServletRequestWrapper implements MultipartHttpServletRequest { private MultiValueMap multipartFiles; protected AbstractMultipartHttpServletRequest(HttpServletRequest request) { super(request); } public HttpServletRequest getRequest() { return (HttpServletRequest)super.getRequest(); } public HttpMethod getRequestMethod() { return HttpMethod.valueOf(this.getRequest().getMethod()); } public HttpHeaders getRequestHeaders() { HttpHeaders headers = new HttpHeaders(); Enumeration headerNames = this.getHeaderNames(); while(headerNames.hasMoreElements()) { String headerName = (String)headerNames.nextElement(); headers.put(headerName, Collections.list(this.getHeaders(headerName))); } return headers; } public Iterator getFileNames() { return this.getMultipartFiles().keySet().iterator(); } public MultipartFile getFile(String name) { return (MultipartFile)this.getMultipartFiles().getFirst(name); } public List getFiles(String name) { List multipartFiles = (List)this.getMultipartFiles().get(name); return multipartFiles != null ? multipartFiles : Collections.emptyList(); } public Map getFileMap() { return this.getMultipartFiles().toSingleValueMap(); } public MultiValueMap getMultiFileMap() { return this.getMultipartFiles(); } protected final void setMultipartFiles(MultiValueMap multipartFiles) { this.multipartFiles = new LinkedMultiValueMap<>(Collections.unmodifiableMap(multipartFiles)); } protected MultiValueMap getMultipartFiles() { if (this.multipartFiles == null) { this.initializeMultipart(); } return this.multipartFiles; } protected void initializeMultipart() { throw new IllegalStateException("Multipart request not initialized"); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/support/DefaultMultipartHttpServletRequest.java ================================================ package com.pinecone.summer.multiparts.support; import com.pinecone.framework.unit.MultiValueMap; import com.pinecone.summer.multiparts.MultipartFile; import com.pinecone.summer.http.HttpHeaders; import javax.servlet.http.HttpServletRequest; import java.util.*; public class DefaultMultipartHttpServletRequest extends AbstractMultipartHttpServletRequest { private static final String CONTENT_TYPE = "Content-Type"; private Map multipartParameters; private Map multipartParameterContentTypes; public DefaultMultipartHttpServletRequest(HttpServletRequest request, MultiValueMap mpFiles, Map mpParams, Map mpParamContentTypes) { super(request); this.setMultipartFiles(mpFiles); this.setMultipartParameters(mpParams); this.setMultipartParameterContentTypes(mpParamContentTypes); } public DefaultMultipartHttpServletRequest(HttpServletRequest request) { super(request); } public String getParameter(String name) { String[] values = (String[])this.getMultipartParameters().get(name); if (values != null) { return values.length > 0 ? 
values[0] : null; } else { return super.getParameter(name); } } public String[] getParameterValues(String name) { String[] values = (String[])this.getMultipartParameters().get(name); return values != null ? values : super.getParameterValues(name); } public Enumeration getParameterNames() { Map multipartParameters = this.getMultipartParameters(); if (multipartParameters.isEmpty()) { return super.getParameterNames(); } else { Set paramNames = new LinkedHashSet<>(); Enumeration paramEnum = super.getParameterNames(); while(paramEnum.hasMoreElements()) { paramNames.add((String) paramEnum.nextElement()); } paramNames.addAll(multipartParameters.keySet()); return Collections.enumeration(paramNames); } } public Map getParameterMap() { Map multipartParameters = this.getMultipartParameters(); if (multipartParameters.isEmpty()) { return super.getParameterMap(); } else { Map paramMap = new LinkedHashMap<>(); paramMap.putAll(super.getParameterMap()); paramMap.putAll(multipartParameters); return paramMap; } } public String getMultipartContentType(String paramOrFileName) { MultipartFile file = this.getFile(paramOrFileName); return file != null ? file.getContentType() : (String)this.getMultipartParameterContentTypes().get(paramOrFileName); } public HttpHeaders getMultipartHeaders(String paramOrFileName) { String contentType = this.getMultipartContentType(paramOrFileName); if (contentType != null) { HttpHeaders headers = new HttpHeaders(); headers.add("Content-Type", contentType); return headers; } else { return null; } } protected final void setMultipartParameters(Map multipartParameters) { this.multipartParameters = multipartParameters; } protected Map getMultipartParameters() { if (this.multipartParameters == null) { this.initializeMultipart(); } return this.multipartParameters; } protected final void setMultipartParameterContentTypes(Map multipartParameterContentTypes) { this.multipartParameterContentTypes = multipartParameterContentTypes; } protected Map getMultipartParameterContentTypes() { if (this.multipartParameterContentTypes == null) { this.initializeMultipart(); } return this.multipartParameterContentTypes; } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Citizen.java ================================================ package com.pinecone.summer.prototype; public interface Citizen { String vocationName(); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Component.java ================================================ package com.pinecone.summer.prototype; import java.lang.annotation.*; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Component { String value() default ""; } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/ConnectDispatcher.java ================================================ package com.pinecone.summer.prototype; import com.pinecone.summer.Connectiom; import javax.servlet.ServletException; import java.io.IOException; public interface ConnectDispatcher extends SequentialDispatcher { void invokeDispatchBus() throws ServletException, IOException ; void requestReceived() throws ServletException, IOException ; void afterConnectionAccepted( Connectiom connectiom) throws ServletException, IOException; /** Http Method Handler **/ void handleGet( Connectiom connectiom ) throws ServletException, IOException; void handlePost( Connectiom 
connectiom ) throws ServletException, IOException; void handleHead( Connectiom connectiom ) throws ServletException, IOException; void handleOptions( Connectiom connectiom ) throws ServletException, IOException; void handlePut( Connectiom connectiom ) throws ServletException, IOException; void handlePatch( Connectiom connectiom ) throws ServletException, IOException; void handleDelete( Connectiom connectiom ) throws ServletException, IOException; void handleTrace( Connectiom connectiom ) throws ServletException, IOException; /** Tracer **/ void traceSystemErrorMsg( String szTitle, String szErrorMsg ) throws IOException, ServletException; void traceSystemErrorMsg( int nErrorID, String szTitle, String szErrorMsg ) throws IOException, ServletException; void traceSystem404Error() throws IOException, ServletException; void traceSystem404Error( String szErrorMsg ) throws IOException, ServletException; void traceSystem500Error( String szErrorMsg ) throws IOException, ServletException; } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Connection.java ================================================ package com.pinecone.summer.prototype; import com.pinecone.summer.http.HttpMethod; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; public interface Connection { PrintWriter writer() throws IOException; ServletOutputStream out() throws IOException; HttpServletRequest getRequest(); HttpServletRequest getMultipartRequest(); boolean isMultipartRequest(); HttpServletResponse getResponse(); HttpServlet getServlet(); HttpMethod currentHttpMethod() ; ConnectDispatcher getDispatcher(); HostSystem getHostSystem(); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Connectson.java ================================================ package com.pinecone.summer.prototype; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.summer.multiparts.MultipartFile; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.Map; public interface Connectson extends Connection { JSONObject $_GPC(); JSONObject $_GET(); JSONObject $_POST(); default HttpServletRequest $_REQUEST(){ return this.getRequest(); } HttpServletRequest $_REQUEST ( boolean bUsingMultipart ); default HttpServletResponse $_RESPONSE() { return this.getResponse(); } Map $_FILES(); Map $_COOKIE(); } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Controller.java ================================================ package com.pinecone.summer.prototype; import java.lang.annotation.*; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented @Component public @interface Controller { String value() default ""; } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/GenieBottle.java ================================================ package com.pinecone.summer.prototype; import javax.servlet.ServletException; import java.io.IOException; public interface GenieBottle extends Wizardum, SequentialDispatcher { void dispatch() throws IOException, ServletException; void defaultGenie() throws Exception ; void 
beforeGenieInvoke() throws Exception ; void afterGenieInvoked() throws Exception ; }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/HostSystem.java ================================================
package com.pinecone.summer.prototype; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.summer.RouterType; import javax.servlet.ServletException;
public interface HostSystem { JSONObject getGlobalConfig() ; JSONObject getSystemConfig() ; JSONObject getPublicWizardConfig(); String getControlParameter() ; String getWizardParameter() ; String getModelParameter() ; void init() throws ServletException; String getSystemPath(); String getRootClassPath(); String getWizardSummonerConfig(); String getWizardPackageName(); String getModelClassSuffix(); String getControlClassSuffix(); ConnectDispatcher handleByDispatcher(RouterType routerType ); default ConnectDispatcher handleByDispatcher() { return this.handleByDispatcher( RouterType.QueryString ); } RouterDispatcher getPrimeRouterDispatcher(); }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/JSONBasedControl.java ================================================
package com.pinecone.summer.prototype; import javax.servlet.ServletException; import java.io.IOException; public interface JSONBasedControl { void beforeDispatch() throws IOException, ServletException; void dispatch() throws IOException, ServletException ; void afterDispatch() throws IOException, ServletException; String getControlCommand(); }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/JasperBasedModel.java ================================================
package com.pinecone.summer.prototype; interface JasperBasedModel { }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/ModelEnchanter.java ================================================
package com.pinecone.summer.prototype; import java.lang.annotation.*; @Target({ElementType.METHOD, ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface ModelEnchanter { boolean value() default true; }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Pagesion.java ================================================
package com.pinecone.summer.prototype; import javax.servlet.ServletException; import java.io.IOException;
/**
 * Summer JSON-based Pagina (Page) Ion, the template stereotype.
 * Acts as a controller by default, i.e. the equivalent of @Controller.
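 *
 * A minimal dispatch-lifecycle sketch (the servePage glue method and the
 * "IndexPage" nickname are hypothetical, shown only for illustration):
 * <pre>
 * void servePage( WizardSummoner summoner ) throws ServletException, IOException {
 *     Pagesion page = (Pagesion) summoner.summonIfExist( "IndexPage" );
 *     if ( page != null ) {
 *         page.beforeDispatch();
 *         page.dispatch();      // fill the JSON page model
 *         page.afterDispatch();
 *         page.render();        // Pageson.render: write the rendered page out
 *     }
 * }
 * </pre>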
 */
public interface Pagesion extends Pageson, Wizard { void beforeDispatch() throws IOException, ServletException; void dispatch() throws IOException, ServletException ; void afterDispatch() throws IOException, ServletException; }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Pageson.java ================================================
package com.pinecone.summer.prototype; import com.pinecone.framework.util.json.JSONObject; import javax.servlet.ServletException; import java.io.IOException; import java.lang.reflect.Method;
public interface Pageson extends Wizard { JSONObject getPageData(); String toJSONString(); String getModelCommand(); void setRenderum( Method fnRenderum ); void render() throws ServletException, IOException; void setEnchanterRole( boolean bRole ); boolean isEnchanter(); }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/RouterDispatcher.java ================================================
package com.pinecone.summer.prototype; public interface RouterDispatcher { }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/RouterMapping.java ================================================
package com.pinecone.summer.prototype; import com.pinecone.summer.http.HttpMethod; import java.lang.annotation.*; @Target({ElementType.METHOD, ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface RouterMapping { String name() default ""; String[] value() default {}; boolean relative() default true; // Only for methods.
HttpMethod[] method() default {}; String[] params() default {}; String[] headers() default {}; String[] consumes() default {}; String[] produces() default {}; }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/SequentialDispatcher.java ================================================
package com.pinecone.summer.prototype; import javax.servlet.ServletException; import java.io.IOException; public interface SequentialDispatcher { void dispatch() throws IOException, ServletException; void stop() throws RuntimeException; }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Servletson.java ================================================
package com.pinecone.summer.prototype; public interface Servletson { }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Wizard.java ================================================
package com.pinecone.summer.prototype; import com.pinecone.framework.system.prototype.Ally; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONObject;
/**
 * Bean Nuts Pinecone PineconeJava Summer - Wizard
 * ****************************************************************************************************************
 * Summer: JSON Based Java Servlet [C/C++ Style]
 * Matrix: Bean Nuts Pinecone C/CPP Runtime Framework Extension Fast CGI Servlet Summer (JSON Based MVC)
 * Notice: Pinecone is based on the JSON prototype.
 * Notice: All functions and methods are JSON based. We highly recommend using JSON as the data format; it is
 * easy to keep compatible with JS, PHP and other platforms.
 * Notice: Sub-modules must extend this interface; add whatever functions your JSON config declares.
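 *
 * For instance, a sub-module contract is declared by extending this interface
 * (the ReportWizard name and its method are hypothetical):
 * <pre>
 * public interface ReportWizard extends Wizard {
 *     JSONObject queryReport( JSONObject jsonArgs ); // JSON in, JSON out
 * }
 * </pre>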
 * ****************************************************************************************************************
 */
public interface Wizard extends Ally, Citizen { @Override default String vocationName(){ return this.getClass().getSimpleName(); } String prototypeName(); String getTitle(); JSONObject getModularConfig(); String getModularRole(); int getModularRoleIndex(); JSONArray getMyNaughtyGenies(); String getWizardCommand(); /*** Parent getter methods ***/ Connectson getConnection(); HostSystem getHostSystem(); ConnectDispatcher getDispatcher(); }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/WizardSummoner.java ================================================
package com.pinecone.summer.prototype; import com.pinecone.framework.system.executum.ExecutableSummoner; import javax.servlet.ServletException; import java.io.IOException;
public interface WizardSummoner extends ExecutableSummoner { HostSystem getSystem(); String queryNamespace( String szNickName ); Wizard getLastSummoned(); Wizard summonIfExist( String szNickName ) throws ServletException, IOException ; Wizard summonAndExecute( String szNickName ) throws ServletException, IOException ; }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Wizardum.java ================================================
package com.pinecone.summer.prototype; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.summer.multiparts.MultipartFile; import com.pinecone.summer.NaughtyGenieInvokedException; import javax.servlet.ServletOutputStream; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import java.util.Map;
/**
 * Pinecone For Java Wizardum [ Wizard Kernel Layer Prototype Interface ]
 * Copyright © 2008 - 2028 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Mr.A.R.B / WJH]
 * *****************************************************************************************
 * JSON Based: All dynamic map variables are JSON based.
 * PHP Style: QueryString, Form, Files, etc. are exposed PHP-style as $_GET, $_POST, and so on.
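 *
 * Accessor sketch inside a wizard method (the "id" field name and the optString
 * accessor are assumptions, for illustration only):
 * <pre>
 * JSONObject get  = this.$_GET();   // query-string parameters
 * JSONObject post = this.$_POST();  // form-body parameters
 * this.writer().print( get.optString( "id" ) );
 * </pre>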
 * *****************************************************************************************
 */
public interface Wizardum extends Wizard { JSONObject $_GPC(); JSONObject $_GET(); JSONObject $_POST(); PrintWriter writer() ; ServletOutputStream out() ; HttpServletRequest $_REQUEST(); HttpServletRequest getCurrentMultipartRequest(); HttpServletResponse $_RESPONSE(); Map $_COOKIE(); Map $_FILES(); void redirect( String szURL ) throws IOException; String spawnWizardQuerySpell( String szPrototype ); String spawnActionQuerySpell( String szActionFunctionName ) ; String spawnControlQuerySpell( String szControlFunctionName ) ; String spawnActionControlSpell( String szActionFnName, String szControlFnName ); Object summonNormalGenieByCallHisName( String szGenieName ) throws NaughtyGenieInvokedException; String getWizardCommand(); String getModelCommand(); String getControlCommand(); void stop() throws RuntimeException; }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/util/InvalidMimeTypeException.java ================================================
package com.pinecone.summer.util; public class InvalidMimeTypeException extends IllegalArgumentException { private String mimeType; public InvalidMimeTypeException(String mimeType, String message) { super("Invalid mime type \"" + mimeType + "\": " + message); this.mimeType = mimeType; } public String getMimeType() { return this.mimeType; } }
================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/util/MimeType.java ================================================
package com.pinecone.summer.util; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.CollectionUtils; import com.pinecone.framework.unit.LinkedCaseInsensitiveMap; import java.io.Serializable; import java.nio.charset.Charset; import java.util.BitSet; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.Locale; import java.util.Map; import java.util.TreeSet; import java.util.Map.Entry;
public class MimeType implements Comparable<MimeType>, Serializable { private static final long serialVersionUID = 4085923477777865903L; protected static final String WILDCARD_TYPE = "*"; private static final BitSet TOKEN; private static final String PARAM_CHARSET = "charset"; private final String type; private final String subtype; private final Map parameters;
public MimeType(String type) { this(type, "*"); }
public MimeType(String type, String subtype) { this(type, subtype, Collections.emptyMap()); }
public MimeType(String type, String subtype, Charset charSet) { this(type, subtype, Collections.singletonMap("charset", charSet.name())); }
public MimeType(MimeType other, Map parameters) { this(other.getType(), other.getSubtype(), parameters); }
public MimeType(String type, String subtype, Map parameters) { Assert.hasLength(type, "type must not be empty"); Assert.hasLength(subtype, "subtype must not be empty"); this.checkToken(type); this.checkToken(subtype); this.type = type.toLowerCase(Locale.ENGLISH); this.subtype = subtype.toLowerCase(Locale.ENGLISH); if (!CollectionUtils.isEmpty(parameters)) { Map map = new LinkedCaseInsensitiveMap<>(parameters.size(), Locale.ENGLISH); Iterator var5 = parameters.entrySet().iterator(); while(var5.hasNext()) { Entry entry = (Entry)var5.next(); String attribute = (String)entry.getKey(); String value = (String)entry.getValue(); this.checkParameters(attribute, value); map.put(attribute, value); } this.parameters
= Collections.unmodifiableMap(map); } else { this.parameters = Collections.emptyMap(); } } private void checkToken(String token) { for(int i = 0; i < token.length(); ++i) { char ch = token.charAt(i); if (!TOKEN.get(ch)) { throw new IllegalArgumentException("Invalid token character '" + ch + "' in token \"" + token + "\""); } } } protected void checkParameters(String attribute, String value) { Assert.hasLength(attribute, "parameter attribute must not be empty"); Assert.hasLength(value, "parameter value must not be empty"); this.checkToken(attribute); if ("charset".equals(attribute)) { value = this.unquote(value); Charset.forName(value); } else if (!this.isQuotedString(value)) { this.checkToken(value); } } private boolean isQuotedString(String s) { if (s.length() < 2) { return false; } else { return s.startsWith("\"") && s.endsWith("\"") || s.startsWith("'") && s.endsWith("'"); } } protected String unquote(String s) { if (s == null) { return null; } else { return this.isQuotedString(s) ? s.substring(1, s.length() - 1) : s; } } public boolean isWildcardType() { return "*".equals(this.getType()); } public boolean isWildcardSubtype() { return "*".equals(this.getSubtype()) || this.getSubtype().startsWith("*+"); } public boolean isConcrete() { return !this.isWildcardType() && !this.isWildcardSubtype(); } public String getType() { return this.type; } public String getSubtype() { return this.subtype; } public Charset getCharSet() { String charSet = this.getParameter("charset"); return charSet != null ? Charset.forName(this.unquote(charSet)) : null; } public String getParameter(String name) { return (String)this.parameters.get(name); } public Map getParameters() { return this.parameters; } public boolean includes(MimeType other) { if (other == null) { return false; } else if (this.isWildcardType()) { return true; } else { if (this.getType().equals(other.getType())) { if (this.getSubtype().equals(other.getSubtype())) { return true; } if (this.isWildcardSubtype()) { int thisPlusIdx = this.getSubtype().indexOf(43); if (thisPlusIdx == -1) { return true; } int otherPlusIdx = other.getSubtype().indexOf(43); if (otherPlusIdx != -1) { String thisSubtypeNoSuffix = this.getSubtype().substring(0, thisPlusIdx); String thisSubtypeSuffix = this.getSubtype().substring(thisPlusIdx + 1); String otherSubtypeSuffix = other.getSubtype().substring(otherPlusIdx + 1); if (thisSubtypeSuffix.equals(otherSubtypeSuffix) && "*".equals(thisSubtypeNoSuffix)) { return true; } } } } return false; } } public boolean isCompatibleWith(MimeType other) { if (other == null) { return false; } else if (!this.isWildcardType() && !other.isWildcardType()) { if (this.getType().equals(other.getType())) { if (this.getSubtype().equals(other.getSubtype())) { return true; } if (this.isWildcardSubtype() || other.isWildcardSubtype()) { int thisPlusIdx = this.getSubtype().indexOf(43); int otherPlusIdx = other.getSubtype().indexOf(43); if (thisPlusIdx == -1 && otherPlusIdx == -1) { return true; } if (thisPlusIdx != -1 && otherPlusIdx != -1) { String thisSubtypeNoSuffix = this.getSubtype().substring(0, thisPlusIdx); String otherSubtypeNoSuffix = other.getSubtype().substring(0, otherPlusIdx); String thisSubtypeSuffix = this.getSubtype().substring(thisPlusIdx + 1); String otherSubtypeSuffix = other.getSubtype().substring(otherPlusIdx + 1); if (thisSubtypeSuffix.equals(otherSubtypeSuffix) && ("*".equals(thisSubtypeNoSuffix) || "*".equals(otherSubtypeNoSuffix))) { return true; } } } } return false; } else { return true; } } public int compareTo(MimeType 
other) { int comp = this.getType().compareToIgnoreCase(other.getType()); if (comp != 0) { return comp; } else { comp = this.getSubtype().compareToIgnoreCase(other.getSubtype()); if (comp != 0) { return comp; } else { comp = this.getParameters().size() - other.getParameters().size(); if (comp != 0) { return comp; } else { TreeSet thisAttributes = new TreeSet(String.CASE_INSENSITIVE_ORDER); thisAttributes.addAll(this.getParameters().keySet()); TreeSet otherAttributes = new TreeSet(String.CASE_INSENSITIVE_ORDER); otherAttributes.addAll(other.getParameters().keySet()); Iterator thisAttributesIterator = thisAttributes.iterator(); Iterator otherAttributesIterator = otherAttributes.iterator(); do { if (!thisAttributesIterator.hasNext()) { return 0; } String thisAttribute = (String)thisAttributesIterator.next(); String otherAttribute = (String)otherAttributesIterator.next(); comp = thisAttribute.compareToIgnoreCase(otherAttribute); if (comp != 0) { return comp; } String thisValue = (String)this.getParameters().get(thisAttribute); String otherValue = (String)other.getParameters().get(otherAttribute); if (otherValue == null) { otherValue = ""; } comp = thisValue.compareTo(otherValue); } while(comp == 0); return comp; } } } }
public boolean equals(Object other) { if (this == other) { return true; } else if (!(other instanceof MimeType)) { return false; } else { MimeType otherType = (MimeType)other; return this.type.equalsIgnoreCase(otherType.type) && this.subtype.equalsIgnoreCase(otherType.subtype) && this.parameters.equals(otherType.parameters); } }
public int hashCode() { int result = this.type.hashCode(); result = 31 * result + this.subtype.hashCode(); result = 31 * result + this.parameters.hashCode(); return result; }
public String toString() { StringBuilder builder = new StringBuilder(); this.appendTo(builder); return builder.toString(); }
protected void appendTo(StringBuilder builder) { builder.append(this.type); builder.append('/'); builder.append(this.subtype); this.appendTo(this.parameters, builder); }
private void appendTo(Map map, StringBuilder builder) { Iterator var3 = map.entrySet().iterator(); while(var3.hasNext()) { Entry entry = (Entry)var3.next(); builder.append(';'); builder.append((String)entry.getKey()); builder.append('='); builder.append((String)entry.getValue()); } }
public static MimeType valueOf(String value) { return MimeTypeUtils.parseMimeType(value); }
static { BitSet ctl = new BitSet(128); for(int i = 0; i <= 31; ++i) { ctl.set(i); } ctl.set(127); BitSet separators = new BitSet(128); separators.set(40); separators.set(41); separators.set(60); separators.set(62); separators.set(64); separators.set(44); separators.set(59); separators.set(58); separators.set(92); separators.set(34); separators.set(47); separators.set(91); separators.set(93); separators.set(63); separators.set(61); separators.set(123); separators.set(125); separators.set(32); separators.set(9); TOKEN = new BitSet(128); TOKEN.set(0, 128); TOKEN.andNot(ctl); TOKEN.andNot(separators); }
public static class SpecificityComparator<T extends MimeType> implements Comparator<T> { public SpecificityComparator() { } public int compare(T mimeType1, T mimeType2) { if (mimeType1.isWildcardType() && !mimeType2.isWildcardType()) { return 1; } else if (mimeType2.isWildcardType() && !mimeType1.isWildcardType()) { return -1; } else if (!mimeType1.getType().equals(mimeType2.getType())) { return 0; } else if (mimeType1.isWildcardSubtype() && !mimeType2.isWildcardSubtype()) { return 1; } else if (mimeType2.isWildcardSubtype() &&
!mimeType1.isWildcardSubtype()) { return -1; } else { return !mimeType1.getSubtype().equals(mimeType2.getSubtype()) ? 0 : this.compareParameters(mimeType1, mimeType2); } } protected int compareParameters(T mimeType1, T mimeType2) { int paramsSize1 = mimeType1.getParameters().size(); int paramsSize2 = mimeType2.getParameters().size(); return paramsSize2 < paramsSize1 ? -1 : (paramsSize2 == paramsSize1 ? 0 : 1); } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/util/MimeTypeUtils.java ================================================ package com.pinecone.summer.util; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.StringUtils; import java.nio.charset.UnsupportedCharsetException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import com.pinecone.summer.util.MimeType.SpecificityComparator; public abstract class MimeTypeUtils { public static final MimeType ALL = MimeType.valueOf("*/*"); public static final String ALL_VALUE = "*/*"; public static final MimeType APPLICATION_ATOM_XML = MimeType.valueOf("application/atom+xml"); public static final String APPLICATION_ATOM_XML_VALUE = "application/atom+xml"; public static final MimeType APPLICATION_FORM_URLENCODED = MimeType.valueOf("application/x-www-form-urlencoded"); public static final String APPLICATION_FORM_URLENCODED_VALUE = "application/x-www-form-urlencoded"; public static final MimeType APPLICATION_JSON = MimeType.valueOf("application/json"); public static final String APPLICATION_JSON_VALUE = "application/json"; public static final MimeType APPLICATION_OCTET_STREAM = MimeType.valueOf("application/octet-stream"); public static final String APPLICATION_OCTET_STREAM_VALUE = "application/octet-stream"; public static final MimeType APPLICATION_XHTML_XML = MimeType.valueOf("application/xhtml+xml"); public static final String APPLICATION_XHTML_XML_VALUE = "application/xhtml+xml"; public static final MimeType APPLICATION_XML = MimeType.valueOf("application/xml"); public static final String APPLICATION_XML_VALUE = "application/xml"; public static final MimeType IMAGE_GIF = MimeType.valueOf("image/gif"); public static final String IMAGE_GIF_VALUE = "image/gif"; public static final MimeType IMAGE_JPEG = MimeType.valueOf("image/jpeg"); public static final String IMAGE_JPEG_VALUE = "image/jpeg"; public static final MimeType IMAGE_PNG = MimeType.valueOf("image/png"); public static final String IMAGE_PNG_VALUE = "image/png"; public static final MimeType MULTIPART_FORM_DATA = MimeType.valueOf("multipart/form-data"); public static final String MULTIPART_FORM_DATA_VALUE = "multipart/form-data"; public static final MimeType TEXT_HTML = MimeType.valueOf("text/html"); public static final String TEXT_HTML_VALUE = "text/html"; public static final MimeType TEXT_PLAIN = MimeType.valueOf("text/plain"); public static final String TEXT_PLAIN_VALUE = "text/plain"; public static final MimeType TEXT_XML = MimeType.valueOf("text/xml"); public static final String TEXT_XML_VALUE = "text/xml"; public static final Comparator SPECIFICITY_COMPARATOR = new SpecificityComparator(); public MimeTypeUtils() { } public static MimeType parseMimeType(String mimeType) { if (!StringUtils.hasLength(mimeType)) { throw new InvalidMimeTypeException(mimeType, "'mimeType' must not be empty"); } else { String[] parts = 
StringUtils.tokenizeToStringArray(mimeType, ";"); String fullType = parts[0].trim(); if ("*".equals(fullType)) { fullType = "*/*"; } int subIndex = fullType.indexOf(47); if (subIndex == -1) { throw new InvalidMimeTypeException(mimeType, "does not contain '/'"); } else if (subIndex == fullType.length() - 1) { throw new InvalidMimeTypeException(mimeType, "does not contain subtype after '/'"); } else { String type = fullType.substring(0, subIndex); String subtype = fullType.substring(subIndex + 1, fullType.length()); if ("*".equals(type) && !"*".equals(subtype)) { throw new InvalidMimeTypeException(mimeType, "wildcard type is legal only in '*/*' (all mime types)"); } else { Map parameters = null; if (parts.length > 1) { parameters = new LinkedHashMap(parts.length - 1); for(int i = 1; i < parts.length; ++i) { String parameter = parts[i]; int eqIndex = parameter.indexOf(61); if (eqIndex != -1) { String attribute = parameter.substring(0, eqIndex); String value = parameter.substring(eqIndex + 1, parameter.length()); parameters.put(attribute, value); } } } try { return new MimeType(type, subtype, parameters); } catch (UnsupportedCharsetException var12) { throw new InvalidMimeTypeException(mimeType, "unsupported charset '" + var12.getCharsetName() + "'"); } catch (IllegalArgumentException var13) { throw new InvalidMimeTypeException(mimeType, var13.getMessage()); } } } } } public static List parseMimeTypes(String mimeTypes) { if (!StringUtils.hasLength(mimeTypes)) { return Collections.emptyList(); } else { String[] tokens = mimeTypes.split(",\\s*"); List result = new ArrayList(tokens.length); String[] var3 = tokens; int var4 = tokens.length; for(int var5 = 0; var5 < var4; ++var5) { String token = var3[var5]; result.add(parseMimeType(token)); } return result; } } public static String toString(Collection mimeTypes) { StringBuilder builder = new StringBuilder(); Iterator iterator = mimeTypes.iterator(); while(iterator.hasNext()) { MimeType mimeType = (MimeType)iterator.next(); mimeType.appendTo(builder); if (iterator.hasNext()) { builder.append(", "); } } return builder.toString(); } public static void sortBySpecificity(List mimeTypes) { Assert.notNull(mimeTypes, "'mimeTypes' must not be null"); if (mimeTypes.size() > 1) { Collections.sort(mimeTypes, SPECIFICITY_COMPARATOR); } } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/util/ResourceUtils.java ================================================ package com.pinecone.summer.util; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.ClassUtils; import com.pinecone.framework.util.StringUtils; import java.io.File; import java.io.FileNotFoundException; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; public abstract class ResourceUtils { public static final String CLASSPATH_URL_PREFIX = "classpath:"; public static final String FILE_URL_PREFIX = "file:"; public static final String JAR_URL_PREFIX = "jar:"; public static final String URL_PROTOCOL_FILE = "file"; public static final String URL_PROTOCOL_JAR = "jar"; public static final String URL_PROTOCOL_ZIP = "zip"; public static final String URL_PROTOCOL_WSJAR = "wsjar"; public static final String URL_PROTOCOL_VFSZIP = "vfszip"; public static final String URL_PROTOCOL_VFSFILE = "vfsfile"; public static final String URL_PROTOCOL_VFS = "vfs"; public static final String JAR_FILE_EXTENSION = ".jar"; public static final String 
JAR_URL_SEPARATOR = "!/";
public ResourceUtils() { }
public static boolean isUrl(String resourceLocation) { if (resourceLocation == null) { return false; } else if (resourceLocation.startsWith("classpath:")) { return true; } else { try { new URL(resourceLocation); return true; } catch (MalformedURLException var2) { return false; } } }
public static URL getURL(String resourceLocation) throws FileNotFoundException { Assert.notNull(resourceLocation, "Resource location must not be null"); if (resourceLocation.startsWith("classpath:")) { String path = resourceLocation.substring("classpath:".length()); ClassLoader cl = ClassUtils.getDefaultClassLoader(); URL url = cl != null ? cl.getResource(path) : ClassLoader.getSystemResource(path); if (url == null) { String description = "class path resource [" + path + "]"; throw new FileNotFoundException(description + " cannot be resolved to URL because it does not exist"); } else { return url; } } else { try { return new URL(resourceLocation); } catch (MalformedURLException var6) { try { return (new File(resourceLocation)).toURI().toURL(); } catch (MalformedURLException var5) { throw new FileNotFoundException("Resource location [" + resourceLocation + "] is neither a URL nor a well-formed file path"); } } } }
public static File getFile(String resourceLocation) throws FileNotFoundException { Assert.notNull(resourceLocation, "Resource location must not be null"); if (resourceLocation.startsWith("classpath:")) { String path = resourceLocation.substring("classpath:".length()); String description = "class path resource [" + path + "]"; ClassLoader cl = ClassUtils.getDefaultClassLoader(); URL url = cl != null ? cl.getResource(path) : ClassLoader.getSystemResource(path); if (url == null) { throw new FileNotFoundException(description + " cannot be resolved to absolute file path because it does not exist"); } else { return getFile(url, description); } } else { try { return getFile(new URL(resourceLocation)); } catch (MalformedURLException var5) { return new File(resourceLocation); } } }
public static File getFile(URL resourceUrl) throws FileNotFoundException { return getFile(resourceUrl, "URL"); }
public static File getFile(URL resourceUrl, String description) throws FileNotFoundException { Assert.notNull(resourceUrl, "Resource URL must not be null"); if (!"file".equals(resourceUrl.getProtocol())) { throw new FileNotFoundException(description + " cannot be resolved to absolute file path " + "because it does not reside in the file system: " + resourceUrl); } else { try { return new File(toURI(resourceUrl).getSchemeSpecificPart()); } catch (URISyntaxException var3) { return new File(resourceUrl.getFile()); } } }
public static File getFile(URI resourceUri) throws FileNotFoundException { return getFile(resourceUri, "URI"); }
public static File getFile(URI resourceUri, String description) throws FileNotFoundException { Assert.notNull(resourceUri, "Resource URI must not be null"); if (!"file".equals(resourceUri.getScheme())) { throw new FileNotFoundException(description + " cannot be resolved to absolute file path " + "because it does not reside in the file system: " + resourceUri); } else { return new File(resourceUri.getSchemeSpecificPart()); } }
public static boolean isFileURL(URL url) { String protocol = url.getProtocol(); return "file".equals(protocol) || "vfsfile".equals(protocol) || "vfs".equals(protocol); }
public static boolean isJarURL(URL url) { String protocol = url.getProtocol(); return "jar".equals(protocol) || "zip".equals(protocol) ||
"vfszip".equals(protocol) || "wsjar".equals(protocol); } public static boolean isJarFileURL(URL url) { return "file".equals(url.getProtocol()) && url.getPath().toLowerCase().endsWith(".jar"); } public static URL extractJarFileURL(URL jarUrl) throws MalformedURLException { String urlFile = jarUrl.getFile(); int separatorIndex = urlFile.indexOf("!/"); if (separatorIndex != -1) { String jarFile = urlFile.substring(0, separatorIndex); try { return new URL(jarFile); } catch (MalformedURLException var5) { if (!jarFile.startsWith("/")) { jarFile = "/" + jarFile; } return new URL("file:" + jarFile); } } else { return jarUrl; } } public static URI toURI(URL url) throws URISyntaxException { return toURI(url.toString()); } public static URI toURI(String location) throws URISyntaxException { return new URI(StringUtils.replace(location, " ", "%20")); } public static void useCachesIfNecessary(URLConnection con) { con.setUseCaches(con.getClass().getSimpleName().startsWith("JNLP")); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/util/RouteUtils.java ================================================ package com.pinecone.summer.util; import javax.servlet.http.HttpServletRequest; public abstract class RouteUtils { private static final String[] HEADERS_TO_TRY = { "X-Forwarded-For", "x-forwarded-for", "Proxy-Client-IP", "WL-Proxy-Client-IP", "HTTP_X_FORWARDED_FOR", "HTTP_X_FORWARDED", "HTTP_X_CLUSTER_CLIENT_IP", "HTTP_CLIENT_IP", "HTTP_FORWARDED_FOR", "HTTP_FORWARDED", "HTTP_VIA", "REMOTE_ADDR", "X-Real-IP" }; public static String getRealRemoteAddr( HttpServletRequest request ) { for ( String header : RouteUtils.HEADERS_TO_TRY ) { String ip = request.getHeader(header); if ( ip != null && ip.length() != 0 && !"unknown".equalsIgnoreCase(ip) ) { return ip; } } return request.getRemoteAddr(); } } ================================================ FILE: Pinecones/Summer/src/main/java/com/pinecone/summer/util/WebUtils.java ================================================ package com.pinecone.summer.util; import com.pinecone.framework.util.Assert; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.unit.LinkedMultiValueMap; import com.pinecone.framework.unit.MultiValueMap; import javax.servlet.*; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import java.io.File; import java.io.FileNotFoundException; import java.util.*; public abstract class WebUtils { public static final String INCLUDE_REQUEST_URI_ATTRIBUTE = "javax.servlet.include.request_uri"; public static final String INCLUDE_CONTEXT_PATH_ATTRIBUTE = "javax.servlet.include.context_path"; public static final String INCLUDE_SERVLET_PATH_ATTRIBUTE = "javax.servlet.include.servlet_path"; public static final String INCLUDE_PATH_INFO_ATTRIBUTE = "javax.servlet.include.path_info"; public static final String INCLUDE_QUERY_STRING_ATTRIBUTE = "javax.servlet.include.query_string"; public static final String FORWARD_REQUEST_URI_ATTRIBUTE = "javax.servlet.forward.request_uri"; public static final String FORWARD_CONTEXT_PATH_ATTRIBUTE = "javax.servlet.forward.context_path"; public static final String FORWARD_SERVLET_PATH_ATTRIBUTE = "javax.servlet.forward.servlet_path"; public static final String FORWARD_PATH_INFO_ATTRIBUTE = "javax.servlet.forward.path_info"; public static final String FORWARD_QUERY_STRING_ATTRIBUTE = "javax.servlet.forward.query_string"; public static final String ERROR_STATUS_CODE_ATTRIBUTE = 
"javax.servlet.error.status_code"; public static final String ERROR_EXCEPTION_TYPE_ATTRIBUTE = "javax.servlet.error.exception_type"; public static final String ERROR_MESSAGE_ATTRIBUTE = "javax.servlet.error.message"; public static final String ERROR_EXCEPTION_ATTRIBUTE = "javax.servlet.error.exception"; public static final String ERROR_REQUEST_URI_ATTRIBUTE = "javax.servlet.error.request_uri"; public static final String ERROR_SERVLET_NAME_ATTRIBUTE = "javax.servlet.error.servlet_name"; public static final String CONTENT_TYPE_CHARSET_PREFIX = ";charset="; public static final String DEFAULT_CHARACTER_ENCODING = "ISO-8859-1"; public static final String TEMP_DIR_CONTEXT_ATTRIBUTE = "javax.servlet.context.tempdir"; public static final String HTML_ESCAPE_CONTEXT_PARAM = "defaultHtmlEscape"; public static final String RESPONSE_ENCODED_HTML_ESCAPE_CONTEXT_PARAM = "responseEncodedHtmlEscape"; public static final String WEB_APP_ROOT_KEY_PARAM = "webAppRootKey"; public static final String DEFAULT_WEB_APP_ROOT_KEY = "webapp.root"; public static final String[] SUBMIT_IMAGE_SUFFIXES = new String[]{".x", ".y"}; public static final String SESSION_MUTEX_ATTRIBUTE = WebUtils.class.getName() + ".MUTEX"; public WebUtils() { } public static void setWebAppRootSystemProperty(ServletContext servletContext) throws IllegalStateException { Assert.notNull(servletContext, "ServletContext must not be null"); String root = servletContext.getRealPath("/"); if (root == null) { throw new IllegalStateException("Cannot set web app root system property when WAR file is not expanded"); } else { String param = servletContext.getInitParameter("webAppRootKey"); String key = param != null ? param : "webapp.root"; String oldValue = System.getProperty(key); if (oldValue != null && !StringUtils.pathEquals(oldValue, root)) { throw new IllegalStateException("Web app root system property already set to different value: '" + key + "' = [" + oldValue + "] instead of [" + root + "] - " + "Choose unique values for the 'webAppRootKey' context-param in your web.xml files!"); } else { System.setProperty(key, root); servletContext.log("Set web app root system property: '" + key + "' = [" + root + "]"); } } } public static void removeWebAppRootSystemProperty(ServletContext servletContext) { Assert.notNull(servletContext, "ServletContext must not be null"); String param = servletContext.getInitParameter("webAppRootKey"); String key = param != null ? param : "webapp.root"; System.getProperties().remove(key); } @Deprecated public static boolean isDefaultHtmlEscape(ServletContext servletContext) { if (servletContext == null) { return false; } else { String param = servletContext.getInitParameter("defaultHtmlEscape"); return Boolean.valueOf(param); } } public static Boolean getDefaultHtmlEscape(ServletContext servletContext) { if (servletContext == null) { return null; } else { String param = servletContext.getInitParameter("defaultHtmlEscape"); return StringUtils.hasText(param) ? Boolean.valueOf(param) : null; } } public static Boolean getResponseEncodedHtmlEscape(ServletContext servletContext) { if (servletContext == null) { return null; } else { String param = servletContext.getInitParameter("responseEncodedHtmlEscape"); return StringUtils.hasText(param) ? 
Boolean.valueOf(param) : null; } } public static File getTempDir(ServletContext servletContext) { Assert.notNull(servletContext, "ServletContext must not be null"); return (File)servletContext.getAttribute("javax.servlet.context.tempdir"); } public static String getRealPath(ServletContext servletContext, String path) throws FileNotFoundException { Assert.notNull(servletContext, "ServletContext must not be null"); if (!path.startsWith("/")) { path = "/" + path; } String realPath = servletContext.getRealPath(path); if (realPath == null) { throw new FileNotFoundException("ServletContext resource [" + path + "] cannot be resolved to absolute file path - " + "web application archive not expanded?"); } else { return realPath; } } public static String getSessionId(HttpServletRequest request) { Assert.notNull(request, "Request must not be null"); HttpSession session = request.getSession(false); return session != null ? session.getId() : null; } public static Object getSessionAttribute(HttpServletRequest request, String name) { Assert.notNull(request, "Request must not be null"); HttpSession session = request.getSession(false); return session != null ? session.getAttribute(name) : null; } public static Object getRequiredSessionAttribute(HttpServletRequest request, String name) throws IllegalStateException { Object attr = getSessionAttribute(request, name); if (attr == null) { throw new IllegalStateException("No session attribute '" + name + "' found"); } else { return attr; } } public static void setSessionAttribute(HttpServletRequest request, String name, Object value) { Assert.notNull(request, "Request must not be null"); if (value != null) { request.getSession().setAttribute(name, value); } else { HttpSession session = request.getSession(false); if (session != null) { session.removeAttribute(name); } } } public static Object getOrCreateSessionAttribute(HttpSession session, String name, Class clazz) throws IllegalArgumentException { Assert.notNull(session, "Session must not be null"); Object sessionObject = session.getAttribute(name); if (sessionObject == null) { try { sessionObject = clazz.newInstance(); } catch (InstantiationException var5) { throw new IllegalArgumentException("Could not instantiate class [" + clazz.getName() + "] for session attribute '" + name + "': " + var5.getMessage()); } catch (IllegalAccessException var6) { throw new IllegalArgumentException("Could not access default constructor of class [" + clazz.getName() + "] for session attribute '" + name + "': " + var6.getMessage()); } session.setAttribute(name, sessionObject); } return sessionObject; } public static Object getSessionMutex(HttpSession session) { Assert.notNull(session, "Session must not be null"); Object mutex = session.getAttribute(SESSION_MUTEX_ATTRIBUTE); if (mutex == null) { mutex = session; } return mutex; } public static T getNativeRequest(ServletRequest request, Class requiredType) { if (requiredType != null) { if (requiredType.isInstance(request)) { return (T) request; } if (request instanceof ServletRequestWrapper) { return getNativeRequest(((ServletRequestWrapper)request).getRequest(), requiredType); } } return null; } public static T getNativeResponse(ServletResponse response, Class requiredType) { if (requiredType != null) { if (requiredType.isInstance(response)) { return (T) response; } if (response instanceof ServletResponseWrapper) { return getNativeResponse(((ServletResponseWrapper)response).getResponse(), requiredType); } } return null; } public static boolean isIncludeRequest(ServletRequest 
request) { return request.getAttribute("javax.servlet.include.request_uri") != null; } public static void exposeErrorRequestAttributes(HttpServletRequest request, Throwable ex, String servletName) { exposeRequestAttributeIfNotPresent(request, "javax.servlet.error.status_code", 200); exposeRequestAttributeIfNotPresent(request, "javax.servlet.error.exception_type", ex.getClass()); exposeRequestAttributeIfNotPresent(request, "javax.servlet.error.message", ex.getMessage()); exposeRequestAttributeIfNotPresent(request, "javax.servlet.error.exception", ex); exposeRequestAttributeIfNotPresent(request, "javax.servlet.error.request_uri", request.getRequestURI()); exposeRequestAttributeIfNotPresent(request, "javax.servlet.error.servlet_name", servletName); } private static void exposeRequestAttributeIfNotPresent(ServletRequest request, String name, Object value) { if (request.getAttribute(name) == null) { request.setAttribute(name, value); } } public static void clearErrorRequestAttributes(HttpServletRequest request) { request.removeAttribute("javax.servlet.error.status_code"); request.removeAttribute("javax.servlet.error.exception_type"); request.removeAttribute("javax.servlet.error.message"); request.removeAttribute("javax.servlet.error.exception"); request.removeAttribute("javax.servlet.error.request_uri"); request.removeAttribute("javax.servlet.error.servlet_name"); } public static void exposeRequestAttributes(ServletRequest request, Map attributes) { Assert.notNull(request, "Request must not be null"); Assert.notNull(attributes, "Attributes Map must not be null"); Iterator var2 = attributes.entrySet().iterator(); while(var2.hasNext()) { Map.Entry entry = (Map.Entry)var2.next(); request.setAttribute((String)entry.getKey(), entry.getValue()); } } public static Cookie getCookie(HttpServletRequest request, String name) { Assert.notNull(request, "Request must not be null"); Cookie[] cookies = request.getCookies(); if (cookies != null) { Cookie[] var3 = cookies; int var4 = cookies.length; for(int var5 = 0; var5 < var4; ++var5) { Cookie cookie = var3[var5]; if (name.equals(cookie.getName())) { return cookie; } } } return null; } public static boolean hasSubmitParameter(ServletRequest request, String name) { Assert.notNull(request, "Request must not be null"); if (request.getParameter(name) != null) { return true; } else { String[] var2 = SUBMIT_IMAGE_SUFFIXES; int var3 = var2.length; for(int var4 = 0; var4 < var3; ++var4) { String suffix = var2[var4]; if (request.getParameter(name + suffix) != null) { return true; } } return false; } } public static String findParameterValue(ServletRequest request, String name) { return findParameterValue(request.getParameterMap(), name); } public static String findParameterValue(Map parameters, String name) { Object value = parameters.get(name); if (value instanceof String[]) { String[] values = (String[])((String[])value); return values.length > 0 ? 
values[0] : null; } else if (value != null) { return value.toString(); } else { String prefix = name + "_"; Iterator var4 = parameters.keySet().iterator(); String paramName; do { if (!var4.hasNext()) { return null; } paramName = (String)var4.next(); } while(!paramName.startsWith(prefix)); String[] var6 = SUBMIT_IMAGE_SUFFIXES; int var7 = var6.length; for(int var8 = 0; var8 < var7; ++var8) { String suffix = var6[var8]; if (paramName.endsWith(suffix)) { return paramName.substring(prefix.length(), paramName.length() - suffix.length()); } } return paramName.substring(prefix.length()); } } public static Map getParametersStartingWith(ServletRequest request, String prefix) { Assert.notNull(request, "Request must not be null"); Enumeration paramNames = request.getParameterNames(); Map params = new TreeMap(); if (prefix == null) { prefix = ""; } while(paramNames != null && paramNames.hasMoreElements()) { String paramName = (String)paramNames.nextElement(); if ("".equals(prefix) || paramName.startsWith(prefix)) { String unprefixed = paramName.substring(prefix.length()); String[] values = request.getParameterValues(paramName); if (values != null && values.length != 0) { if (values.length > 1) { params.put(unprefixed, values); } else { params.put(unprefixed, values[0]); } } } } return params; } public static int getTargetPage(ServletRequest request, String paramPrefix, int currentPage) { Enumeration paramNames = request.getParameterNames(); String paramName; do { if (!paramNames.hasMoreElements()) { return currentPage; } paramName = (String)paramNames.nextElement(); } while(!paramName.startsWith(paramPrefix)); for(int i = 0; i < SUBMIT_IMAGE_SUFFIXES.length; ++i) { String suffix = SUBMIT_IMAGE_SUFFIXES[i]; if (paramName.endsWith(suffix)) { paramName = paramName.substring(0, paramName.length() - suffix.length()); } } return Integer.parseInt(paramName.substring(paramPrefix.length())); } public static String extractFilenameFromUrlPath(String urlPath) { String filename = extractFullFilenameFromUrlPath(urlPath); int dotIndex = filename.lastIndexOf(46); if (dotIndex != -1) { filename = filename.substring(0, dotIndex); } return filename; } public static String extractFullFilenameFromUrlPath(String urlPath) { int end = urlPath.indexOf(59); if (end == -1) { end = urlPath.indexOf(63); if (end == -1) { end = urlPath.length(); } } int begin = urlPath.lastIndexOf(47, end) + 1; return urlPath.substring(begin, end); } public static MultiValueMap parseMatrixVariables(String matrixVariables) { MultiValueMap result = new LinkedMultiValueMap(); if (!StringUtils.hasText(matrixVariables)) { return result; } else { StringTokenizer pairs = new StringTokenizer(matrixVariables, ";"); while(true) { while(pairs.hasMoreTokens()) { String pair = pairs.nextToken(); int index = pair.indexOf(61); if (index != -1) { String name = pair.substring(0, index); String rawValue = pair.substring(index + 1); String[] var7 = StringUtils.commaDelimitedListToStringArray(rawValue); int var8 = var7.length; for(int var9 = 0; var9 < var8; ++var9) { String value = var7[var9]; result.add(name, value); } } else { result.add(pair, ""); } } return result; } } } } ================================================ FILE: Pinecones/Ulfhedinn/pom.xml ================================================ pinecones com.pinecones 2.5.1 org.apache.maven.plugins maven-surefire-plugin 3.1.2 **/*Tests.java **/*Test.java **/Test*.java org.apache.maven.plugins maven-compiler-plugin 9 9 4.0.0 com.pinecone.ulf ulfhedinn 1.2.1 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 
compile commons-collections commons-collections 3.2.2 commons-lang commons-lang 2.6 org.apache.commons commons-lang3 3.12.0 org.javassist javassist 3.29.0-GA org.thymeleaf thymeleaf 3.0.12.RELEASE org.freemarker freemarker 2.3.31 org.junit.jupiter junit-jupiter-api 5.8.2 test org.junit.jupiter junit-jupiter-engine 5.0.1 test org.assertj assertj-core 3.24.2 test ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/ArchJSONDecompiler.java ================================================ package com.pinecone.ulf.util.bson; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONCompilerException; import com.pinecone.framework.util.json.JSONDecompiler; import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; import java.math.BigInteger; import java.util.List; import java.util.Map; public abstract class ArchJSONDecompiler implements JSONDecompiler { protected int mnParseAt ; protected InputStream mInputStream; public ArchJSONDecompiler( InputStream is ) { this.mInputStream = is; this.mnParseAt = 0; } protected int nextByte() throws JSONCompilerException { try { int b = this.mInputStream.read(); if( b < 0 ) { throw new JSONCompilerException( "Illegal decompiled byte or IO error.", this.mnParseAt ); } ++this.mnParseAt; return b; } catch ( IOException e ){ throw new JSONCompilerException( e, this.mnParseAt ); } } protected byte[] nextBytes( int length ) throws JSONCompilerException { byte[] bytes = new byte[ length ]; try { int read = this.mInputStream.read( bytes ); if ( read != length ) { throw new JSONCompilerException( "Unexpected end of stream.", this.mnParseAt ); } this.mnParseAt += length; return bytes; } catch ( IOException e ) { throw new JSONCompilerException( e, this.mnParseAt ); } } protected String nextString() throws JSONCompilerException { int length = Bytes.bytesToInt32LE( this.nextBytes( 4 ) ); byte[] bytes = this.nextBytes( length ); return new String( bytes ); } protected short nextInt16() throws JSONCompilerException { return Bytes.bytesToInt16LE( this.nextBytes(2) ); } protected int nextInt32() throws JSONCompilerException { return Bytes.bytesToInt32LE( this.nextBytes(4) ); } protected long nextInt64() throws JSONCompilerException { return Bytes.bytesToInt64LE( this.nextBytes(8) ); } protected float nextFloat32() throws JSONCompilerException { return Bytes.bytesToFloat32LE( this.nextBytes(4) ); } protected double nextFloat64() throws JSONCompilerException { return Bytes.bytesToFloat64LE( this.nextBytes(8) ); } protected boolean nextBool() throws JSONCompilerException { return this.nextByte() != 0; } protected BigInteger nextBigInteger() throws JSONCompilerException { int length = Bytes.bytesToInt32LE( this.nextBytes(4) ); byte[] bytes = this.nextBytes(length); return new BigInteger(bytes); } protected BigDecimal nextBigDecimal() throws JSONCompilerException { int length = Bytes.bytesToInt32LE( this.nextBytes(4) ); byte[] bytes = nextBytes(length); int scale = Bytes.bytesToInt32LE( this.nextBytes(4) ); return new BigDecimal( new BigInteger(bytes), scale ); } protected abstract List newJSONArray( Object parent ); protected abstract Map newJSONObject( Object parent ); protected Object nextJSONObject( Object parent ) throws JSONCompilerException { Map map = this.newJSONObject( parent ); int length = (int) Bytes.bytesToInt64LE( this.nextBytes(8) ); for ( int i = 0; i < length; ++i ) { Object k = this.nextValue(); 
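// The key was just read as an arbitrary value; the wire format itself does not
// constrain its type, so it is validated as a String before being used below.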
String key ; if( !(k instanceof String) ) { throw new JSONCompilerException( "Illegal JSONObject::Key, key should be String.", this.mnParseAt ); } key = (String) k; Object value = this.nextValue( map ); map.put( key, value ); } int endType = this.nextByte(); if ( endType != DataTypeCode.JSONOBJECT_END.getValue() ) { throw new JSONCompilerException("Expected end of JSON object.", this.mnParseAt); } return map; } protected Object nextJSONArray( Object parent ) throws JSONCompilerException { List list = this.newJSONArray( parent ); int length = (int) Bytes.bytesToInt64LE( this.nextBytes(8) ); for ( int i = 0; i < length; ++i ) { Object value = this.nextValue( list ); list.add( value ); } int endType = this.nextByte(); if ( endType != DataTypeCode.JSONARRAY_END.getValue() ) { throw new JSONCompilerException( "Expected end of JSON array.", this.mnParseAt ); } return list; } protected Object nextUnidentifiedObject( int type ) throws JSONCompilerException { throw new JSONCompilerException( "Unidentified compiled bytecode `[0x" + Integer.toHexString( type ).toUpperCase() + "]`, with unknown version or damaged binary data.", this.mnParseAt ); } @Override public Object nextValue( Object parent ) throws JSONCompilerException { int type = this.nextByte(); try{ DataTypeCode typeCode = DataTypeCode.asCode( type ); switch ( typeCode ) { case NULL: case UNDEFINED: { return JSON.NULL; } case STRING: { return this.nextString(); } case BYTE8: { return this.nextByte(); } case INT16: { return this.nextInt16(); } case INT32: { return this.nextInt32(); } case INT64: { return this.nextInt64(); } case FLOAT32: { return this.nextFloat32(); } case FLOAT64: { return this.nextFloat64(); } case BOOL: { return this.nextBool(); } case BIG_INTEGER: { return this.nextBigInteger(); } case BIG_DECIMAL: { return this.nextBigDecimal(); } case JSONOBJECT: { return this.nextJSONObject( parent ); } case JSONARRAY: { return this.nextJSONArray( parent ); } default: { return this.nextUnidentifiedObject( type ); } } } catch ( IllegalArgumentException e ) { return this.nextUnidentifiedObject( type ); } } @Override public Object nextValue() throws JSONCompilerException { return this.nextValue( null ); } @Override public Object decompile( Object parent ) { try{ return this.nextValue( parent ); } catch ( JSONCompilerException e ) { return null; } } @Override public Object decompile() { return this.decompile( null ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/DataTypeCode.java ================================================ package com.pinecone.ulf.util.bson; public enum DataTypeCode { UNDEFINED ( 0x01, "Undefined" ), NULL ( 0x02, "Null" ), BYTE8 ( 0x03, "Byte8" ), INT16 ( 0x04, "Int16" ), INT32 ( 0x05, "Int32" ), INT64 ( 0x06, "Int64" ), FLOAT32 ( 0x07, "Float32" ), FLOAT64 ( 0x08, "Float64" ), BOOL ( 0x09, "Bool" ), BIG_INTEGER ( 0x0A, "BigInteger" ), BIG_DECIMAL ( 0x0B, "BigDecimal" ), STRING ( 0x0C, "String" ), JSONOBJECT ( 0xFA, "JSONObject" ), JSONARRAY ( 0xFB, "JSONArray" ), JSONOBJECT_END ( 0xEA, "JSONObject$End" ), JSONARRAY_END ( 0xEB, "JSONArray$End" ), SERIALIZABLE_OBJ ( 0xFC, "SerializableObj" ); private final int value; private final String name; DataTypeCode( int codeVal, String name ){ this.value = codeVal; this.name = name; } public String getName(){ return this.name; } public int getValue() { return this.value; } public byte getByteValue() { return (byte) this.value; } @Override public String toString() { return this.getName(); } public static 
DataTypeCode asCode( int codeVal ) { for ( DataTypeCode type : DataTypeCode.values() ) { if ( type.getValue() == codeVal ) { return type; } } throw new IllegalArgumentException( "Invalid DataTypeCode value: " + codeVal ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/UlfJSONCompiler.java ================================================ package com.pinecone.ulf.util.bson; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.json.JSON; import com.pinecone.framework.util.json.JSONCompiler; import com.pinecone.framework.util.json.JSONString; import com.pinecone.framework.util.json.binary.BsonTraits; import com.pinecone.framework.util.json.binary.Bsonut; import java.io.IOException; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Collection; import java.util.Map; public class UlfJSONCompiler implements JSONCompiler { public UlfJSONCompiler() { } protected OutputStream compileUnidentifiedObject ( Object that, OutputStream outputStream ) throws IOException { // Ignore them return outputStream; } protected OutputStream compileUnknownAnyObject ( Object that, OutputStream outputStream ) throws IOException { if ( that != null ) { try { BsonTraits.invokeBsonSerialize( that, outputStream ); } catch ( Exception e ){ try { outputStream.write( BsonTraits.invokeToBsonBytes( that ) ); } catch ( Exception e1 ){ return this.compileUnidentifiedObject( that, outputStream ) ; } } } else { outputStream.write( DataTypeCode.NULL.getValue() ); } return outputStream; } @Override public OutputStream compile( Map that, OutputStream outputStream ) throws IOException { outputStream.write( DataTypeCode.JSONOBJECT.getValue() ); Map map = (Map) that; outputStream.write( Bytes.int64ToBytesLE( map.size() ) ); for ( Map.Entry entry : map.entrySet() ) { this.compile( entry.getKey(), outputStream ); this.compile( entry.getValue(), outputStream ); } outputStream.write( DataTypeCode.JSONOBJECT_END.getValue() ); return outputStream; } @Override public OutputStream compile( Collection that, OutputStream outputStream ) throws IOException { outputStream.write( DataTypeCode.JSONARRAY.getValue() ); Collection collection = (Collection) that; outputStream.write( Bytes.int64ToBytesLE( collection.size() ) ); for ( Object item : collection ) { this.compile( item, outputStream ); } outputStream.write( DataTypeCode.JSONARRAY_END.getValue() ); return outputStream; } @Override public OutputStream compile( Object[] those, OutputStream outputStream ) throws IOException { outputStream.write( DataTypeCode.JSONARRAY.getValue() ); int length = those.length; outputStream.write( Bytes.int64ToBytesLE( length ) ); for ( int i = 0; i < length; ++i ) { this.compile( those[i], outputStream ); } outputStream.write( DataTypeCode.JSONARRAY_END.getValue() ); return outputStream; } @Override public OutputStream compile( Object that, OutputStream outputStream ) throws IOException { if ( that != null ) { if ( that == JSON.NULL ) { outputStream.write( DataTypeCode.NULL.getValue() ); } else if ( that instanceof Map ) { this.compile( (Map) that, outputStream ); } else if ( that instanceof Collection ) { this.compile( (Collection) that, outputStream ); } else if ( that instanceof String ) { String str = (String) that; outputStream.write( DataTypeCode.STRING.getValue() ); outputStream.write( Bytes.int32ToBytesLE( str.length() ) ); outputStream.write( 
str.getBytes()); } else if ( that.getClass().isArray() ) { this.compile( (Object[]) that, outputStream ); } else if ( that instanceof Number ) { if ( that instanceof Byte ) { outputStream.write( DataTypeCode.BYTE8.getValue() ); outputStream.write( (Byte) that ); } else if ( that instanceof Short ) { outputStream.write( DataTypeCode.INT16.getValue() ); outputStream.write( Bytes.int16ToBytesLE((Short) that)); } else if ( that instanceof Integer ) { outputStream.write( DataTypeCode.INT32.getValue() ); outputStream.write( Bytes.int32ToBytesLE((Integer) that)); } else if ( that instanceof Long ) { outputStream.write( DataTypeCode.INT64.getValue() ); outputStream.write( Bytes.int64ToBytesLE((Long) that)); } else if ( that instanceof Float ) { outputStream.write( DataTypeCode.FLOAT32.getValue() ); outputStream.write( Bytes.float32ToBytesLE((Float) that)); } else if ( that instanceof Double ) { outputStream.write( DataTypeCode.FLOAT64.getValue() ); outputStream.write( Bytes.float64ToBytesLE((Double) that)); } else if ( that instanceof BigInteger ) { outputStream.write( DataTypeCode.BIG_INTEGER.getValue() ); byte[] bigIntBytes = ((BigInteger) that).toByteArray(); outputStream.write( Bytes.int32ToBytesLE( bigIntBytes.length ) ); outputStream.write( bigIntBytes ); } else if ( that instanceof BigDecimal ) { outputStream.write( DataTypeCode.BIG_DECIMAL.getValue() ); BigDecimal bigDecimal = (BigDecimal) that; byte[] bigIntBytes = bigDecimal.unscaledValue().toByteArray(); int scale = bigDecimal.scale(); outputStream.write( Bytes.int32ToBytesLE( bigIntBytes.length ) ); outputStream.write( bigIntBytes); outputStream.write( Bytes.int32ToBytesLE(scale) ); } } else if ( that instanceof Boolean ) { outputStream.write( DataTypeCode.BOOL.getValue() ); outputStream.write( (Boolean) that ? 
1 : 0 ); } else if ( that instanceof JSONString ) { String jsonString = ( (JSONString) that).toJSONString(); outputStream.write( DataTypeCode.STRING.getValue() ); outputStream.write( Bytes.int32ToBytesLE( jsonString.length() ) ); outputStream.write( jsonString.getBytes()); } else if ( that instanceof Bsonut ) { (( Bsonut ) that).bsonSerialize( outputStream ); } else { this.compileUnknownAnyObject( that, outputStream ); } } else { outputStream.write( DataTypeCode.NULL.getValue() ); } return outputStream; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/UlfJSONDecompiler.java ================================================ package com.pinecone.ulf.util.bson; import com.pinecone.framework.util.json.JSONArraytron; import com.pinecone.framework.util.json.JSONDecompiler; import com.pinecone.framework.util.json.JSONMaptron; import java.io.InputStream; import java.util.List; import java.util.Map; public class UlfJSONDecompiler extends ArchJSONDecompiler implements JSONDecompiler { public UlfJSONDecompiler(InputStream is ) { super( is ); } @Override protected Map newJSONObject( Object parent ) { return new JSONMaptron(); } @Override protected List newJSONArray( Object parent ) { return new JSONArraytron(); } @Override public Object decompile( Object parent ) { return super.decompile( parent ); } @Override public Object decompile() { return this.decompile( null ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/GUIDs.java ================================================ package com.pinecone.ulf.util.guid; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.GuidAllocator128V2; import com.pinecone.ulf.util.guid.i128.GuidAllocator128V7; import com.pinecone.ulf.util.guid.i128.GuidAllocatorHC128V7; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i64.GUID64; import com.pinecone.ulf.util.guid.i64.GUID72; import com.pinecone.ulf.util.guid.i64.GuidAllocator72V2; import com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner; public final class GUIDs { public static GUID64 GUID64( String s ) { return new GUID64( s ); } public static GUID72 GUID72( String s ) { return new GUID72( s ); } public static GUID128 GUID128( String s ) { UUID128 uuid128 = new UUID128(s); return uuid128; } public static GUID72 Dummy72() { return new GUID72(); } public static UUID128 Dummy128() { return new UUID128(); } public static GuidAllocator newGuidAllocator( WorkerIdAssigner idAssigner ) { if( idAssigner == null ) { return new GuidAllocator72V2(); } return new GuidAllocator72V2( idAssigner ); } public static GuidAllocator newGuidAllocator() { return newGuidAllocator( 0 ); } public static GuidAllocator newGuidAllocator( int machineId ) { if ( machineId <= 0 ) { return new GuidAllocator128V7(); } return new GuidAllocatorHC128V7( machineId ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/ArchGuidAllocator128.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.util.Arrays; public abstract class ArchGuidAllocator128 implements GuidAllocator128 { public static final int GUID_CHARS = 36; @Override public GUID parse( String hexId ) { return Parser.parse( hexId ); } static final class Parser { private 
static final byte[] MAP; static { byte[] mapping = new byte[256]; Arrays.fill(mapping, (byte) -1); mapping['0'] = 0; mapping['1'] = 1; mapping['2'] = 2; mapping['3'] = 3; mapping['4'] = 4; mapping['5'] = 5; mapping['6'] = 6; mapping['7'] = 7; mapping['8'] = 8; mapping['9'] = 9; mapping['a'] = 10; mapping['b'] = 11; mapping['c'] = 12; mapping['d'] = 13; mapping['e'] = 14; mapping['f'] = 15; mapping['A'] = 10; mapping['B'] = 11; mapping['C'] = 12; mapping['D'] = 13; mapping['E'] = 14; mapping['F'] = 15; MAP = mapping; } private static final int DASH_POSITION_1 = 8; private static final int DASH_POSITION_2 = 13; private static final int DASH_POSITION_3 = 18; private static final int DASH_POSITION_4 = 23; public static GUID parse(final String string) { UUID128 neo = new UUID128(); parse( string, neo ); return neo; } public static void parse(final String string, UUID128 that) { validate(string); long msb = 0; long lsb = 0; for (int i = 0; i < 8; i++) { msb = (msb << 4) | get(string, i); } for (int i = 9; i < 13; i++) { msb = (msb << 4) | get(string, i); } for (int i = 14; i < 18; i++) { msb = (msb << 4) | get(string, i); } for (int i = 19; i < 23; i++) { lsb = (lsb << 4) | get(string, i); } for (int i = 24; i < 36; i++) { lsb = (lsb << 4) | get(string, i); } that.mostSigBits = msb; that.leastSigBits = lsb; } public static boolean valid(final String guid) { try { parse(guid); return true; } catch (IllegalArgumentException e) { return false; } } private static long get(final String string, int i) { final int chr = string.charAt(i); if (chr > 255) { throw exception(string); } final byte value = MAP[chr]; if (value < 0) { throw exception(string); } return value & 0xffL; } private static RuntimeException exception(final String str) { return new IllegalArgumentException("Invalid UUID: " + str); } private static void validate(final String string) { if (string == null || string.length() != GUID_CHARS) { throw exception(string); } if (string.charAt(DASH_POSITION_1) != '-' || string.charAt(DASH_POSITION_2) != '-' || string.charAt(DASH_POSITION_3) != '-' || string.charAt(DASH_POSITION_4) != '-') { throw exception(string); } } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GUID128.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.util.UUID; public interface GUID128 extends GUID { long getMostSignificantBits(); long getLeastSignificantBits(); UUID toUUID(); int version(); int variant(); int clockSequence() ; long node() ; @Override default int sizeof() { return 16; // 128 bits = 16 bytes } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GuidAllocator; public interface GuidAllocator128 extends GuidAllocator { } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V1.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.security.SecureRandom; import java.time.Instant; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import java.util.function.LongSupplier; public class GuidAllocator128V1 extends 
ArchGuidAllocator128 implements GuidAllocator128{ private final long MASK_12 = 0x0000_0000_0000_0fffL; private final long MASK_16 = 0x0000_0000_0000_ffffL; private final long MULTICAST = 0x0000_0100_0000_0000L; @Override public GUID nextGUID() { return this.nextGUID( System::currentTimeMillis, TLRandom::nextLong ); } public GUID nextGUID( Instant instant, Random random ) { return nextGUID(optional(instant), optional(random)); } private GUID nextGUID( LongSupplier msec, LongSupplier random ) { final long time = gregorian(msec.getAsLong()); final long msb = (time << 32) | ((time >>> 16) & (MASK_16 << 16)) | ((time >>> 48) & MASK_12); final long lsb = random.getAsLong() | MULTICAST; return version(msb, lsb, 1); } private LongSupplier optional(Instant instant) { return instant == null ? System::currentTimeMillis : instant::toEpochMilli; } private LongSupplier optional(Random random) { return random == null ? TLRandom::nextLong : random::nextLong; } private long gregorian( final long millisecons ) { // 1582-10-15T00:00:00Z final long factor = 10_000L; final long offset = 12219292800000L; return ((millisecons + offset) * factor); } GUID version(long hi, long lo, int version) { // set the 4 most significant bits of the 7th byte final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version // set the 2 most significant bits of the 9th byte to 1 and 0 final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant return new UUID128(msb, lsb); } static private class TLRandom { // The JVM unique number tries to mitigate the fact that the thread // local random is not seeded with a secure random seed by default. // Their seeds are based on temporal data and predefined constants. // Although the seeds are unique per JVM, they are not across JVMs. // It helps to generate different sequences of numbers even if two // ThreadLocalRandom are by chance instantiated with the same seed. // Of course it doesn't better the output, but doesn't hurt either. 
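// (XORing with a fixed random constant preserves ThreadLocalRandom's uniform
// distribution while decorrelating the sequences produced by different JVMs.)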
static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong(); static private long nextLong() { return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V2.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; public class GuidAllocator128V2 extends ArchGuidAllocator128 implements GuidAllocator128 { private static final GuidAllocator128 v1 = new GuidAllocator128V1(); private final long MASK_32 = 0x0000_0000_ffff_ffffL; private final long MASK_08 = 0x0000_0000_0000_00ffL; @Override public GUID nextGUID() { return this.nextGUID((byte) 0, (int) 0); } public GUID nextGUID( byte localDomain, int localIdentifier ) { return this.nextGUID( localDomain, localIdentifier, (GUID128) v1.nextGUID() ); } private GUID nextGUID( byte localDomain, int localIdentifier, GUID128 guid ) { final long msb = (guid.getMostSignificantBits() & MASK_32) | ((localIdentifier & MASK_32) << 32); final long lsb = (guid.getLeastSignificantBits() & 0x3f00_ffff_ffff_ffffL) | ((localDomain & MASK_08) << 48); return version(msb, lsb, 2); } GUID version(long hi, long lo, int version) { // set the 4 most significant bits of the 7th byte final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version // set the 2 most significant bits of the 9th byte to 1 and 0 final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V3.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Objects; public class GuidAllocator128V3 extends ArchGuidAllocator128 implements GuidAllocator128 { private final GUID128 NIL = new UUID128(0x0000000000000000L, 0x0000000000000000L); @Override public GUID nextGUID() { return this.nextGUID(this.NIL,""); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). *

* Usage: * *

{@code
     * GUID guid = GUID.v3(Uuid.NAMESPACE_DNS, "www.example.com");
     * }
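 * <p>
 * A minimal equivalent sketch through this allocator itself (hedged:
 * {@code namespaceGuid} is an assumed, caller-provided {@code GUID128}):
 *
 * <pre>{@code
 * GuidAllocator128V3 v3 = new GuidAllocator128V3();
 * GUID guid = v3.nextGUID(namespaceGuid, "www.example.com"); // MD5 over namespace bytes, then name bytes
 * }</pre>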
* * @param namespace a GUID (optional) * @param name a string * @return a GUID * @throws NullPointerException if the name is null */ public GUID nextGUID( GUID128 namespace, String name ) { return hash(3, "MD5", namespace, name); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). *

* Usage: * *

{@code
     * GUID guid = GUID.v3(myNameSpace, myBytes);
     * }
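 * <p>
 * Sketch against this class's byte-array overload ({@code myBytes} is an
 * assumed caller-provided {@code byte[]}; a null namespace is simply skipped
 * in the hash):
 *
 * <pre>{@code
 * GUID guid = new GuidAllocator128V3().nextGUID(null, myBytes);
 * }</pre>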
* * @param namespace a GUID (optional) * @param bytes a byte array * @return a GUID * @throws NullPointerException if the byte array is null */ public GUID nextGUID( GUID128 namespace, byte[] bytes ) { return hash(3, "MD5", namespace, bytes); } private GUID hash(int version, String algorithm, GUID128 namespace, String name) { Objects.requireNonNull(name, "Null name"); return hash(version, algorithm, namespace, name.getBytes(StandardCharsets.UTF_8)); } private GUID hash(int version, String algorithm, GUID128 namespace, byte[] bytes) { Objects.requireNonNull(bytes, "Null bytes"); MessageDigest hasher = hasher(algorithm); if (namespace != null) { ByteBuffer ns = ByteBuffer.allocate(16); ns.putLong(namespace.getMostSignificantBits()); ns.putLong(namespace.getLeastSignificantBits()); hasher.update(ns.array()); } hasher.update(bytes); ByteBuffer hash = ByteBuffer.wrap(hasher.digest()); final long msb = hash.getLong(); final long lsb = hash.getLong(); return version(msb, lsb, version); } private MessageDigest hasher(String algorithm) { try { return MessageDigest.getInstance(algorithm); } catch (NoSuchAlgorithmException e) { throw new IllegalArgumentException(e.getMessage()); } } private GUID version(long hi, long lo, int version) { // set the 4 most significant bits of the 7th byte final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version // set the 2 most significant bits of the 9th byte to 1 and 0 final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V4.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.security.SecureRandom; import java.util.Objects; import java.util.Random; import java.util.UUID; import java.util.concurrent.ThreadLocalRandom; public class GuidAllocator128V4 extends ArchGuidAllocator128 implements GuidAllocator128 { @Override public GUID nextGUID() { return this.version(TLRandom.nextLong(), TLRandom.nextLong(), 4); } /** * Returns a random-based unique identifier (UUIDv4). *

* It is equivalent to {@link UUID#randomUUID()}. *

* Usage: * *

{@code
     * SecureRandom random = new SecureRandom();
     * GUID guid = GUID.v4(random);
     * }
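 * <p>
 * Both entry points of this class, as a sketch (no external names assumed):
 *
 * <pre>{@code
 * GuidAllocator128V4 v4 = new GuidAllocator128V4();
 * GUID fast   = v4.nextGUID();             // ThreadLocalRandom XORed with a per-JVM constant
 * GUID seeded = v4.v4(new SecureRandom()); // caller-supplied RNG
 * }</pre>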
* * @param random a random generator * @return a GUID * @throws NullPointerException if the random is null */ public GUID v4(Random random) { Objects.requireNonNull(random, "Null random"); return version(random.nextLong(), random.nextLong(), 4); } private GUID version(long hi, long lo, int version) { // set the 4 most significant bits of the 7th byte final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version // set the 2 most significant bits of the 9th byte to 1 and 0 final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant return new UUID128(msb, lsb); } static private class TLRandom { // The JVM unique number tries to mitigate the fact that the thread // local random is not seeded with a secure random seed by default. // Their seeds are based on temporal data and predefined constants. // Although the seeds are unique per JVM, they are not across JVMs. // It helps to generate different sequences of numbers even if two // ThreadLocalRandom are by chance instantiated with the same seed. // Of course it doesn't better the output, but doesn't hurt either. static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong(); static private long nextLong() { return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V5.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Objects; public class GuidAllocator128V5 extends ArchGuidAllocator128 implements GuidAllocator128 { public final GUID128 NIL = new UUID128(0x0000000000000000L, 0x0000000000000000L); @Override public GUID nextGUID() { return this.nextGUID( this.NIL, "" ); } public GUID nextGUID( GUID128 namespace, String name ) { return hash(5, "SHA-1", namespace, name); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). *

* Usage: * *

{@code
     * GUID guid = GUID.v5(myNameSpace, myBytes);
     * }
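 * <p>
 * Equivalent sketch through this allocator (the namespace argument may be
 * null, in which case only the name bytes are hashed):
 *
 * <pre>{@code
 * GUID guid = new GuidAllocator128V5().nextGUID(null, "www.example.com".getBytes());
 * }</pre>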
* * @param namespace a GUID (optional) * @param bytes a byte array * @return a GUID * @throws NullPointerException if the byte array is null */ public GUID nextGUID( GUID128 namespace, byte[] bytes ) { return hash(5, "SHA-1", namespace, bytes); } private GUID hash( int version, String algorithm, GUID128 namespace, String name ) { Objects.requireNonNull(name, "Null name"); return hash(version, algorithm, namespace, name.getBytes(StandardCharsets.UTF_8)); } private GUID hash(int version, String algorithm, GUID128 namespace, byte[] bytes) { Objects.requireNonNull(bytes, "Null bytes"); MessageDigest hasher = hasher(algorithm); if (namespace != null) { ByteBuffer ns = ByteBuffer.allocate(16); ns.putLong(namespace.getMostSignificantBits()); ns.putLong(namespace.getLeastSignificantBits()); hasher.update(ns.array()); } hasher.update(bytes); ByteBuffer hash = ByteBuffer.wrap(hasher.digest()); final long msb = hash.getLong(); final long lsb = hash.getLong(); return version(msb, lsb, version); } private MessageDigest hasher(String algorithm) { try { return MessageDigest.getInstance(algorithm); } catch (NoSuchAlgorithmException e) { throw new IllegalArgumentException(e.getMessage()); } } private GUID version(long hi, long lo, int version) { // set the 4 most significant bits of the 7th byte final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version // set the 2 most significant bits of the 9th byte to 1 and 0 final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V6.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.security.SecureRandom; import java.time.Instant; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import java.util.function.LongSupplier; public class GuidAllocator128V6 extends ArchGuidAllocator128 implements GuidAllocator128 { private final long MASK_12 = 0x0000_0000_0000_0fffL; private final long MULTICAST = 0x0000_0100_0000_0000L; @Override public GUID nextGUID() { return UuidCreator.getTimeOrdered(); } public GUID nextSimpleGUID() { return this.nextGUID( System::currentTimeMillis, TLRandom::nextLong ); } /** * Returns a reordered gregorian time-based unique identifier (UUIDv6). *

* The clock sequence and node bits are reset to a pseudo-random value for each * new UUIDv6 generated. *

* Usage: * *

{@code
     * SecureRandom random = new SecureRandom();
     * GUID guid = GUID.v6(Instant.now(), random);
     * }
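 * <p>
 * Equivalent sketch on this class; both arguments are optional and fall back
 * to the current time and a thread-local random when null:
 *
 * <pre>{@code
 * GUID guid = new GuidAllocator128V6().nextGUID(Instant.now(), new SecureRandom());
 * }</pre>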
* * @param instant an instant (optional) * @param random a random generator (optional) * @return a GUID */ public GUID nextGUID( Instant instant, Random random ) { return this.nextGUID( optional(instant), optional(random) ); } private GUID nextGUID( LongSupplier msec, LongSupplier random ) { final long time = gregorian(msec.getAsLong()); final long msb = ((time & ~MASK_12) << 4) | (time & MASK_12); final long lsb = random.getAsLong() | MULTICAST; return version(msb, lsb, 6); } private LongSupplier optional(Instant instant) { return instant == null ? System::currentTimeMillis : instant::toEpochMilli; } private LongSupplier optional(Random random) { return random == null ? TLRandom::nextLong : random::nextLong; } private long gregorian(final long millisecons) { // 1582-10-15T00:00:00Z final long factor = 10_000L; final long offset = 12219292800000L; return ((millisecons + offset) * factor); } GUID version(long hi, long lo, int version) { // set the 4 most significant bits of the 7th byte final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version // set the 2 most significant bits of the 9th byte to 1 and 0 final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant return new UUID128(msb, lsb); } static private class TLRandom { // The JVM unique number tries to mitigate the fact that the thread // local random is not seeded with a secure random seed by default. // Their seeds are based on temporal data and predefined constants. // Although the seeds are unique per JVM, they are not across JVMs. // It helps to generate different sequences of numbers even if two // ThreadLocalRandom are by chance instantiated with the same seed. // Of course it doesn't better the output, but doesn't hurt either. static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong(); static private long nextLong() { return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V7.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import java.security.SecureRandom; import java.time.Instant; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import java.util.function.LongSupplier; public class GuidAllocator128V7 extends ArchGuidAllocator128 implements GuidAllocator128 { private final long MASK_12 = 0x0000_0000_0000_0fffL; @Override public GUID nextGUID() { return UuidCreator.getTimeOrderedEpoch(); } public GUID nextSimpleGUID() { return this.nextGUID(System::currentTimeMillis, TLRandom::nextLong); } /** * Returns a Unix epoch time-based unique identifier (UUIDv7). *

* Usage: * *

{@code
     * SecureRandom random = new SecureRandom();
     * GUID guid = GUID.v7(Instant.now(), random);
     * }
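 * <p>
 * Equivalent sketch on this class; null arguments fall back to the current
 * time and a thread-local random:
 *
 * <pre>{@code
 * GUID guid = new GuidAllocator128V7().nextGUID(Instant.now(), new SecureRandom());
 * }</pre>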
* * @param instant an instant (optional) * @param random a random generator (optional) * @return a GUID */ public GUID nextGUID( Instant instant, Random random ) { return this.nextGUID( optional(instant), optional(random) ); } private GUID nextGUID( LongSupplier msec, LongSupplier random ) { final long time = msec.getAsLong(); final long msb = (time << 16) | (TLRandom.nextLong() & MASK_12); final long lsb = random.getAsLong(); return this.version(msb, lsb, 7); } private LongSupplier optional( Instant instant ) { return instant == null ? System::currentTimeMillis : instant::toEpochMilli; } private LongSupplier optional( Random random ) { return random == null ? TLRandom::nextLong : random::nextLong; } private long gregorian( final long millisecons ) { // 1582-10-15T00:00:00Z final long factor = 10_000L; final long offset = 12219292800000L; return ((millisecons + offset) * factor); } GUID version( long hi, long lo, int version ) { // set the 4 most significant bits of the 7th byte final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version // set the 2 most significant bits of the 9th byte to 1 and 0 final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant return new UUID128(msb, lsb); } static private class TLRandom { // The JVM unique number tries to mitigate the fact that the thread // local random is not seeded with a secure random seed by default. // Their seeds are based on temporal data and predefined constants. // Although the seeds are unique per JVM, they are not across JVMs. // It helps to generate different sequences of numbers even if two // ThreadLocalRandom are by chance instantiated with the same seed. // Of course it doesn't better the output, but doesn't hurt either. static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong(); static private long nextLong() { return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocatorHC128V7.java ================================================ package com.pinecone.ulf.util.guid.i128; import java.util.function.Supplier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.ulf.util.guid.i128.factory.standard.TimeOrderedEpochFactory; public class GuidAllocatorHC128V7 extends ArchGuidAllocator128 implements GuidAllocator { protected Logger log = LoggerFactory.getLogger(this.getClass()); protected TimeOrderedEpochFactory mUuidFactory; protected int mnNodeId; public GuidAllocatorHC128V7( int nodeId ) { this.mnNodeId = nodeId; this.mUuidFactory = new TimeOrderedEpochFactory() ; this.log.info( "[GuidAllocatorHC128V7] , firstGuid: {}>", nodeId, this.nextGUID() ); } @Override public GUID nextGUID() { long xorMask = ((long) this.mnNodeId & 0xFFFFFFFFL) << 16; // 32 ~ 48 return this.mUuidFactory.createXorUint64LSB( xorMask ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/UUID128.java ================================================ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import java.util.UUID; public class UUID128 implements GUID128 { /** * The most significant bits. */ long mostSigBits; /** * The least significant bits. 
*/ long leastSigBits; public UUID128 () { this( 0, 0 ); } public UUID128( long mostSignificantBits, long leastSignificantBits ) { this.mostSigBits = mostSignificantBits; this.leastSigBits = leastSignificantBits; } public UUID128( String hexId ) { ArchGuidAllocator128.Parser.parse( hexId, this ); } @Override public Identification parse( String hexID ) { ArchGuidAllocator128.Parser.parse( hexID, this ); return this; } @Override public long getMostSignificantBits() { return this.mostSigBits; } @Override public long getLeastSignificantBits() { return this.leastSigBits; } @Override public String toString() { return stringify( this.leastSigBits, this.mostSigBits ); } @Override public String toJSONString() { return "\"" + this.toString() + "\""; } public static String stringify( long leastSigBits, long mostSigBits ) { char[] uuidChars = new char[36]; hexDigits(uuidChars, 0, mostSigBits >>> 32, 8); uuidChars[8] = '-'; hexDigits(uuidChars, 9, mostSigBits >>> 16, 4); uuidChars[13] = '-'; hexDigits(uuidChars, 14, mostSigBits, 4); uuidChars[18] = '-'; hexDigits(uuidChars, 19, leastSigBits >>> 48, 4); uuidChars[23] = '-'; hexDigits(uuidChars, 24, leastSigBits, 12); return new String(uuidChars); } private static final char[] HEX_DIGITS = { '0','1','2','3','4','5','6','7', '8','9','a','b','c','d','e','f' }; private static void hexDigits( char[] dest, int offset, long val, int digits ) { for (int i = offset + digits - 1, shift = 0; i >= offset; i--, shift +=4) { dest[i] = HEX_DIGITS[(int)((val >>> shift) & 0xF)]; } } @Override public UUID toUUID() { return new UUID( this.mostSigBits, this.leastSigBits ); } @Override public int version() { // Version is bits masked by 0x000000000000F000 in MS long return (int)((this.mostSigBits >> 12) & 0x0f); } @Override public int variant() { // This field is composed of a varying number of bits. // 0 - - Reserved for NCS backward compatibility // 1 0 - The IETF aka Leach-Salz variant (used by this class) // 1 1 0 Reserved, Microsoft backward compatibility // 1 1 1 Reserved for future definition. 
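// The expression below shifts the 1-3 variant bits into the low-order
// positions and masks them; behaviorally it mirrors java.util.UUID#variant().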
return (int) ((this.leastSigBits >>> (64 - (this.leastSigBits >>> 62))) & (this.leastSigBits >> 63)); } @Override public int clockSequence() { if (version() != 1) { throw new UnsupportedOperationException("Not a time-based GUID"); } return (int)((this.leastSigBits & 0x3FFF000000000000L) >>> 48); } @Override public long node() { if (version() != 1) { throw new UnsupportedOperationException("Not a time-based UUID"); } return this.leastSigBits & 0x0000FFFFFFFFFFFFL; } @Override public byte[] toBytesLE() { byte[] bytes = new byte[16]; // Least significant bits first (little endian) bytes[0] = (byte) (this.leastSigBits); bytes[1] = (byte) (this.leastSigBits >> 8); bytes[2] = (byte) (this.leastSigBits >> 16); bytes[3] = (byte) (this.leastSigBits >> 24); bytes[4] = (byte) (this.leastSigBits >> 32); bytes[5] = (byte) (this.leastSigBits >> 40); bytes[6] = (byte) (this.leastSigBits >> 48); bytes[7] = (byte) (this.leastSigBits >> 56); // Then most significant bits (little endian) bytes[8] = (byte) (this.mostSigBits); bytes[9] = (byte) (this.mostSigBits >> 8); bytes[10] = (byte) (this.mostSigBits >> 16); bytes[11] = (byte) (this.mostSigBits >> 24); bytes[12] = (byte) (this.mostSigBits >> 32); bytes[13] = (byte) (this.mostSigBits >> 40); bytes[14] = (byte) (this.mostSigBits >> 48); bytes[15] = (byte) (this.mostSigBits >> 56); return bytes; } @Override public byte[] toBytesBE() { byte[] bytes = new byte[16]; // Most significant bits first (big endian) bytes[0] = (byte) (this.mostSigBits >> 56); bytes[1] = (byte) (this.mostSigBits >> 48); bytes[2] = (byte) (this.mostSigBits >> 40); bytes[3] = (byte) (this.mostSigBits >> 32); bytes[4] = (byte) (this.mostSigBits >> 24); bytes[5] = (byte) (this.mostSigBits >> 16); bytes[6] = (byte) (this.mostSigBits >> 8); bytes[7] = (byte) (this.mostSigBits); // Then least significant bits (big endian) bytes[8] = (byte) (this.leastSigBits >> 56); bytes[9] = (byte) (this.leastSigBits >> 48); bytes[10] = (byte) (this.leastSigBits >> 40); bytes[11] = (byte) (this.leastSigBits >> 32); bytes[12] = (byte) (this.leastSigBits >> 24); bytes[13] = (byte) (this.leastSigBits >> 16); bytes[14] = (byte) (this.leastSigBits >> 8); bytes[15] = (byte) (this.leastSigBits); return bytes; } @Override public int hashCode() { long hilo = this.mostSigBits ^ this.leastSigBits; return ((int)(hilo >> 32)) ^ (int) hilo; } @Override public long hashCode64() { return this.mostSigBits ^ this.leastSigBits; } @Override public int intVal() { return this.hashCode(); } @Override public long longVal() { return this.hashCode64(); } @Override public boolean equals(Object obj) { if ( !(obj instanceof GUID128) ) { return false; } GUID128 id = (GUID128)obj; return ( this.mostSigBits == id.getMostSignificantBits() && this.leastSigBits == id.getLeastSignificantBits() ); } @Override public int compareTo( Identification that ) { GUID128 val; if ( that instanceof GUID128 ) { val = (GUID128) that; } else { throw new IllegalArgumentException( "Not GUID128" ); } // The ordering is intentionally set up so that the UUIDs // can simply be numerically compared as two numbers return ( this.mostSigBits < val.getMostSignificantBits() ? -1 : ( this.mostSigBits > val.getMostSignificantBits() ? 1 : ( this.leastSigBits < val.getLeastSignificantBits() ? -1 : ( this.leastSigBits > val.getLeastSignificantBits() ? 
1 : 0 ) ) ) ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/UuidCreator.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.codec.StandardBinaryCodec; import com.pinecone.ulf.util.guid.i128.codec.StandardStringCodec; import com.pinecone.ulf.util.guid.i128.enums.UuidLocalDomain; import com.pinecone.ulf.util.guid.i128.enums.UuidNamespace; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.factory.UuidFactory; import com.pinecone.ulf.util.guid.i128.factory.UuidFactory.Parameters; import com.pinecone.ulf.util.guid.i128.factory.nonstandard.PrefixCombFactory; import com.pinecone.ulf.util.guid.i128.factory.nonstandard.ShortPrefixCombFactory; import com.pinecone.ulf.util.guid.i128.factory.nonstandard.ShortSuffixCombFactory; import com.pinecone.ulf.util.guid.i128.factory.nonstandard.SuffixCombFactory; import com.pinecone.ulf.util.guid.i128.factory.standard.DceSecurityFactory; import com.pinecone.ulf.util.guid.i128.factory.standard.NameBasedMd5Factory; import com.pinecone.ulf.util.guid.i128.factory.standard.NameBasedSha1Factory; import com.pinecone.ulf.util.guid.i128.factory.standard.RandomBasedFactory; import com.pinecone.ulf.util.guid.i128.factory.standard.TimeBasedFactory; import com.pinecone.ulf.util.guid.i128.factory.standard.TimeOrderedEpochFactory; import com.pinecone.ulf.util.guid.i128.factory.standard.TimeOrderedFactory; import com.pinecone.ulf.util.guid.i128.util.MachineId; import java.time.Instant; import java.util.Objects; import java.util.UUID; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Supplier; /** * Facade for everything. *

* All UUID types can be generated from this entry point. */ public final class UuidCreator { /** * Name space to be used when the name string is a fully-qualified domain name. */ public static final UuidNamespace NAMESPACE_DNS = UuidNamespace.NAMESPACE_DNS; /** * Name space to be used when the name string is a URL. */ public static final UuidNamespace NAMESPACE_URL = UuidNamespace.NAMESPACE_URL; /** * Name space to be used when the name string is an ISO OID. */ public static final UuidNamespace NAMESPACE_OID = UuidNamespace.NAMESPACE_OID; /** * Name space to be used when the name string is an X.500 DN (DER or text). */ public static final UuidNamespace NAMESPACE_X500 = UuidNamespace.NAMESPACE_X500; /** * The principal domain, interpreted as POSIX UID domain on POSIX systems. */ public static final UuidLocalDomain LOCAL_DOMAIN_PERSON = UuidLocalDomain.LOCAL_DOMAIN_PERSON; /** * The group domain, interpreted as POSIX GID domain on POSIX systems. */ public static final UuidLocalDomain LOCAL_DOMAIN_GROUP = UuidLocalDomain.LOCAL_DOMAIN_GROUP; /** * The organization domain, site-defined. */ public static final UuidLocalDomain LOCAL_DOMAIN_ORG = UuidLocalDomain.LOCAL_DOMAIN_ORG; private static final GUID UUID_NIL = new UUID128(0x0000000000000000L, 0x0000000000000000L); private static final GUID UUID_MAX = new UUID128(0xffffffffffffffffL, 0xffffffffffffffffL); private UuidCreator() { } /** * Returns a Nil UUID. *

* Nil UUID is a special UUID that has all 128 bits set to ZERO. *

* The canonical string of Nil UUID is * 00000000-0000-0000-0000-000000000000. * * @return a Nil UUID */ public static GUID getNil() { return UUID_NIL; } /** * Returns a Max UUID. *

* Max UUID is a special UUID that has all 128 bits set to ONE. *

* The canonical string of Max UUID is * FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF. * * @return a Max UUID * @since 5.0.0 * @see New * UUID Formats */ public static GUID getMax() { return UUID_MAX; } /** * Returns an array of bytes from a UUID. * * @param uuid a UUID * @return an array of bytes * @throws InvalidUuidException if the argument is invalid */ public static byte[] toBytes(final GUID128 uuid) { return StandardBinaryCodec.INSTANCE.encode(uuid); } /** * Returns a UUID from a byte array. *

* It also checks if the input byte array is valid. * * @param uuid a byte array * @return a UUID * @throws InvalidUuidException if the argument is invalid */ public static GUID fromBytes(byte[] uuid) { return StandardBinaryCodec.INSTANCE.decode(uuid); } /** * Returns a string from a UUID. *

* It can be much faster than {@link UUID#toString()} in JDK 8. * * @param uuid a UUID * @return a UUID string * @throws InvalidUuidException if the argument is invalid */ public static String toString(GUID128 uuid) { return StandardStringCodec.INSTANCE.encode(uuid); } /** * Returns a UUID from a string. *

* It accepts strings: *

    *
  • With URN prefix: "urn:uuid:"; *
  • With curly braces: '{' and '}'; *
  • With upper or lower case; *
  • With or without hyphens. *
*

* It can be much faster than {@link UUID#fromString(String)} in JDK 8. *

* It also can be twice as fast as {@link UUID#fromString(String)} in JDK 11. * * @param uuid a UUID string * @return a UUID * @throws InvalidUuidException if the argument is invalid */ public static GUID fromString(String uuid) { return StandardStringCodec.INSTANCE.decode(uuid); } /** * Returns a random-based unique identifier (UUIDv4). * * @return a UUIDv4 * @see RandomBasedFactory */ public static GUID getRandomBased() { return UUID4.create(); } /** * Returns a fast random-based unique identifier (UUIDv4). *

* It employs {@link ThreadLocalRandom} which works very well, although not * cryptographically strong. It can be useful, for example, for logging. *

* Security-sensitive applications that require a cryptographically secure * pseudo-random generator should use {@link UuidCreator#getRandomBased()}. * * @return a UUIDv4 * @see RandomBasedFactory * @see ThreadLocalRandom * @since 5.2.0 */ public static GUID getRandomBasedFast() { return UUID4_FAST.create(); } /** * Returns a time-based unique identifier (UUIDv1). *

* The default node identifier is a random number that is generated once at * initialization. *

* A custom node identifier can be provided by the system property * 'uuidcreator.node' or the environment variable 'UUIDCREATOR_NODE'. * * @return a UUIDv1 * @see TimeBasedFactory */ public static GUID getTimeBased() { return UUID1.create(); } /** * Returns a time-based unique identifier (UUIDv1). *

* The node identifier is a MAC address that is obtained once at initialization. * * @return a UUIDv1 * @see TimeBasedFactory */ public static GUID getTimeBasedWithMac() { return UUID1_MAC.create(); } /** * Returns a time-based unique identifier (UUIDv1). *

* The node identifier is a hash that is calculated once at initialization. *

* The hash input is a string containing host name, MAC and IP. * * @return a UUIDv1 * @see TimeBasedFactory * @see MachineId */ public static GUID getTimeBasedWithHash() { return UUID1_HASH.create(); } /** * Returns a time-based unique identifier (UUIDv1). *

* The node identifier is a random number that is generated with each method * invocation. * * @return a UUIDv1 * @see TimeBasedFactory */ public static GUID getTimeBasedWithRandom() { return UUID1_RANDOM.create(); } /** * Returns a time-based unique identifier (UUIDv1). *

* {@link Instant} accuracy is limited to 1 millisecond on Linux with JDK 8. * On Windows, its accuracy may be limited to 15.625 ms (64 Hz).

* The clock sequence is a number between 0 and 16383 (2^14 - 1). If the value * passed as an argument is out of range, the result of MOD 2^14 will be used. *

* The node identifier is a number between 0 and 281474976710655 (2^48 - 1). If * the value passed as an argument is out of range, the result of MOD 2^48 will * be used. *

* Null arguments are ignored. If all arguments are null, this method works just * like method {@link UuidCreator#getTimeBased()}. * * @param instant an alternate instant * @param clockseq an alternate clock sequence between 0 and 2^14-1 * @param nodeid an alternate node identifier between 0 and 2^48-1 * @return a UUIDv1 * @see TimeBasedFactory */ public static GUID getTimeBased(Instant instant, Integer clockseq, Long nodeid) { TimeBasedFactory.Builder builder = TimeBasedFactory.builder(); if (instant != null) { builder.withInstant(instant); } if (clockseq != null) { builder.withClockSeq(clockseq); } if (nodeid != null) { builder.withNodeId(nodeid); } return builder.build().create(); } /** * Returns the minimum UUIDv1 for a given instant. *

* The 60 bits of the timestamp are filled with the bits of the given instant * and the other 62 bits are all set to ZERO. *

* For example, the minimum UUIDv1 for 2022-02-22 22:22:22.222 is * `{@code e7a1c2e0-942d-11ec-8000-000000000000}`, where * `{@code e7a1c2e0-942d-_1ec}` is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a UUIDv1 */ public static GUID getTimeBasedMin(Instant instant) { Objects.requireNonNull(instant, "Null instant"); TimeBasedFactory.Builder builder = TimeBasedFactory.builder(); return builder.withInstant(instant).withClockSeq(0x0000L).withNodeId(0x000000000000L).build().create(); } /** * Returns the maximum UUIDv1 for a given instant. *

* The 60 bits of the timestamp are filled with the bits of the given instant * and the other 62 bits are all set to ONE. *

* For example, the maximum UUIDv1 for 2022-02-22 22:22:22.222 is * `{@code e7a1c2e0-942d-11ec-bfff-ffffffffffff}`, where * `{@code e7a1c2e0-942d-_1ec}` is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a UUIDv1 */ public static GUID getTimeBasedMax(Instant instant) { Objects.requireNonNull(instant, "Null instant"); TimeBasedFactory.Builder builder = TimeBasedFactory.builder(); return builder.withInstant(instant).withClockSeq(0xffffL).withNodeId(0xffffffffffffL).build().create(); } /** * Returns a time-ordered unique identifier (UUIDv6). *

* The default node identifier is a random number that is generated once at * initialization. *

* A custom node identifier can be provided by the system property * 'uuidcreator.node' or the environment variable 'UUIDCREATOR_NODE'. * * @return a UUIDv6 * @see TimeOrderedFactory * @see New * UUID Formats */ public static GUID getTimeOrdered() { return UUID6.create(); } /** * Returns a time-ordered unique identifier (UUIDv6). *

* The node identifier is a MAC address that is obtained once at initialization. * * @return a UUIDv6 * @see TimeOrderedFactory * @see New * UUID Formats */ public static GUID getTimeOrderedWithMac() { return UUID6_MAC.create(); } /** * Returns a time-ordered unique identifier (UUIDv6). *

* The node identifier is a hash that is calculated once at initialization. *

* The hash input is a string containing host name, MAC and IP. * * @return a UUIDv6 * @see TimeOrderedFactory * @see MachineId * @see New * UUID Formats */ public static GUID getTimeOrderedWithHash() { return UUID6_HASH.create(); } /** * Returns a time-ordered unique identifier (UUIDv6). *

* The node identifier is a random number that is generated with each method * invocation. * * @return a UUIDv6 * @see TimeOrderedFactory * @see New * UUID Formats */ public static GUID getTimeOrderedWithRandom() { return UUID6_RANDOM.create(); } /** * Returns a time-ordered unique identifier (UUIDv6). *

* {@link Instant} accuracy is limited to 1 millisecond on Linux with JDK 8. * On Windows, its accuracy may be limited to 15.625 ms (64 Hz).

* The clock sequence is a number between 0 and 16383 (2^14 - 1). If the value * passed as an argument is out of range, the result of MOD 2^14 will be used. *

* The node identifier is a number between 0 and 281474976710655 (2^48 - 1). If * the value passed as an argument is out of range, the result of MOD 2^48 will * be used. *

* Null arguments are ignored. If all arguments are null, this method works just * like method {@link UuidCreator#getTimeOrdered()}. * * @param instant an alternate instant * @param clockseq an alternate clock sequence between 0 and 2^14-1 * @param nodeid an alternate node identifier between 0 and 2^48-1 * @return a UUIDv6 * @see TimeOrderedFactory * @see New * UUID Formats */ public static GUID getTimeOrdered(Instant instant, Integer clockseq, Long nodeid) { TimeOrderedFactory.Builder builder = TimeOrderedFactory.builder(); if (instant != null) { builder.withInstant(instant); } if (clockseq != null) { builder.withClockSeq(clockseq); } if (nodeid != null) { builder.withNodeId(nodeid); } return builder.build().create(); } /** * Returns the minimum UUIDv6 for a given instant. *

* The 60 bits of the timestamp are filled with the bits of the given instant * and the other 62 bits are all set to ZERO. *

* For example, the minimum UUIDv6 for 2022-02-22 22:22:22.222 is * `{@code 1ec942de-7a1c-62e0-8000-000000000000}`, where * `{@code 1ec942de-7a1c-_2e0}` is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a UUIDv6 */ public static GUID getTimeOrderedMin(Instant instant) { Objects.requireNonNull(instant, "Null instant"); TimeOrderedFactory.Builder builder = TimeOrderedFactory.builder(); return builder.withInstant(instant).withClockSeq(0x0000L).withNodeId(0x000000000000L).build().create(); } /** * Returns the maximum UUIDv6 for a given instant. *

* The 60 bits of the timestamp are filled with the bits of the given instant * and the other 62 bits are all set to ONE. *

* For example, the maximum UUIDv6 for 2022-02-22 22:22:22.222 is * `{@code 1ec942de-7a1c-62e0-bfff-ffffffffffff}`, where * `{@code 1ec942de-7a1c-_2e0}` is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a UUIDv6 */ public static GUID getTimeOrderedMax(Instant instant) { Objects.requireNonNull(instant, "Null instant"); TimeOrderedFactory.Builder builder = TimeOrderedFactory.builder(); return builder.withInstant(instant).withClockSeq(0xffffL).withNodeId(0xffffffffffffL).build().create(); } /** * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7). *

* This method produces identifiers with 3 parts: time, counter and random. *

* The counter bits are incremented by 1 when the time repeats. *

* The random bits are generated with each method invocation. * * @return a UUIDv7 * @since 5.0.0 * @see TimeOrderedEpochFactory * @see New * UUID Formats */ public static GUID getTimeOrderedEpoch() { return UUID7.create(); } /** * Returns a fast time-ordered unique identifier that uses Unix Epoch (UUIDv7). *

* This method produces identifiers with 3 parts: time, counter and random. *

* The counter bits are incremented by 1 when the time repeats. *

* The random bits are generated with each method invocation. *

* It employs {@link ThreadLocalRandom} which works very well, although not * cryptographically strong. It can be useful, for example, for logging. *

* Security-sensitive applications that require a cryptographically secure * pseudo-random generator should use {@link UuidCreator#getTimeOrderedEpoch()}. * * @return a UUIDv7 * @since 6.0.0 * @see TimeOrderedEpochFactory * @see New * UUID Formats */ public static GUID getTimeOrderedEpochFast() { return UUID7_FAST.create(); } /** * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7). *

* This method produces identifiers with 2 parts: time and monotonic random. *

* The monotonic random bits are incremented by 1 when the time repeats. * * @return a UUIDv7 * @since 5.0.0 * @see TimeOrderedEpochFactory * @see New * UUID Formats */ public static GUID getTimeOrderedEpochPlus1() { return UUID7_PLUS_1.create(); } /** * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7). *

* This method produces identifiers with 2 parts: time and monotonic random. *

* The monotonic random bits are incremented by a random number between 1 and * 2^32 when the time repeats. * * @return a UUIDv7 * @since 5.0.0 * @see TimeOrderedEpochFactory * @see New * UUID Formats */ public static GUID getTimeOrderedEpochPlusN() { return UUID7_PLUS_N.create(); } /** * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7) for a * given instant. *

* This method produces identifiers with 2 parts: time and secure random. *

* The 48 bits of the time component are filled with the bits of the given * instant and the other 74 bits are random. *

* For example, a UUIDv7 for 2022-02-22 22:22:22.222 is * `{@code 017f2387-460e-7012-b345-6789abcdef01}`, where `{@code 017f2387-460e}` * is the timestamp in hexadecimal.

* The random bits are generated with each method invocation. *

* You can use this method to produce UUIDs with any instant you want, for * example to obfuscate the actual generation instant in a simple way. Example: *

* *

{@code
	 * // Shift the generation instant 1 day ahead of system clock
	 * Instant instant = Instant.now().plus(Duration.ofDays(1));
	 * UUID uuid = UuidCreator.getTimeOrderedEpoch(instant);
	 * }
* * @param instant a given instant * @return a UUIDv7 * @since 5.3.3 */ public static GUID getTimeOrderedEpoch(Instant instant) { return UUID7.create(Parameters.builder().withInstant(instant).build()); } /** * Returns the minimum UUIDv7 for a given instant. *

* The 48 bits of the time component are filled with the bits of the given * instant and the other 74 bits are all set to ZERO. *

* For example, the minimum UUIDv7 for 2022-02-22 22:22:22.222 is * `{@code 017f2387-460e-7000-8000-000000000000}`, where `{@code 017f2387-460e}` * is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a UUIDv7 */ public static GUID getTimeOrderedEpochMin(Instant instant) { Objects.requireNonNull(instant, "Null instant"); final long time = instant.toEpochMilli(); return new UUID128((time << 16) | 0x7000L, 0x8000000000000000L); } /** * Returns the maximum UUIDv7 for a given instant. *

* The 48 bits of the time component are filled with the bits of the given * instant and the other 74 bits are all set to ONE. *

* For example, the maximum UUIDv7 for 2022-02-22 22:22:22.222 is * `{@code 017f2387-460e-7fff-bfff-ffffffffffff}`, where `{@code 017f2387-460e}` * is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a UUIDv7 */ public static GUID getTimeOrderedEpochMax(Instant instant) { Objects.requireNonNull(instant, "Null instant"); final long time = instant.toEpochMilli(); return new UUID128((time << 16) | 0x7fffL, 0xbfffffffffffffffL); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param name a string * @return a UUIDv3 * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(String name) { return UUID3.create(Parameters.builder().withName(name).build()); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). * * @param name a byte array * @return a UUIDv3 * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(byte[] name) { return UUID3.create(Parameters.builder().withName(name).build()); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param namespace a custom name space UUID * @param name a string * @return a UUIDv3 * @see UuidNamespace * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(GUID128 namespace, String name) { return UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). * * @param namespace a custom name space UUID * @param name a byte array * @return a UUIDv3 * @see UuidNamespace * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(GUID128 namespace, byte[] name) { return UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param namespace a custom name space UUID in string format * @param name a string * @return a UUIDv3 * @throws InvalidUuidException if namespace is invalid * @see UuidNamespace * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(String namespace, String name) { return UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). * * @param namespace a custom name space UUID in string format * @param name a byte array * @return a UUIDv3 * @throws InvalidUuidException if namespace is invalid * @see UuidNamespace * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(String namespace, byte[] name) { return UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). *

* The name string is encoded into a sequence of bytes using UTF-8. *

* Name spaces predefined by RFC 9562 (Appendix C): *

    *
  • NAMESPACE_DNS: Name string is a fully-qualified domain name; *
  • NAMESPACE_URL: Name string is a URL; *
  • NAMESPACE_OID: Name string is an ISO OID; *
  • NAMESPACE_X500: Name string is an X.500 DN (in DER or text format). *
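* <p>
* A determinism sketch (illustrative; uses only members declared in this
* class):
* <pre>{@code
* GUID a = UuidCreator.getNameBasedMd5(UuidCreator.NAMESPACE_DNS, "www.example.com");
* GUID b = UuidCreator.getNameBasedMd5(UuidCreator.NAMESPACE_DNS, "www.example.com");
* assert a.equals(b); // the same namespace and name always yield the same UUIDv3
* }</pre>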
* * @param namespace a predefined name space enumeration * @param name a string * @return a UUIDv3 * @see UuidNamespace * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(UuidNamespace namespace, String name) { return UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3). *

* Name spaces predefined by RFC 9562 (Appendix C): *

    *
  • NAMESPACE_DNS: Name string is a fully-qualified domain name; *
  • NAMESPACE_URL: Name string is a URL; *
  • NAMESPACE_OID: Name string is an ISO OID; *
  • NAMESPACE_X500: Name string is an X.500 DN (in DER or text format). *
* * @param namespace a predefined name space enumeration * @param name a byte array * @return a UUIDv3 * @see UuidNamespace * @see NameBasedMd5Factory */ public static GUID getNameBasedMd5(UuidNamespace namespace, byte[] name) { return UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param name a string * @return a UUIDv5 * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(String name) { return UUID5.create(Parameters.builder().withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). * * @param name a byte array * @return a UUIDv5 * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(byte[] name) { return UUID5.create(Parameters.builder().withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param namespace a custom name space UUID * @param name a string * @return a UUIDv5 * @see UuidNamespace * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(GUID128 namespace, String name) { return UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). * * @param namespace a custom name space UUID * @param name a byte array * @return a UUIDv5 * @see UuidNamespace * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(GUID128 namespace, byte[] name) { return UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param namespace a custom name space UUID in string format * @param name a string * @return a UUIDv5 * @throws InvalidUuidException if namespace is invalid * @see UuidNamespace * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(String namespace, String name) { return UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). * * @param namespace a custom name space UUID in string format * @param name a byte array * @return a UUIDv5 * @throws InvalidUuidException if namespace is invalid * @see UuidNamespace * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(String namespace, byte[] name) { return UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). *

* The name string is encoded into a sequence of bytes using UTF-8. *

* Name spaces predefined by RFC 9562 (Appendix C): *

    *
  • NAMESPACE_DNS: Name string is a fully-qualified domain name; *
  • NAMESPACE_URL: Name string is a URL; *
  • NAMESPACE_OID: Name string is an ISO OID; *
  • NAMESPACE_X500: Name string is an X.500 DN (in DER or text format). *
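* <p>
* A determinism sketch (illustrative; uses only members declared in this
* class):
* <pre>{@code
* GUID id = UuidCreator.getNameBasedSha1(UuidCreator.NAMESPACE_URL, "https://example.com/");
* assert id.equals(UuidCreator.getNameBasedSha1(UuidCreator.NAMESPACE_URL, "https://example.com/"));
* }</pre>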
* * @param namespace a predefined name space enumeration * @param name a string * @return a UUIDv5 * @see UuidNamespace * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(UuidNamespace namespace, String name) { return UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5). *

* Name spaces predefined by RFC 9562 (Appendix C): *

    *
  • NAMESPACE_DNS: Name string is a fully-qualified domain name; *
  • NAMESPACE_URL: Name string is a URL; *
  • NAMESPACE_OID: Name string is an ISO OID; *
  • NAMESPACE_X500: Name string is an X.500 DN (in DER or text format). *
* * @param namespace a predefined name space enumeration * @param name a byte array * @return a UUIDv5 * @see UuidNamespace * @see NameBasedSha1Factory */ public static GUID getNameBasedSha1(UuidNamespace namespace, byte[] name) { return UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). * * @param localDomain a custom local domain byte * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurity(byte localDomain, int localIdentifier) { return UUID2 .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). * * @param localDomain a custom local domain byte * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurityWithMac(byte localDomain, int localIdentifier) { return UUID2_MAC .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). * * @param localDomain a custom local domain byte * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurityWithHash(byte localDomain, int localIdentifier) { return UUID2_HASH .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). * * @param localDomain a custom local domain byte * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurityWithRandom(byte localDomain, int localIdentifier) { return UUID2_RANDOM .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). *

* Local domains predefined by DCE 1.1 Authentication and Security Services * (Chapter 11): *

    *
  • LOCAL_DOMAIN_PERSON: 0 (interpreted as POSIX UID domain); *
  • LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain); *
  • LOCAL_DOMAIN_ORG: 2. *
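* <p>
* A call sketch (illustrative; 1001 stands in for a POSIX uid):
* <pre>{@code
* GUID uuid = UuidCreator.getDceSecurity(UuidCreator.LOCAL_DOMAIN_PERSON, 1001);
* }</pre>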
* * @param localDomain a predefined local domain enumeration * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurity(UuidLocalDomain localDomain, int localIdentifier) { return UUID2 .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). *

* Local domains predefined by DCE 1.1 Authentication and Security Services * (Chapter 11): *

    *
  • LOCAL_DOMAIN_PERSON: 0 (interpreted as POSIX UID domain); *
  • LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain); *
  • LOCAL_DOMAIN_ORG: 2. *
* * @param localDomain a predefined local domain enumeration * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurityWithMac(UuidLocalDomain localDomain, int localIdentifier) { return UUID2_MAC .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). *

* Local domains predefined by DCE 1.1 Authentication and Security Services * (Chapter 11): *

    *
  • LOCAL_DOMAIN_PERSON: 0 (interpreted as POSIX UID domain); *
  • LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain); *
  • LOCAL_DOMAIN_ORG: 2. *
* * @param localDomain a predefined local domain enumeration * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurityWithHash(UuidLocalDomain localDomain, int localIdentifier) { return UUID2_HASH .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a DCE Security unique identifier (UUIDv2). *

* Local domains predefined by DCE 1.1 Authentication and Security Services * (Chapter 11): *

    *
  • LOCAL_DOMAIN_PERSON: 0 (interpreted as POSIX UID domain); *
  • LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain); *
  • LOCAL_DOMAIN_ORG: 2. *
* * @param localDomain a predefined local domain enumeration * @param localIdentifier a local identifier * @return a UUIDv2 * @see UuidLocalDomain * @see DceSecurityFactory */ public static GUID getDceSecurityWithRandom(UuidLocalDomain localDomain, int localIdentifier) { return UUID2_RANDOM .create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build()); } /** * Returns a Prefix COMB GUID. *

* The creation millisecond is a 6-byte PREFIX at the MOST significant bits. * * @return a GUID * @see PrefixCombFactory * @see The Cost * of GUIDs as Primary Keys */ public static GUID getPrefixComb() { return COMB_PREFIX.create(); } /** * Returns the minimum Prefix COMB GUID for a given instant. *

* The 48 bits of the time component are filled with the bits of the given * instant and the other 74 bits are all set to ZERO. *

* For example, the minimum GUID for 2022-02-22 22:22:22.222 is * `{@code 017f2387-460e-4000-8000-000000000000}`, where `{@code 017f2387-460e}` * is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a GUID */ public static GUID getPrefixCombMin(Instant instant) { Objects.requireNonNull(instant, "Null instant"); final long time = instant.toEpochMilli(); return new UUID128((time << 16) | 0x4000L, 0x8000000000000000L); } /** * Returns the maximum Prefix COMB GUID for a given instant. *

* The 48 bits of the time component are filled with the bits of the given * instant and the other 74 bits are all set to ONE. *

* For example, the maximum GUID for 2022-02-22 22:22:22.222 is * `{@code 017f2387-460e-4fff-bfff-ffffffffffff}`, where `{@code 017f2387-460e}` * is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a GUID */ public static GUID getPrefixCombMax(Instant instant) { Objects.requireNonNull(instant, "Null instant"); final long time = instant.toEpochMilli(); return new UUID128((time << 16) | 0x4fffL, 0xbfffffffffffffffL); } /** * Returns a Suffix COMB GUID. * * The creation millisecond is a 6-byte SUFFIX at the LEAST significant * bits. * * @return a GUID * @see SuffixCombFactory * @see The Cost * of GUIDs as Primary Keys */ public static GUID getSuffixComb() { return COMB_SUFFIX.create(); } /** * Returns the minimum Suffix COMB GUID for a given instant. *

* The 48 bits of the time component are filled with the bits of the given * instant and the other 74 bits are all set to ZERO. *

* For example, the minimum GUID for 2022-02-22 22:22:22.222 is * `{@code 00000000-0000-4000-8000-017f2387460e}`, where `{@code 017f2387460e}` * is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a GUID */ public static GUID getSuffixCombMin(Instant instant) { Objects.requireNonNull(instant, "Null instant"); final long time = instant.toEpochMilli(); return new UUID128(0x0000000000004000L, 0x8000000000000000L | (time & 0x0000ffffffffffffL)); } /** * Returns the maximum Suffix COMB GUID for a given instant. *

* The 48 bits of the time component are filled with the bits of the given * instant and the other 74 bits are all set to ONE. *

* For example, the maximum GUID for 2022-02-22 22:22:22.222 is * `{@code ffffffff-ffff-4fff-bfff-017f2387460e}`, where `{@code 017f2387460e}` * is the timestamp in hexadecimal. *

* It can be useful to find all records before or after a specific timestamp in * a table without a `{@code created_at}` field. * * @param instant a given instant * @return a GUID */ public static GUID getSuffixCombMax(Instant instant) { Objects.requireNonNull(instant, "Null instant"); final long time = instant.toEpochMilli(); return new UUID128(0xffffffffffff4fffL, 0xbfff000000000000L | (time & 0x0000ffffffffffffL)); } /** * Returns a Short Prefix COMB GUID. *

* The creation minute is a 2-byte PREFIX at the MOST significant bits.

* The prefix wraps around every ~45 days (2^16/60/24 = ~45). * * @return a GUID * @see ShortPrefixCombFactory * @see Sequential * UUID Generators */ public static GUID getShortPrefixComb() { return COMB_SHORT_PREFIX.create(); } /** * Returns a Short Suffix COMB GUID. *

* The creation minute is a 2-byte SUFFIX at the LEAST significant bits.

* The suffix wraps around every ~45 days (2^16/60/24 = ~45). * * @return a GUID * @see ShortSuffixCombFactory * @see Sequential * UUID Generators */ public static GUID getShortSuffixComb() { return COMB_SHORT_SUFFIX.create(); } // *************************************** // Lazy holders // *************************************** private static final Proxy UUID1 = new Proxy(Proxy.UUID1); private static final Proxy UUID1_MAC = new Proxy(Proxy.UUID1_MAC); private static final Proxy UUID1_HASH = new Proxy(Proxy.UUID1_HASH); private static final Proxy UUID1_RANDOM = new Proxy(Proxy.UUID1_RANDOM); private static final Proxy UUID2 = new Proxy(Proxy.UUID2); private static final Proxy UUID2_MAC = new Proxy(Proxy.UUID2_MAC); private static final Proxy UUID2_HASH = new Proxy(Proxy.UUID2_HASH); private static final Proxy UUID2_RANDOM = new Proxy(Proxy.UUID2_RANDOM); private static final Proxy UUID3 = new Proxy(Proxy.UUID3); private static final Proxy UUID4 = new Proxy(Proxy.UUID4); private static final Proxy UUID4_FAST = new Proxy(Proxy.UUID4_FAST); private static final Proxy UUID5 = new Proxy(Proxy.UUID5); private static final Proxy UUID6 = new Proxy(Proxy.UUID6); private static final Proxy UUID6_MAC = new Proxy(Proxy.UUID6_MAC); private static final Proxy UUID6_HASH = new Proxy(Proxy.UUID6_HASH); private static final Proxy UUID6_RANDOM = new Proxy(Proxy.UUID6_RANDOM); private static final Proxy UUID7 = new Proxy(Proxy.UUID7); private static final Proxy UUID7_FAST = new Proxy(Proxy.UUID7_FAST); private static final Proxy UUID7_PLUS_1 = new Proxy(Proxy.UUID7_PLUS_1); private static final Proxy UUID7_PLUS_N = new Proxy(Proxy.UUID7_PLUS_N); private static final Proxy COMB_PREFIX = new Proxy(Proxy.COMB_PREFIX); private static final Proxy COMB_SUFFIX = new Proxy(Proxy.COMB_SUFFIX); private static final Proxy COMB_SHORT_PREFIX = new Proxy(Proxy.COMB_SHORT_PREFIX); private static final Proxy COMB_SHORT_SUFFIX = new Proxy(Proxy.COMB_SHORT_SUFFIX); private static class Proxy extends UuidFactory { private UuidFactory factory = null; private Supplier supplier; private static final ReentrantLock lock = new ReentrantLock(); // @formatter:off static final Supplier UUID1 = TimeBasedFactory::new; static final Supplier UUID1_MAC = () -> TimeBasedFactory.builder().withMacNodeId().build(); static final Supplier UUID1_HASH = () -> TimeBasedFactory.builder().withHashNodeId().build(); static final Supplier UUID1_RANDOM = () -> TimeBasedFactory.builder().withRandomNodeId().build(); static final Supplier UUID2 = DceSecurityFactory::new; static final Supplier UUID2_MAC = () -> DceSecurityFactory.builder().withMacNodeId().build(); static final Supplier UUID2_HASH = () -> DceSecurityFactory.builder().withHashNodeId().build(); static final Supplier UUID2_RANDOM = () -> DceSecurityFactory.builder().withRandomNodeId().build(); static final Supplier UUID3 = NameBasedMd5Factory::new; static final Supplier UUID4 = RandomBasedFactory::new; static final Supplier UUID4_FAST = () -> RandomBasedFactory.builder().withFastRandom().build(); static final Supplier UUID5 = NameBasedSha1Factory::new; static final Supplier UUID6 = TimeOrderedFactory::new; static final Supplier UUID6_MAC = () -> TimeOrderedFactory.builder().withMacNodeId().build(); static final Supplier UUID6_HASH = () -> TimeOrderedFactory.builder().withHashNodeId().build(); static final Supplier UUID6_RANDOM = () -> TimeOrderedFactory.builder().withRandomNodeId().build(); static final Supplier UUID7 = TimeOrderedEpochFactory::new; static final Supplier UUID7_FAST = () -> 
TimeOrderedEpochFactory.builder().withFastRandom().build(); static final Supplier UUID7_PLUS_1 = () -> TimeOrderedEpochFactory.builder().withIncrementPlus1().build(); static final Supplier UUID7_PLUS_N = () -> TimeOrderedEpochFactory.builder().withIncrementPlusN().build(); static final Supplier COMB_PREFIX = PrefixCombFactory::new; static final Supplier COMB_SUFFIX = SuffixCombFactory::new; static final Supplier COMB_SHORT_PREFIX = ShortPrefixCombFactory::new; static final Supplier COMB_SHORT_SUFFIX = ShortSuffixCombFactory::new; // @formatter:on public Proxy(Supplier supplier) { this.supplier = supplier; } private UuidFactory get() { if (factory != null) { return factory; } lock.lock(); try { if (factory == null) { this.factory = supplier.get(); } return this.factory; } finally { lock.unlock(); } } @Override public GUID128 create() { return this.get().create(); } @Override public GUID128 create(Parameters parameters) { return get().create(parameters); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/GuidCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import java.util.UUID; /** * Interface to be implemented by all codecs of this package. *

* All implementations of this interface throw {@link InvalidUuidException} if * an invalid argument is given.

* The {@link RuntimeException} cases that can be detected beforehand are * translated into an {@link InvalidUuidException}. * * @param <T> the type encoded to and decoded from. * @see InvalidUuidException */ public interface GuidCodec<T> { /** * Get a generic type from a UUID. * * @param uuid a UUID * @return a generic type * @throws InvalidUuidException if the argument is invalid */ T encode(GUID128 uuid); /** * Get a UUID from a generic type. * * @param type a generic type * @return a UUID * @throws InvalidUuidException if the argument is invalid */ GUID128 decode(T type); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/StandardBinaryCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import java.util.UUID; /** * Codec for UUID binary encoding as defined in RFC 9562. *

* The UUID is encoded as 16 octets (bytes). * * @see RFC 9562 */ public class StandardBinaryCodec implements GuidCodec { /** * A shared immutable instance. */ public static final StandardBinaryCodec INSTANCE = new StandardBinaryCodec(); /** * Get an array of bytes from a UUID. * * @param uuid a UUID * @return an array of bytes * @throws InvalidUuidException if the argument is invalid */ @Override public byte[] encode(final GUID128 uuid) { UuidValidator.validate(uuid); final byte[] bytes = new byte[16]; final long msb = uuid.getMostSignificantBits(); final long lsb = uuid.getLeastSignificantBits(); bytes[0x0] = (byte) (msb >>> 56); bytes[0x1] = (byte) (msb >>> 48); bytes[0x2] = (byte) (msb >>> 40); bytes[0x3] = (byte) (msb >>> 32); bytes[0x4] = (byte) (msb >>> 24); bytes[0x5] = (byte) (msb >>> 16); bytes[0x6] = (byte) (msb >>> 8); bytes[0x7] = (byte) (msb); bytes[0x8] = (byte) (lsb >>> 56); bytes[0x9] = (byte) (lsb >>> 48); bytes[0xa] = (byte) (lsb >>> 40); bytes[0xb] = (byte) (lsb >>> 32); bytes[0xc] = (byte) (lsb >>> 24); bytes[0xd] = (byte) (lsb >>> 16); bytes[0xe] = (byte) (lsb >>> 8); bytes[0xf] = (byte) (lsb); return bytes; } /** * Get a UUID from an array of bytes. * * @param bytes an array of bytes * @return a UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(final byte[] bytes) { UuidValidator.validate(bytes); long msb = 0; long lsb = 0; msb |= (bytes[0x0] & 0xffL) << 56; msb |= (bytes[0x1] & 0xffL) << 48; msb |= (bytes[0x2] & 0xffL) << 40; msb |= (bytes[0x3] & 0xffL) << 32; msb |= (bytes[0x4] & 0xffL) << 24; msb |= (bytes[0x5] & 0xffL) << 16; msb |= (bytes[0x6] & 0xffL) << 8; msb |= (bytes[0x7] & 0xffL); lsb |= (bytes[0x8] & 0xffL) << 56; lsb |= (bytes[0x9] & 0xffL) << 48; lsb |= (bytes[0xa] & 0xffL) << 40; lsb |= (bytes[0xb] & 0xffL) << 32; lsb |= (bytes[0xc] & 0xffL) << 24; lsb |= (bytes[0xd] & 0xffL) << 16; lsb |= (bytes[0xe] & 0xffL) << 8; lsb |= (bytes[0xf] & 0xffL); return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/StandardStringCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.codec; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.base.Base16Codec; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import com.pinecone.ulf.util.guid.i128.util.immutable.CharArray; import com.pinecone.ulf.util.guid.i128.util.internal.JavaVersionUtil; import java.util.UUID; /** * Codec for UUID canonical string as defined in RFC 9562. *

* In the canonical textual representation, the 16 bytes of a UUID are * represented as 32 hexadecimal (base-16) digits, displayed in five groups * separated by hyphens, in the form 8-4-4-4-12 for a total of 36 characters (32 * hexadecimal characters and 4 hyphens). *

* This codec decodes (parses) strings in these formats: *

    *
  • 000000000000V0000000000000000000 (hexadecimal string) *
  • 00000000-0000-0000-0000-000000000000 (THE canonical string) *
  • {00000000-0000-0000-0000-000000000000} (Microsoft string) *
  • urn:uuid:00000000-0000-0000-0000-000000000000 (URN string) *
*

* The encoding and decoding processes can be much faster (7x) than * {@link UUID#toString()} and {@link UUID#fromString(String)} in JDK 8. *

* If you prefer a string representation without hyphens, use * {@link Base16Codec} instead of {@link StandardStringCodec}. * {@link Base16Codec} can be much faster (22x) than doing * uuid.toString().replaceAll("-", ""). *

* * @see RFC 9562 */ public class StandardStringCodec implements GuidCodec { /** * A shared immutable instance. */ public static final StandardStringCodec INSTANCE = new StandardStringCodec(); private static final int DASH_POSITION_1 = 8; private static final int DASH_POSITION_2 = 13; private static final int DASH_POSITION_3 = 18; private static final int DASH_POSITION_4 = 23; private static final int LENGTH_WITH_DASH = 36; private static final int LENGTH_WITHOUT_DASH = 32; private static final int LENGTH_WITH_URN_PREFIX = 45; private static final int LENGTH_WITH_CURLY_BRACES = 38; private static final byte[] MAP = Base16Codec.INSTANCE.getBase().getMap().array(); private static final CharArray ALPHABET = Base16Codec.INSTANCE.getBase().getAlphabet(); private static final String URN_PREFIX = "urn:uuid:"; private static final boolean JAVA_VERSION_GREATER_THAN_8 = JavaVersionUtil.getJavaVersion() > 8; /** * Get a string from a UUID. *

* It can be much faster than {@link UUID#toString()} in JDK 8. * * @param uuid a UUID * @return a UUID string * @throws InvalidUuidException if the argument is invalid */ @Override public String encode(GUID128 uuid) { UuidValidator.validate(uuid); if (JAVA_VERSION_GREATER_THAN_8) { return uuid.toString(); } final char[] chars = new char[36]; final long msb = uuid.getMostSignificantBits(); final long lsb = uuid.getLeastSignificantBits(); chars[0x00] = ALPHABET.get((int) (msb >>> 0x3c & 0xf)); chars[0x01] = ALPHABET.get((int) (msb >>> 0x38 & 0xf)); chars[0x02] = ALPHABET.get((int) (msb >>> 0x34 & 0xf)); chars[0x03] = ALPHABET.get((int) (msb >>> 0x30 & 0xf)); chars[0x04] = ALPHABET.get((int) (msb >>> 0x2c & 0xf)); chars[0x05] = ALPHABET.get((int) (msb >>> 0x28 & 0xf)); chars[0x06] = ALPHABET.get((int) (msb >>> 0x24 & 0xf)); chars[0x07] = ALPHABET.get((int) (msb >>> 0x20 & 0xf)); chars[0x08] = '-'; // 8 chars[0x09] = ALPHABET.get((int) (msb >>> 0x1c & 0xf)); chars[0x0a] = ALPHABET.get((int) (msb >>> 0x18 & 0xf)); chars[0x0b] = ALPHABET.get((int) (msb >>> 0x14 & 0xf)); chars[0x0c] = ALPHABET.get((int) (msb >>> 0x10 & 0xf)); chars[0x0d] = '-'; // 13 chars[0x0e] = ALPHABET.get((int) (msb >>> 0x0c & 0xf)); chars[0x0f] = ALPHABET.get((int) (msb >>> 0x08 & 0xf)); chars[0x10] = ALPHABET.get((int) (msb >>> 0x04 & 0xf)); chars[0x11] = ALPHABET.get((int) (msb & 0xf)); chars[0x12] = '-'; // 18 chars[0x13] = ALPHABET.get((int) (lsb >>> 0x3c & 0xf)); chars[0x14] = ALPHABET.get((int) (lsb >>> 0x38 & 0xf)); chars[0x15] = ALPHABET.get((int) (lsb >>> 0x34 & 0xf)); chars[0x16] = ALPHABET.get((int) (lsb >>> 0x30 & 0xf)); chars[0x17] = '-'; // 23 chars[0x18] = ALPHABET.get((int) (lsb >>> 0x2c & 0xf)); chars[0x19] = ALPHABET.get((int) (lsb >>> 0x28 & 0xf)); chars[0x1a] = ALPHABET.get((int) (lsb >>> 0x24 & 0xf)); chars[0x1b] = ALPHABET.get((int) (lsb >>> 0x20 & 0xf)); chars[0x1c] = ALPHABET.get((int) (lsb >>> 0x1c & 0xf)); chars[0x1d] = ALPHABET.get((int) (lsb >>> 0x18 & 0xf)); chars[0x1e] = ALPHABET.get((int) (lsb >>> 0x14 & 0xf)); chars[0x1f] = ALPHABET.get((int) (lsb >>> 0x10 & 0xf)); chars[0x20] = ALPHABET.get((int) (lsb >>> 0x0c & 0xf)); chars[0x21] = ALPHABET.get((int) (lsb >>> 0x08 & 0xf)); chars[0x22] = ALPHABET.get((int) (lsb >>> 0x04 & 0xf)); chars[0x23] = ALPHABET.get((int) (lsb & 0xf)); return new String(chars); } /** * Get a UUID from a string. *

* It accepts strings: *

    *
  • With URN prefix: "urn:uuid:"; *
  • With curly braces: '{' and '}'; *
  • With upper or lower case; *
  • With or without hyphens. *
*

* It can be much faster than {@link UUID#fromString(String)} in JDK 8. *

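* All of the following decode to the same value (illustrative addition; the
* sample UUID is arbitrary):
*
* <pre>{@code
* StandardStringCodec codec = StandardStringCodec.INSTANCE;
* GUID128 a = codec.decode("01234567-89ab-4def-a012-3456789abcde");
* GUID128 b = codec.decode("0123456789AB4DEFA0123456789ABCDE");
* GUID128 c = codec.decode("urn:uuid:01234567-89ab-4def-a012-3456789abcde");
* GUID128 d = codec.decode("{01234567-89ab-4def-a012-3456789abcde}");
* }</pre>
* <p>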
* It also can be twice as fast as {@link UUID#fromString(String)} in JDK 11. * * @param string a UUID string * @return a UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(final String string) { if (string == null) { throw InvalidUuidException.newInstance(null); } final String modified = modify(string); if (modified.length() == LENGTH_WITH_DASH) { validate(modified); return parse(modified); } if (modified.length() == LENGTH_WITHOUT_DASH) { return Base16Codec.INSTANCE.decode(modified); } throw InvalidUuidException.newInstance(modified); } private GUID128 parse(final String string) { long msb = 0L; long lsb = 0L; for (int i = 0; i < 8; i++) { msb = (msb << 4) | get(string, i); } for (int i = 9; i < 13; i++) { msb = (msb << 4) | get(string, i); } for (int i = 14; i < 18; i++) { msb = (msb << 4) | get(string, i); } for (int i = 19; i < 23; i++) { lsb = (lsb << 4) | get(string, i); } for (int i = 24; i < 36; i++) { lsb = (lsb << 4) | get(string, i); } return new UUID128(msb, lsb); } protected static String modify(final String string) { // UUID URN format: "urn:uuid:00000000-0000-0000-0000-000000000000" if (string.length() == LENGTH_WITH_URN_PREFIX && string.startsWith(URN_PREFIX)) { return string.substring(URN_PREFIX.length()); // Remove the URN prefix: "urn:uuid:" } // Curly braces format: "{00000000-0000-0000-0000-000000000000}" if (string.length() == LENGTH_WITH_CURLY_BRACES && string.startsWith("{") && string.endsWith("}")) { return string.substring(1, string.length() - 1); // Remove curly braces: '{' and '}' } return string; } private static void validate(final String string) { if (string.charAt(DASH_POSITION_1) != '-' || string.charAt(DASH_POSITION_2) != '-' || string.charAt(DASH_POSITION_3) != '-' || string.charAt(DASH_POSITION_4) != '-') { throw InvalidUuidException.newInstance(string); } } private long get(final String string, final int i) { final int chr = string.charAt(i); if (chr > 255) { throw InvalidUuidException.newInstance(string); } final byte value = MAP[chr]; if (value < 0) { throw InvalidUuidException.newInstance(string); } return value & 0xffL; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/UriCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.codec; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import java.net.URI; import java.util.UUID; /** * Codec for UUID URIs (specifically URNs). *
* <p>
* {@link UriCodec} encodes UUID to and from an opaque {@link URI}.
* <p>
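* Round-trip sketch (illustrative addition; {@code uuid} is assumed to be an
* existing {@link GUID128}):
*
* <pre>{@code
* URI uri = UriCodec.INSTANCE.encode(uuid);     // opaque URI: "urn:uuid:..."
* GUID128 back = UriCodec.INSTANCE.decode(uri); // back to the original value
* }</pre>
* <p>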
* The URN representation adds the prefix 'urn:uuid:' to a UUID canonical * representation. * * See: https://github.com/f4b6a3/uuid-creator/issues/32 * * * * @see InvalidUuidException * @see UUID URIs * @see UriCodec.isUuidUrn(java.net.URI * uri) * @see What * is the difference between URI, URL and URN? */ public class UriCodec implements GuidCodec { /** * A shared immutable instance. */ public static final UriCodec INSTANCE = new UriCodec(); /** * Get a URI from a UUID. * * @param uuid a UUID * @return a URI * @throws InvalidUuidException if the argument is invalid */ @Override public URI encode(GUID128 uuid) { return URI.create(UrnCodec.INSTANCE.encode(uuid)); } /** * Get a UUID from a URI. * * @param uri a URI * @return a UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(URI uri) { if (!isUuidUri(uri)) { throw InvalidUuidException.newInstance(uri); } return StandardStringCodec.INSTANCE.decode(uri.toString()); } /** * Check if the URI is a UUID URN. * * @param uri a URI * @return true if the it's a URN */ public static boolean isUuidUri(URI uri) { return uri != null && UrnCodec.isUuidUrn(uri.toString()); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/UrnCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import java.util.UUID; /** * Codec for UUID URNs. *
* <p>
* {@link UrnCodec} encodes UUID to and from a URN.
* <p>
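* Usage sketch (illustrative addition; {@code uuid} is assumed to be an
* existing {@link GUID128}):
*
* <pre>{@code
* String urn = UrnCodec.INSTANCE.encode(uuid);  // "urn:uuid:" + canonical form
* GUID128 back = UrnCodec.INSTANCE.decode(urn); // back to the original value
* }</pre>
* <p>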
* The URN representation adds the prefix 'urn:uuid:' to a UUID canonical * representation. * * @see InvalidUuidException * @see UUID * URIs * @see UriCodec.isUuidUrn(java.net.URI * uri) * @see What * is the difference between URI, URL and URN? */ public class UrnCodec implements GuidCodec { /** * A shared immutable instance. */ public static final UrnCodec INSTANCE = new UrnCodec(); private static final String URN_PREFIX = "urn:uuid:"; /** * Get a URN string from a UUID. * * @param uuid a UUID * @return a URN string * @throws InvalidUuidException if the argument is invalid */ @Override public String encode(GUID128 uuid) { UuidValidator.validate(uuid); return URN_PREFIX + StandardStringCodec.INSTANCE.encode(uuid); } /** * Get a UUID from a URN string. * * @param urn a URN string * @return a UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(String urn) { if (!isUuidUrn(urn)) { throw InvalidUuidException.newInstance(urn); } return StandardStringCodec.INSTANCE.decode(urn); } /** * Check if a URN string is a UUID URN. * * @param urn a string * @return true if the it's a URN */ public static boolean isUuidUrn(String urn) { final int stringLength = 45; // URN string length final int prefixLength = 9; // URN prefix length if (urn != null && urn.length() == stringLength) { String uuid = urn.substring(prefixLength); return UuidValidator.isValid(uuid); } return false; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base16Codec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base16Decoder; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base16Encoder; /** * Codec for base-16 as defined in RFC-4648. *
* <p>
* It is case insensitive, so it decodes from lower and upper case, but encodes
* to lower case only.
* <p>
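* For illustration (added example; the sample value is arbitrary):
*
* <pre>{@code
* GUID128 x = Base16Codec.INSTANCE.decode("0123456789AB4DEFA0123456789ABCDE");
* String s = Base16Codec.INSTANCE.encode(x); // "0123456789ab4defa0123456789abcde"
* }</pre>
* <p>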
* It can be up to 22x faster than doing * uuid.toString().replaceAll("-", "")`. * * @see RFC-4648 */ public final class Base16Codec extends BaseNCodec { private static final BaseN BASE_N = new BaseN("0-9a-f"); /** * A shared immutable instance. */ public static final Base16Codec INSTANCE = new Base16Codec(); /** * Default constructor. */ public Base16Codec() { super(BASE_N, new Base16Encoder(BASE_N), new Base16Decoder(BASE_N)); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base32Codec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base32Decoder; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base32Encoder; /** * Codec for base-32 as defined in RFC-4648. *
* <p>
* It is case insensitive, so it decodes from lower and upper case, but encodes
* to lower case only.
* <p>
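* For illustration (added example; {@code uuid} is assumed to be an existing
* {@link GUID128}):
*
* <pre>{@code
* String s = Base32Codec.INSTANCE.encode(uuid);                // 26 chars, lower case
* GUID128 back = Base32Codec.INSTANCE.decode(s.toUpperCase()); // case-insensitive decode
* }</pre>
* <p>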
* This codec complies with RFC-4648, encoding a byte array sequentially. If you * need a codec that encodes integers using the remainder operator (modulus), * use the static factory {@link BaseNCodec#newInstance(BaseN)}. * * @see RFC-4648 */ public final class Base32Codec extends BaseNCodec { private static final BaseN BASE_N = new BaseN("a-z2-7"); /** * A shared immutable instance. */ public static final Base32Codec INSTANCE = new Base32Codec(); /** * Default constructor. */ public Base32Codec() { super(BASE_N, new Base32Encoder(BASE_N), new Base32Decoder(BASE_N)); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base58BtcCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base; /** * Codec for base-58. *
* <p>
* It is case SENSITIVE.
* <p>
* It encodes using the remainder operator (modulus).
* <p>
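* For illustration (added example; {@code uuid} is assumed to be an existing
* {@link GUID128}):
*
* <pre>{@code
* String s = Base58BtcCodec.INSTANCE.encode(uuid);  // 22 chars, Bitcoin alphabet
* GUID128 back = Base58BtcCodec.INSTANCE.decode(s); // the exact case is required
* }</pre>
* <p>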
* The alphabet for this codec is the same used in Bitcoin (BTC). * * @see The Base58 Encoding Scheme */ public final class Base58BtcCodec extends BaseNCodec { private static final BaseN BASE_N = new BaseN("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"); /** * A shared immutable instance. */ public static final Base58BtcCodec INSTANCE = new Base58BtcCodec(); /** * Default constructor. */ public Base58BtcCodec() { super(BASE_N); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base62Codec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base; /** * Codec for base-62. *
* <p>
* It is case SENSITIVE.
* <p>
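* For illustration (added example; {@code uuid} is assumed to be an existing
* {@link GUID128}):
*
* <pre>{@code
* String s = Base62Codec.INSTANCE.encode(uuid); // 22 chars over "0-9A-Za-z"
* }</pre>
* <p>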
* It encodes using remainder operator (modulus). */ public final class Base62Codec extends BaseNCodec { private static final BaseN BASE_N = new BaseN("0-9A-Za-z"); /** * A shared immutable instance. */ public static final Base62Codec INSTANCE = new Base62Codec(); /** * Default constructor. */ public Base62Codec() { super(BASE_N); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base64Codec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Decoder; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Encoder; /** * Codec for base-64 as defined in RFC-4648. *
* <p>
* It is case SENSITIVE.
* <p>
* The only difference between base-64 and base-64-url is that the latter
* substitutes the chars '+' and '/' with '-' and '_'.
* <p>
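* For illustration (added example; {@code uuid} is assumed to be an existing
* {@link GUID128}):
*
* <pre>{@code
* String std = Base64Codec.INSTANCE.encode(uuid);    // 22 chars, may contain '+' and '/'
* String url = Base64UrlCodec.INSTANCE.encode(uuid); // uses '-' and '_' instead
* }</pre>
* <p>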
* This codec complies with RFC-4648, encoding a byte array sequentially. If you * need a codec that encodes integers using the remainder operator (modulus), * use the static factory {@link BaseNCodec#newInstance(BaseN)}. * * @see RFC-4648 */ public final class Base64Codec extends BaseNCodec { private static final BaseN BASE_N = new BaseN("A-Za-z0-9+/"); /** * A shared immutable instance. */ public static final Base64Codec INSTANCE = new Base64Codec(); /** * Default constructor. */ public Base64Codec() { super(BASE_N, new Base64Encoder(BASE_N), new Base64Decoder(BASE_N)); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base64UrlCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Decoder; import com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Encoder; /** * Codec for base-64-url as defined in RFC-4648. *
* <p>
* It is case SENSITIVE.
* <p>
* The only difference between base-64 and base-64-url is that the latter
* substitutes the chars '+' and '/' with '-' and '_'.
* <p>
* This codec complies with RFC-4648, encoding a byte array sequentially. If you * need a codec that encodes integers using the remainder operator (modulus), * use the static factory {@link BaseNCodec#newInstance(BaseN)}. * * @see RFC-4648 */ public final class Base64UrlCodec extends BaseNCodec { private static final BaseN BASE_N = new BaseN("A-Za-z0-9-_"); /** * A shared immutable instance. */ public static final Base64UrlCodec INSTANCE = new Base64UrlCodec(); /** * Default constructor. */ public Base64UrlCodec() { super(BASE_N, new Base64Encoder(BASE_N), new Base64Decoder(BASE_N)); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/BaseN.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray; import com.pinecone.ulf.util.guid.i128.util.immutable.CharArray; import java.util.Arrays; /** * Class that represents the base-n encodings. */ public final class BaseN { private final int radix; private final int length; private final char padding; private final boolean sensitive; private final CharArray alphabet; private final ByteArray map; /** * The minimum radix: 2. */ protected static final int RADIX_MIN = 2; /** * The maximum radix: 64. */ protected static final int RADIX_MAX = 64; /** * The default alphabet for case-insensitive base-n. */ protected static final String ALPHABET_36 = "0123456789abcdefghijklmnopqrstuvwxyz"; /** * The default alphabet for case-sensitive base-n. */ protected static final String ALPHABET_64 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_"; private static final int UUID_BITS = 128; /** * Public constructor for the base-n object. *
* <p>
* The radix is the alphabet size.
* <p>
* The supported alphabet sizes are from 2 to 64.
* <p>
* If there are mixed cases in the alphabet, the base-n is case SENSITIVE.
* <p>
* The encoded string length is equal to {@code CEIL(128 / LOG2(n))}, where n is
* the radix. The encoded string is padded to fit the expected length.
* <p>
* The padding character is the first character of the string. For example, the
* padding character for the alphabet "abcdef0123456" is 'a'.
* <p>
* The example below shows how to create a {@link BaseN} for a hypothetical
* base-40 encoding. You only need to pass the number 40.
*
* <pre>{@code
* int radix = 40;
* BaseN base = new BaseN(radix);
* }</pre>
*
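* <p>
* A worked check of the length formula (illustrative addition): for radix 40,
* {@code CEIL(128 / LOG2(40)) = CEIL(128 / 5.3219...) = CEIL(24.05...) = 25},
* so every encoded string is 25 characters long, padded with the first
* character of the alphabet.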
* If radix is greater than 36, the alphabet generated is a subset of the * character sequence "0-9A-Za-z-_". Otherwise it is a subset of "0-9a-z". In * the example above the resulting alphabet is * "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcd" (0-9A-Za-d). * * @param radix the radix to be used */ public BaseN(int radix) { this(expand(radix)); } /** * Public constructor for the base-n object. *
* <p>
* The radix is the alphabet size.
* <p>
* The supported alphabet sizes are from 2 to 64.
* <p>
* If there are mixed cases in the alphabet, the base-n is case SENSITIVE.
* <p>
* The encoded string length is equal to {@code CEIL(128 / LOG2(n))}, where n is
* the radix. The encoded string is padded to fit the expected length.
* <p>
* The padding character is the first character of the string. For example, the
* padding character for the alphabet "abcdef0123456" is 'a'.
* <p>
* The example below shows how to create a {@link BaseN} for a hypothetical
* base-26 encoding that contains only letters. You only need to pass a string
* with 26 characters.
*
* <pre>{@code
* String alphabet = "abcdefghijklmnopqrstuvwxyz";
* BaseN base = new BaseN(alphabet);
* }</pre>
* <p>
* Alphabet strings similar to "a-f0-9" are expanded to "abcdef0123456789". The
* same example using the string "a-z" instead of "abcdefghijklmnopqrstuvwxyz":
*
* <pre>{@code
* String alphabet = "a-z";
* BaseN base = new BaseN(alphabet);
* }</pre>
* * @param alphabet the alphabet to be used */ public BaseN(String alphabet) { // expand the alphabet, if necessary String charset = alphabet.indexOf('-') >= 0 ? expand(alphabet) : alphabet; // check the alphabet length if (charset.length() < RADIX_MIN || charset.length() > RADIX_MAX) { throw new IllegalArgumentException("Unsupported length: " + charset.length()); } // set the radix field this.radix = charset.length(); // set the length field this.length = (int) Math.ceil(UUID_BITS / (Math.log(this.radix) / Math.log(2))); // set the padding field this.padding = charset.charAt(0); // set the sensitive field this.sensitive = sensitive(charset); // set the alphabet field this.alphabet = CharArray.from(charset.toCharArray()); // set the map field this.map = map(charset, sensitive); } /** * Returns the radix of the base-n. * * @return the radix */ public int getRadix() { return radix; } /** * Returns the length of encoded UUIDs. * * @return the length */ public int getLength() { return length; } /** * Return the padding character. * * @return a character */ public char getPadding() { return padding; } /** * Informs if the base-n is case-sensitive. * * @return true if it is case-sensitive */ public boolean isSensitive() { return sensitive; } /** * Returns the alphabet of the base-n. * * @return the alphabet */ public CharArray getAlphabet() { return this.alphabet; } /** * Returns the map of the base-n. * * @return a map */ public ByteArray getMap() { return this.map; } /** * Checks if the UUID string is valid. * * @param uuid a UUID string * @return true if valid, false if invalid */ public boolean isValid(String uuid) { if (uuid == null || uuid.length() != this.length) { return false; } for (int i = 0; i < this.length; i++) { if (this.map.get(uuid.charAt(i)) == -1) { return false; } } return true; } /** * Checks if the UUID string is a valid. * * @param uuid a UUID string * @throws InvalidUuidException if the argument is invalid */ public void validate(String uuid) { if (!isValid(uuid)) { throw InvalidUuidException.newInstance(uuid); } } private static boolean sensitive(String charset) { String lowercase = charset.toLowerCase(); String uppercase = charset.toUpperCase(); return !(charset.equals(lowercase) || charset.equals(uppercase)); } private static ByteArray map(String alphabet, boolean sensitive) { // initialize the map with -1 byte[] mapping = new byte[256]; Arrays.fill(mapping, (byte) -1); // map the alphabets chars to values for (int i = 0; i < alphabet.length(); i++) { if (sensitive) { mapping[alphabet.charAt(i)] = (byte) i; } else { mapping[alphabet.toLowerCase().charAt(i)] = (byte) i; mapping[alphabet.toUpperCase().charAt(i)] = (byte) i; } } return ByteArray.from(mapping); } private static String expand(int radix) { if (radix < RADIX_MIN || radix > RADIX_MAX) { throw new IllegalArgumentException("Unsupported radix: " + radix); } if (radix > 36) { return ALPHABET_64.substring(0, radix); // 0-9A-Za-z-_ } return ALPHABET_36.substring(0, radix); // 0-9a-z } /** * Expands character sequences similar to 0-9, a-z and A-Z. 
* * @param string a string to be expanded * @return a string */ protected static String expand(String string) { StringBuilder buffer = new StringBuilder(); int i = 1; while (i <= string.length()) { final char a = string.charAt(i - 1); // previous char if ((i < string.length() - 1) && (string.charAt(i) == '-')) { final char b = string.charAt(i + 1); // next char char[] expanded = expand(a, b); if (expanded.length != 0) { i += 2; // skip buffer.append(expanded); } else { buffer.append(a); } } else { buffer.append(a); } i++; } return buffer.toString(); } /** * Expands a character sequence similar to 0-9, a-z and A-Z. * * @param a the first character of the sequence * @param b the last character of the sequence * @return an expanded sequence of characters */ protected static char[] expand(char a, char b) { char[] expanded = expand(a, b, '0', '9'); // digits (0-9) if (expanded.length == 0) { expanded = expand(a, b, 'a', 'z'); // lower case letters (a-z) } if (expanded.length == 0) { expanded = expand(a, b, 'A', 'Z'); // upper case letters (A-Z) } return expanded; } private static char[] expand(char a, char b, char min, char max) { if (!isValidRange(a, b, min, max)) { return new char[0]; } return fillRange(a, b); } private static boolean isValidRange(char start, char end, char min, char max) { return start <= end && start >= min && end <= max; } private static char[] fillRange(char start, char end) { char[] buffer = new char[(end - start) + 1]; for (int i = 0; i < buffer.length; i++) { buffer[i] = (char) (start + i); } return buffer; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/BaseNCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.codec.base; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.GuidCodec; import com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNDecoder; import com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNEncoder; import com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNRemainderDecoder; import com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNRemainderEncoder; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import java.util.UUID; import java.util.function.Function; /** * Abstract class that contains the basic functionality for base-n codecs of * this package. */ public abstract class BaseNCodec implements GuidCodec { /** * The base-n. */ protected final BaseN base; /** * An encoder function. */ protected final Function encoder; /** * A decoder function. */ protected final Function decoder; /** * A division function that returns quotient and remainder. *
* <p>
* It MUST perform SIGNED long division.
* <p>
* Example:
*
* <pre>{@code
* CustomDivider divideBy64 = x -> new long[] { x / 64, x % 64 };
*
* long[] answer = divideBy64.divide(1024);
* }</pre>
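* <p>
* Such a divider can be supplied to the static factory (illustrative
* addition):
*
* <pre>{@code
* BaseNCodec codec = BaseNCodec.newInstance(new BaseN(64),
*         x -> new long[] { x / 64, x % 64 });
* }</pre>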
*/ @FunctionalInterface public interface CustomDivider { /** * Divide a number by x. * * Returned pair of longs: [x / divider, x % divider] * * @param x the divisor * @return a pair of longs */ long[] divide(long x); } /** * @param base an object that represents the base-n encoding */ protected BaseNCodec(BaseN base) { this(base, null); } /** * @param base an object that represents the base-n encoding * @param divider a division function that returns quotient and remainder */ protected BaseNCodec(BaseN base, CustomDivider divider) { this(base, new BaseNRemainderEncoder(base, divider), new BaseNRemainderDecoder(base)); } /** * @param base an object that represents the base-n encoding * @param encoder a functional encoder * @param decoder a functional decoder */ protected BaseNCodec(BaseN base, BaseNEncoder encoder, BaseNDecoder decoder) { this.base = base; this.encoder = encoder; this.decoder = decoder; } /** * Static factory that returns a new instance of {@link BaseNCodec} using the * specified {@link BaseN}. *
* <p>
* This method can be used if none of the existing concrete codecs of this
* package is desired.
* <p>
* The {@link BaseNCodec} objects provided by this method encode UUIDs using
* the remainder operator (modulus), a common approach to encoding integers.
* <p>
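* A usage sketch (illustrative addition; the hexadecimal alphabet is the same
* one used by {@link Base16Codec}):
*
* <pre>{@code
* BaseNCodec codec = BaseNCodec.newInstance(new BaseN("0-9a-f"));
* }</pre>
* <p>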
* If you need a {@link BaseN} that is not available in this package, use the * static factories {@link BaseNCodec#newInstance(String)} or * {@link BaseNCodec#newInstance(int)}. * * @param base an object that represents the base-n encoding * @return a {@link BaseNCodec} */ public static BaseNCodec newInstance(BaseN base) { return newInstance(base, null); } /** * Static factory that returns a new instance of {@link BaseNCodec} using the * specified radix. *
* <p>
* This method can be used if none of the existing concrete codecs of this
* package is desired.
* <p>
* The {@link BaseNCodec} objects provided by this method encode UUIDs using
* the remainder operator (modulus), a common approach to encoding integers.
* <p>
* The example below shows how to create a {@link BaseNCodec} for a
* hypothetical base-40 encoding. You only need to pass the number 40. The
* {@link BaseNCodec} instantiates a {@link BaseN} object internally. See
* {@link BaseN}.
*
* <pre>{@code
* int radix = 40;
* BaseNCodec codec = BaseNCodec.newInstance(radix);
* }</pre>
* <p>
* If radix is greater than 36, the alphabet generated is a subset of the * character sequence "0-9A-Za-z-_". Otherwise it is a subset of "0-9a-z". In * the example above the resulting alphabet is * "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcd" (0-9A-Za-d). * * @param radix the radix to be used * @return a {@link BaseNCodec} */ public static BaseNCodec newInstance(int radix) { return newInstance(radix, null); } /** * Static factory that returns a new instance of {@link BaseNCodec} using the * specified alphabet. *
* <p>
* This method can be used if none of the existing concrete codecs of this
* package is desired.
* <p>
* The {@link BaseNCodec} objects provided by this method encode UUIDs using
* the remainder operator (modulus), a common approach to encoding integers.
* <p>
* The example below shows how to create a {@link BaseNCodec} for a
* hypothetical base-26 encoding that contains only letters. You only need to
* pass a string with 26 characters. The {@link BaseNCodec} instantiates a
* {@link BaseN} object internally. See {@link BaseN}.
*
* <pre>{@code
* String alphabet = "abcdefghijklmnopqrstuvwxyz";
* BaseNCodec codec = BaseNCodec.newInstance(alphabet);
* }</pre>
* <p>
* Alphabet strings similar to "a-f0-9" are expanded to "abcdef0123456789". The
* same example using the string "a-z" instead of "abcdefghijklmnopqrstuvwxyz":
*
* <pre>{@code
* String alphabet = "a-z";
* BaseNCodec codec = BaseNCodec.newInstance(alphabet);
* }</pre>
* * @param alphabet the alphabet to be used * @return a {@link BaseNCodec} */ public static BaseNCodec newInstance(String alphabet) { return newInstance(alphabet, null); } /** * Static factory that returns a new instance of {@link BaseNCodec} using the * specified {@link BaseN} and a {@link CustomDivider}. * * @param base an object that represents the base-n encoding * @param divider a division function that returns quotient and remainder * @return a {@link BaseNCodec} */ public static BaseNCodec newInstance(BaseN base, CustomDivider divider) { return new BaseNCodec(base, divider) { }; } /** * Static factory that returns a new instance of {@link BaseNCodec} using the * specified radix and a {@link CustomDivider}. * * @param radix the radix to be used * @param divider a division function that returns quotient and remainder * @return a {@link BaseNCodec} */ public static BaseNCodec newInstance(int radix, CustomDivider divider) { BaseN base = new BaseN(radix); return newInstance(base, divider); } /** * Static factory that returns a new instance of {@link BaseNCodec} using the * specified alphabet and a {@link CustomDivider}. * * @param alphabet the alphabet to be used * @param divider a division function that returns quotient and remainder * @return a {@link BaseNCodec} */ public static BaseNCodec newInstance(String alphabet, CustomDivider divider) { BaseN base = new BaseN(alphabet); return newInstance(base, divider); } /** * Get the base-n encoding object. * * @return a base-n encoding object */ public BaseN getBase() { return this.base; } /** * Get an encoded string from a UUID. * * @param uuid a UUID * @return an encoded string * @throws InvalidUuidException if the argument is invalid */ @Override public String encode(GUID128 uuid) { try { UuidValidator.validate(uuid); return encoder.apply(uuid); } catch (RuntimeException e) { throw new InvalidUuidException(e.getMessage(), e); } } /** * Get a UUID from an encoded string. * * @param string the encoded string * @return a UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(String string) { try { validate(string); return decoder.apply(string); } catch (RuntimeException e) { throw new InvalidUuidException(e.getMessage(), e); } } protected void validate(String string) { if (string == null || string.length() != this.base.getLength()) { throw InvalidUuidException.newInstance(string); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base16Decoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import java.util.UUID; /** * Function that decodes a base-16 string to a UUID. *
* <p>
* It is case insensitive, so it decodes in lower case and upper case. * * @see RFC-4648 */ public final class Base16Decoder extends BaseNDecoder { /** * Constructor with a base-n. * * @param base a base-n */ public Base16Decoder(BaseN base) { super(base); } @Override public GUID128 apply(String string) { long msb = 0; long lsb = 0; for (int i = 0; i < 16; i++) { msb = (msb << 4) | get(string, i); } for (int i = 16; i < 32; i++) { lsb = (lsb << 4) | get(string, i); } return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base16Encoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import java.util.UUID; /** * Function that encodes a UUID to a base-16 string. *
* <p>
* It encodes in lower case only. * * @see RFC-4648 */ public final class Base16Encoder extends BaseNEncoder { private static final int CHAR_LENGTH = 32; /** * Constructor with a base-n. * * @param base a base-n */ public Base16Encoder(BaseN base) { super(base); } @Override public String apply(GUID128 uuid) { final char[] chars = new char[CHAR_LENGTH]; final long msb = uuid.getMostSignificantBits(); final long lsb = uuid.getLeastSignificantBits(); chars[0x00] = get(msb >>> 0x3c & 0xf); chars[0x01] = get(msb >>> 0x38 & 0xf); chars[0x02] = get(msb >>> 0x34 & 0xf); chars[0x03] = get(msb >>> 0x30 & 0xf); chars[0x04] = get(msb >>> 0x2c & 0xf); chars[0x05] = get(msb >>> 0x28 & 0xf); chars[0x06] = get(msb >>> 0x24 & 0xf); chars[0x07] = get(msb >>> 0x20 & 0xf); chars[0x08] = get(msb >>> 0x1c & 0xf); chars[0x09] = get(msb >>> 0x18 & 0xf); chars[0x0a] = get(msb >>> 0x14 & 0xf); chars[0x0b] = get(msb >>> 0x10 & 0xf); chars[0x0c] = get(msb >>> 0x0c & 0xf); chars[0x0d] = get(msb >>> 0x08 & 0xf); chars[0x0e] = get(msb >>> 0x04 & 0xf); chars[0x0f] = get(msb >>> 0x00 & 0xf); chars[0x10] = get(lsb >>> 0x3c & 0xf); chars[0x11] = get(lsb >>> 0x38 & 0xf); chars[0x12] = get(lsb >>> 0x34 & 0xf); chars[0x13] = get(lsb >>> 0x30 & 0xf); chars[0x14] = get(lsb >>> 0x2c & 0xf); chars[0x15] = get(lsb >>> 0x28 & 0xf); chars[0x16] = get(lsb >>> 0x24 & 0xf); chars[0x17] = get(lsb >>> 0x20 & 0xf); chars[0x18] = get(lsb >>> 0x1c & 0xf); chars[0x19] = get(lsb >>> 0x18 & 0xf); chars[0x1a] = get(lsb >>> 0x14 & 0xf); chars[0x1b] = get(lsb >>> 0x10 & 0xf); chars[0x1c] = get(lsb >>> 0x0c & 0xf); chars[0x1d] = get(lsb >>> 0x08 & 0xf); chars[0x1e] = get(lsb >>> 0x04 & 0xf); chars[0x1f] = get(lsb >>> 0x00 & 0xf); return new String(chars); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base32Decoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import java.util.UUID; /** * Function that decodes a base-32 string to a UUID. *
* <p>
* It is case insensitive, so it decodes in lower case and upper case. * * @see RFC-4648 */ public final class Base32Decoder extends BaseNDecoder { /** * Constructor with a base-n. * * @param base a base-n */ public Base32Decoder(BaseN base) { super(base); } @Override public GUID128 apply(String string) { long msb = 0; long lsb = 0; for (int i = 0; i < 12; i++) { msb = (msb << 5) | get(string, i); } msb = (msb << 4) | (get(string, 12) >>> 1); lsb = (lsb << 5) | get(string, 12); for (int i = 13; i < 25; i++) { lsb = (lsb << 5) | get(string, i); } lsb = (lsb << 3) | (get(string, 25) >>> 2); return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base32Encoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import java.util.UUID; /** * Function that encodes a UUID to a base-32 string. *
* <p>
* It encodes in lower case only. * * @see RFC-4648 */ public final class Base32Encoder extends BaseNEncoder { private static final int CHAR_LENGTH = 26; /** * Constructor with a base-n. * * @param base a base-n */ public Base32Encoder(BaseN base) { super(base); } @Override public String apply(GUID128 uuid) { final char[] chars = new char[CHAR_LENGTH]; long msb = uuid.getMostSignificantBits(); long lsb = uuid.getLeastSignificantBits(); chars[0x00] = get((msb >>> 59) & 0b11111); chars[0x01] = get((msb >>> 54) & 0b11111); chars[0x02] = get((msb >>> 49) & 0b11111); chars[0x03] = get((msb >>> 44) & 0b11111); chars[0x04] = get((msb >>> 39) & 0b11111); chars[0x05] = get((msb >>> 34) & 0b11111); chars[0x06] = get((msb >>> 29) & 0b11111); chars[0x07] = get((msb >>> 24) & 0b11111); chars[0x08] = get((msb >>> 19) & 0b11111); chars[0x09] = get((msb >>> 14) & 0b11111); chars[0x0a] = get((msb >>> 9) & 0b11111); chars[0x0b] = get((msb >>> 4) & 0b11111); chars[0x0c] = get(((msb << 1) & 0b11111) | ((lsb >>> 63) & 0b11111)); chars[0x0d] = get((lsb >>> 58) & 0b11111); chars[0x0e] = get((lsb >>> 53) & 0b11111); chars[0x0f] = get((lsb >>> 48) & 0b11111); chars[0x10] = get((lsb >>> 43) & 0b11111); chars[0x11] = get((lsb >>> 38) & 0b11111); chars[0x12] = get((lsb >>> 33) & 0b11111); chars[0x13] = get((lsb >>> 28) & 0b11111); chars[0x14] = get((lsb >>> 23) & 0b11111); chars[0x15] = get((lsb >>> 18) & 0b11111); chars[0x16] = get((lsb >>> 13) & 0b11111); chars[0x17] = get((lsb >>> 8) & 0b11111); chars[0x18] = get((lsb >>> 3) & 0b11111); chars[0x19] = get((lsb << 2) & 0b11111); return new String(chars); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base64Decoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import java.util.UUID; /** * Function that decodes a base-64 string to a UUID. *
* <p>
* It is case SENSITIVE. * * @see RFC-4648 */ public final class Base64Decoder extends BaseNDecoder { /** * Constructor with a base-n. * * @param base a base-n */ public Base64Decoder(BaseN base) { super(base); } @Override public GUID128 apply(String string) { long msb = 0; long lsb = 0; for (int i = 0; i < 10; i++) { msb = (msb << 6) | get(string, i); } msb = (msb << 4) | (get(string, 10) >>> 2); lsb = (lsb << 6) | get(string, 10); for (int i = 11; i < 21; i++) { lsb = (lsb << 6) | get(string, i); } lsb = (lsb << 2) | (get(string, 21) >>> 4); return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base64Encoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import java.util.UUID; /** * Function that encodes a UUID to a base-64 string. * * @see RFC-4648 */ public final class Base64Encoder extends BaseNEncoder { private static final int CHAR_LENGTH = 22; /** * Constructor with a base-n. 
* * @param base a base-n */ public Base64Encoder(BaseN base) { super(base); } @Override public String apply(GUID128 uuid) { final char[] chars = new char[CHAR_LENGTH]; long msb = uuid.getMostSignificantBits(); long lsb = uuid.getLeastSignificantBits(); chars[0x00] = get((msb >>> 58) & 0b111111); chars[0x01] = get((msb >>> 52) & 0b111111); chars[0x02] = get((msb >>> 46) & 0b111111); chars[0x03] = get((msb >>> 40) & 0b111111); chars[0x04] = get((msb >>> 34) & 0b111111); chars[0x05] = get((msb >>> 28) & 0b111111); chars[0x06] = get((msb >>> 22) & 0b111111); chars[0x07] = get((msb >>> 16) & 0b111111); chars[0x08] = get((msb >>> 10) & 0b111111); chars[0x09] = get((msb >>> 4) & 0b111111); chars[0x0a] = get(((msb << 2) & 0b111111) | ((lsb >>> 62) & 0b111111)); chars[0x0b] = get((lsb >>> 56) & 0b111111); chars[0x0c] = get((lsb >>> 50) & 0b111111); chars[0x0d] = get((lsb >>> 44) & 0b111111); chars[0x0e] = get((lsb >>> 38) & 0b111111); chars[0x0f] = get((lsb >>> 32) & 0b111111); chars[0x10] = get((lsb >>> 26) & 0b111111); chars[0x11] = get((lsb >>> 20) & 0b111111); chars[0x12] = get((lsb >>> 14) & 0b111111); chars[0x13] = get((lsb >>> 8) & 0b111111); chars[0x14] = get((lsb >>> 2) & 0b111111); chars[0x15] = get((lsb << 4) & 0b111111); return new String(chars); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNDecoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray; import java.util.UUID; import java.util.function.Function; /** * Abstract function to be extended by all decoder functions of this package. *
* <p>
* If the base-n is case insensitive, it decodes in lower case and upper case. */ public abstract class BaseNDecoder implements Function { /** * The base-n. */ protected final BaseN base; /** * The base-n map. */ protected final ByteArray map; /** * @param base an enumeration that represents the base-n encoding */ public BaseNDecoder(BaseN base) { this.base = base; this.map = base.getMap(); } protected long get(String string, int i) { final int chr = string.charAt(i); if (chr > 255) { throw InvalidUuidException.newInstance(string); } final byte value = map.get(chr); if (value < 0) { throw InvalidUuidException.newInstance(string); } return value & 0xffL; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNEncoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import com.pinecone.ulf.util.guid.i128.util.immutable.CharArray; import java.util.UUID; import java.util.function.Function; /** * Abstract function to be extended by all encoder functions of this package. *

* If the base-n is case insensitive, it encodes in lower case only. */ public abstract class BaseNEncoder implements Function { /** * The base-n. */ protected final BaseN base; /** * The base-n alphabet. */ protected final CharArray alphabet; /** * @param base an object that represents the base-n encoding */ public BaseNEncoder(BaseN base) { this.base = base; this.alphabet = base.getAlphabet(); } protected char get(final long index) { return alphabet.get((int) index); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNRemainderDecoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import java.util.UUID; /** * Function that decodes a base-n string to a UUID. *

* It decodes strings created by encoders that use the remainder operator (modulus), * a common approach to encoding integers. *
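 * Editorial sketch (not part of the original source): decoding folds each
 * digit in as value = value * radix + digit over the full 128 bits. The
 * multiply() helper defined below keeps a 64-bit limb exact by splitting it
 * into 32-bit halves, so that product and carry both fit in a long
 * (x, radix and carryIn are placeholders for one limb, the base and the
 * incoming carry):
 * {@code
 *   long MASK = 0x00000000ffffffffL;
 *   long lo = ((x & MASK) * radix) + carryIn;     // low half times radix
 *   long hi = ((x >>> 32) * radix) + (lo >>> 32); // high half plus carry
 *   long product  = (hi << 32) | (lo & MASK);
 *   long carryOut = hi >>> 32;
 * }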

* The decoding process is performed using integer arithmetic. */ public final class BaseNRemainderDecoder extends BaseNDecoder { private final int multiplier; private static final long MASK = 0x00000000ffffffffL; /** * Constructor with a base-n. * * @param base a base-n */ public BaseNRemainderDecoder(BaseN base) { super(base); multiplier = base.getRadix(); } public GUID128 apply(String string) { long msb = 0; long lsb = 0; long rem = 0; // remainder long[] ans; // [product, overflow] for (int i = 0; i < base.getLength(); i++) { rem = get(string, i); ans = multiply(lsb, multiplier, rem); lsb = ans[0]; rem = ans[1]; ans = multiply(msb, multiplier, rem); msb = ans[0]; rem = ans[1]; } if (rem != 0) { throw new InvalidUuidException("Invalid encoded string (overflow): \"" + string + "\""); } return new UUID128(msb, lsb); } // multiply a long as unsigned 64 bit integer /** * Multiply a long as unsigned 64 bit integer * * @param x a number to be multiplied * @param multiplier a multiplier * @param rem the reminder * @return an array of longs */ protected static long[] multiply(final long x, final long multiplier, final long rem) { long mul; long overflow; final long product1; final long product2; // multiply the last 32 bits mul = ((x & MASK) * multiplier) + rem; product1 = mul & MASK; overflow = mul >>> 32; // multiply the first 32 bits mul = ((x >>> 32) * multiplier) + overflow; product2 = mul & MASK; overflow = mul >>> 32; // prepare the answer final long[] answer = new long[2]; answer[0] = (product2 << 32) | (product1 & MASK); answer[1] = overflow; return answer; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNRemainderEncoder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.base.function; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.base.BaseN; import com.pinecone.ulf.util.guid.i128.codec.base.BaseNCodec.CustomDivider; import java.util.UUID; /** * Function that encodes a UUID to a base-n string. *

* It encodes using the remainder operator (modulus), a common approach to * encoding integers. *
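 * Editorial sketch (not part of the original source): encoding repeatedly
 * divides the 128-bit value by the radix and writes each remainder into the
 * buffer from right to left. One divide() step over a 64-bit limb with the
 * default divider (x, radix and remIn are placeholders):
 * {@code
 *   long MASK = 0x00000000ffffffffL;
 *   long hi = (remIn << 32) | (x >>> 32);  // upper half with carry-in
 *   long q1 = hi / radix, r1 = hi % radix;
 *   long lo = (r1 << 32) | (x & MASK);     // lower half with new carry
 *   long q2 = lo / radix, remOut = lo % radix;
 *   long quotient = (q1 << 32) | (q2 & MASK);
 * }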

* The encoding process is performed using integer arithmetic. */ public final class BaseNRemainderEncoder extends BaseNEncoder { private final int length; private final char padding; /** * A custom divider for optimization. */ protected final CustomDivider divider; private static final long MASK = 0x00000000ffffffffL; /** * Constructor with a base-n. * * @param base a base-n */ public BaseNRemainderEncoder(BaseN base) { this(base, null); } /** * Constructor with a base-n and a custom divider. * * @param base a base-n * @param divider a custom divider */ public BaseNRemainderEncoder(BaseN base, CustomDivider divider) { super(base); length = base.getLength(); padding = base.getPadding(); final long radix = base.getRadix(); if (divider != null) { this.divider = divider; } else { this.divider = x -> new long[] { x / radix, x % radix }; } } @Override public String apply(GUID128 uuid) { long msb = uuid.getMostSignificantBits(); long lsb = uuid.getLeastSignificantBits(); int b = length; // buffer index char[] buffer = new char[length]; long rem = 0; // remainder long[] ans; // [quotient, remainder] // fill in the buffer backwards while (msb != 0 || lsb != 0) { rem = 0; ans = divide(msb, divider, rem); msb = ans[0]; // quotient rem = ans[1]; // remainder ans = divide(lsb, divider, rem); lsb = ans[0]; // quotient rem = ans[1]; // remainder buffer[--b] = alphabet.get((int) rem); } // complete padding while (b > 0) { buffer[--b] = padding; } return new String(buffer); } /** * Divide a long as unsigned 64 bit integer * * @param x a number to be divided * @param divider a custom divider * @param rem the reminder * @return an array of longs */ protected static long[] divide(final long x, CustomDivider divider, final long rem) { long[] div; long remainder; final long quotient1; final long quotient2; // divide the first 32 bits div = divider.divide((rem << 32) | (x >>> 32)); quotient1 = div[0]; remainder = div[1]; // divide the last 32 bits div = divider.divide((remainder << 32) | (x & MASK)); quotient2 = div[0]; remainder = div[1]; // prepare the answer final long[] answer = new long[2]; answer[0] = (quotient1 << 32) | (quotient2 & MASK); answer[1] = remainder; return answer; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/DotNetGuid1Codec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.codec.other; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.GuidCodec; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidUtil; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import java.util.UUID; /** * Codec for time-based .Net Guids. */ public class DotNetGuid1Codec implements GuidCodec { /** * A shared immutable instance. */ public static final DotNetGuid1Codec INSTANCE = new DotNetGuid1Codec(); /** * Get a .Net Guid from a time-based UUID (v1). *

* This codec converts a time-based UUID (v1) to a .Net Guid. *

* It rearranges the most significant bytes from big-endian to little-endian, * and vice-versa. *
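 * Editorial illustration (not part of the original source) of the reordering,
 * shown on the canonical text form:
 * {@code
 *   UUID       00112233-4455-6677-8899-aabbccddeeff
 *   .Net Guid  33221100-5544-7766-8899-aabbccddeeff
 *   // the leading 4-2-2 byte fields are reversed; the last 8 bytes keep
 *   // network order, so the least significant half is left untouched
 * }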

* The .Net Guid stores the most significant bytes as little-endian, while the * least significant bytes are stored as big-endian (network order). * * @param uuid a UUID * @return another UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 encode(GUID128 uuid) { UuidValidator.validate(uuid); if (!UuidUtil.isTimeBased(uuid)) { throw new InvalidUuidException(String.format("Not a time-based UUID: %s.", uuid.toString())); } return toAndFromDotNetGuid(uuid); } /** * Get a time-based UUID (v1) from a .Net Guid. *

* It rearranges the most significant bytes from big-endian to little-endian, * and vice-versa. *

* The .Net Guid stores the most significant bytes as little-endian, while the * least significant bytes are stored as big-endian (network order). * * @param uuid a UUID * @return another UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(GUID128 uuid) { UuidValidator.validate(uuid); GUID128 uuidv1 = toAndFromDotNetGuid(uuid); if (!UuidUtil.isTimeBased(uuidv1)) { throw new InvalidUuidException(String.format("Not a time-based UUID: %s.", uuidv1.toString())); } return uuidv1; } /** * Convert a UUID to and from a .Net Guid. *

* It rearranges the most significant bytes from big-endian to little-endian, * and vice-versa. *

* The .Net Guid stores the most significant bytes as little-endian, while the * least significant bytes are stored as big-endian (network order). * * @see How * to Generate Sequential GUIDs for SQL Server in .NET * @see How * are GUIDs sorted by SQL Server? * * @param uuid a UUID * @return another UUID */ protected static GUID128 toAndFromDotNetGuid(GUID128 uuid) { long msb = uuid.getMostSignificantBits(); long lsb = uuid.getLeastSignificantBits(); long newMsb = 0x0000000000000000L; // high bits newMsb |= (msb & 0xff000000_0000_0000L) >>> 24; newMsb |= (msb & 0x00ff0000_0000_0000L) >>> 8; newMsb |= (msb & 0x0000ff00_0000_0000L) << 8; newMsb |= (msb & 0x000000ff_0000_0000L) << 24; // mid bits newMsb |= (msb & 0x00000000_ff00_0000L) >>> 8; newMsb |= (msb & 0x00000000_00ff_0000L) << 8; // low bits newMsb |= (msb & 0x00000000_0000_ff00L) >>> 8; newMsb |= (msb & 0x00000000_0000_00ffL) << 8; return new UUID128(newMsb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/DotNetGuid4Codec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.other; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.GuidCodec; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidUtil; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import java.util.UUID; import static com.pinecone.ulf.util.guid.i128.codec.other.DotNetGuid1Codec.toAndFromDotNetGuid; /** * Codec for random-based .Net Guids. */ public class DotNetGuid4Codec implements GuidCodec { /** * A shared immutable instance. */ public static final DotNetGuid4Codec INSTANCE = new DotNetGuid4Codec(); /** * Get a .Ned Guid from a random-based UUID (v4). *

* It rearranges the most significant bytes from big-endian to little-endian, * and vice-versa. *

* The .Net Guid stores the most significant bytes as little-endian, while the * least significant bytes are stored as big-endian (network order). * * @param uuid a UUID * @return another UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 encode(GUID128 uuid) { UuidValidator.validate(uuid); if (!UuidUtil.isRandomBased(uuid)) { throw new InvalidUuidException(String.format("Not a random-based UUID: %s.", uuid.toString())); } return toAndFromDotNetGuid(uuid); } /** * Get a random-based UUID (v4) from a .Net Guid. *

* It rearranges the most significant bytes from big-endian to little-endian, * and vice-versa. *

* The .Net Guid stores the most significant bytes as little-endian, while the * least significant bytes are stored as big-endian (network order). * * @param uuid a UUID * @return another UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(GUID128 uuid) { UuidValidator.validate(uuid); GUID128 uuidv4 = toAndFromDotNetGuid(uuid); if (!UuidUtil.isRandomBased(uuidv4)) { throw new InvalidUuidException(String.format("Not a random-based UUID: %s.", uuidv4.toString())); } return uuidv4; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/NcnameCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.other; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.codec.StandardBinaryCodec; import com.pinecone.ulf.util.guid.i128.codec.GuidCodec; import com.pinecone.ulf.util.guid.i128.codec.base.Base32Codec; import com.pinecone.ulf.util.guid.i128.codec.base.Base64UrlCodec; import com.pinecone.ulf.util.guid.i128.codec.base.BaseNCodec; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray; import com.pinecone.ulf.util.guid.i128.util.immutable.CharArray; import com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil; import java.util.Arrays; import java.util.UUID; /** * Codec for UUID NCNames. *

* A UUID NCName is a shorter string representation that conforms to the * constraints of various other identifiers such as NCName in XML documents. *

* The {@link NcnameCodec} turns a UUID into a string that does not start with * digits (0-9). But due to the default base-64-url encoding, it is case * sensitive and may contain '-' and '_'. *

* The {@link Base32Codec} can be passed to the {@link NcnameCodec} constructor * to generate base-32 NCNames. Due to the base-32 alphabet, it is case * insensitive and contains only letters (a-zA-Z) and digits (2-7). This * encoding substitution can be done to avoid the characters '-' and '_' of the * base-64-url encoding, but it makes the NCName case insensitive. *

* The transformation scheme is outlined in this Internet-Draft: * https://tools.ietf.org/html/draft-taylor-uuid-ncname-00. The draft describes * schemes for base-64-url and base-32. *
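 * Editorial sketch (not part of the original source), assuming a well-formed
 * random-based (v4) value; the leading bookend letter encodes the version
 * nibble ('E' maps to version 4 in the upper-case table):
 * {@code
 *   GUID128 uuid = new UUID128(0x1122334455664788L, 0x8899aabbccddeeffL);
 *   String ncname = NcnameCodec.INSTANCE.encode(uuid);   // 22 chars, starts with 'E'
 *   GUID128 back  = NcnameCodec.INSTANCE.decode(ncname); // equals uuid
 * }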

* {@link SlugCodec} and {@link NcnameCodec} are very similar. The difference * between the two is the bit shift they do with the original UUID to transform * it into a string. * * @see UUID * NCNames */ public final class NcnameCodec implements GuidCodec { /** * A shared immutable instance using `base64url` */ public static final NcnameCodec INSTANCE = new NcnameCodec(); private final int radix; private final int length; private final int shift; private final char padding; private final BaseNCodec codec; private static final CharArray VERSION_UPPERCASE = CharArray.from("ABCDEFGHIJKLMNOP".toCharArray()); private static final CharArray VERSION_LOWERCASE = CharArray.from("abcdefghijklmnop".toCharArray()); private static final ByteArray VERSION_MAP; static { // initialize the array with -1 byte[] mapping = new byte[256]; Arrays.fill(mapping, (byte) -1); // upper case for base-64 mapping['A'] = 0x0; mapping['B'] = 0x1; mapping['C'] = 0x2; mapping['D'] = 0x3; mapping['E'] = 0x4; mapping['F'] = 0x5; mapping['G'] = 0x6; mapping['H'] = 0x7; mapping['I'] = 0x8; mapping['J'] = 0x9; mapping['K'] = 0xa; mapping['L'] = 0xb; mapping['M'] = 0xc; mapping['N'] = 0xd; mapping['O'] = 0xe; mapping['P'] = 0xf; // lower case for base-16 and base-32 mapping['a'] = 0x0; mapping['b'] = 0x1; mapping['c'] = 0x2; mapping['d'] = 0x3; mapping['e'] = 0x4; mapping['f'] = 0x5; mapping['g'] = 0x6; mapping['h'] = 0x7; mapping['i'] = 0x8; mapping['j'] = 0x9; mapping['k'] = 0xa; mapping['l'] = 0xb; mapping['m'] = 0xc; mapping['n'] = 0xd; mapping['o'] = 0xe; mapping['p'] = 0xf; VERSION_MAP = ByteArray.from(mapping); } /** * Default constructor. */ public NcnameCodec() { this(Base64UrlCodec.INSTANCE); } /** * Constructor with a base-n codec. * * @param codec a base-n codec */ public NcnameCodec(BaseNCodec codec) { if (!(codec instanceof Base64UrlCodec || codec instanceof Base32Codec)) { throw new IllegalArgumentException("Unsupported base-n codec"); } this.codec = codec; this.radix = codec.getBase().getRadix(); this.length = codec.getBase().getLength(); this.padding = codec.getBase().getPadding(); switch (this.radix) { case 32: this.shift = 1; break; case 64: this.shift = 2; break; default: this.shift = 0; // unspecified break; } } /** * Get a NCName from a UUID. * * @param uuid a UUID * @return a NCName * @throws InvalidUuidException if the argument is invalid */ @Override public String encode(GUID128 uuid) { UuidValidator.validate(uuid); int version = uuid.version(); byte[] bytes = StandardBinaryCodec.INSTANCE.encode(uuid); int[] ints = ByteUtil.toInts(bytes); int variant = (ints[2] & 0xf0000000) >>> 24; ints[1] = (ints[1] & 0xffff0000) | ((ints[1] & 0x00000fff) << 4) | ((ints[2] & 0x0fffffff) >>> 24); ints[2] = (ints[2] & 0x00ffffff) << 8 | (ints[3] >>> 24); ints[3] = (ints[3] << 8) | variant; bytes = ByteUtil.fromInts(ints); bytes[15] = (byte) ((bytes[15] & 0xff) >>> this.shift); GUID128 uuuu = StandardBinaryCodec.INSTANCE.decode(bytes); String encoded = this.codec.encode(uuuu).substring(0, this.length - 1); // if base is 64, use upper case version, else use lower case char v = this.radix == 64 ? VERSION_UPPERCASE.get(version) : VERSION_LOWERCASE.get(version); return v + encoded; } /** * Get a UUID from a NCName. 
* * @param ncname a NCName * @return a UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(String ncname) { if (ncname == null || ncname.length() != this.length) { throw new InvalidUuidException("Invalid UUID NCName: \"" + ncname + "\""); } // check if the bookends are valid chars: [A-Pa-p] int bookend1 = (int) VERSION_MAP.get(ncname.charAt(0)); int bookend2 = (int) VERSION_MAP.get(ncname.charAt(ncname.length() - 1)); if (bookend1 == -1 || bookend2 == -1) { throw new InvalidUuidException("Invalid UUID NCName: \"" + ncname + "\""); } int version = bookend1 & 0xf; String substring = ncname.substring(1, ncname.length()); GUID128 uuid = this.codec.decode(substring + padding); byte[] bytes = StandardBinaryCodec.INSTANCE.encode(uuid); bytes[15] = (byte) ((bytes[15] & 0xff) << this.shift); int[] ints = ByteUtil.toInts(bytes); int variant = (ints[3] & 0xf0) << 24; ints[3] >>>= 8; ints[3] |= ((ints[2] & 0xff) << 24); ints[2] >>>= 8; ints[2] |= ((ints[1] & 0xf) << 24) | variant; ints[1] = (ints[1] & 0xffff0000) | (version << 12) | ((ints[1] >>> 4) & 0xfff); bytes = ByteUtil.fromInts(ints); return StandardBinaryCodec.INSTANCE.decode(bytes); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/SlugCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.other; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.GuidCodec; import com.pinecone.ulf.util.guid.i128.codec.base.Base32Codec; import com.pinecone.ulf.util.guid.i128.codec.base.Base64UrlCodec; import com.pinecone.ulf.util.guid.i128.codec.base.BaseNCodec; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import java.util.UUID; /** * Codec for UUID Slugs. *

* A UUID Slug is a shorter string representation that can be safely included in * URLs and file names. *

* The {@link SlugCodec} turns a UUID into a string that does not start with * digits (0-9). Due to the default base-64-url alphabet, it is case * sensitive and may contain '-' and '_'. *

* The {@link Base32Codec} can be passed to the {@link SlugCodec} constructor to * generate base-32 slugs. Due to the base-32 alphabet, it is case insensitive * and contains only letters (a-zA-Z) and digits (2-7). This encoding * substitution can be done to avoid the characters '-' and '_' of the * base-64-url encoding, but it makes the slug case insensitive. *

* To turn a UUID into a slug, the version and variant nibbles are moved to * the first position of the UUID byte array. The slugs generated from the same * UUID version show a constant letter in the first position of the base-64-url * string. *

* This is how the UUID bits are rearranged: * *

{@code
 *   aaaaaaaa-bbbb-Vccc-Rddd-eeeeeeeeeeee
 *                 |    |            ^
 *   ,-------------'    |   encode   |
 *   |,-----------------'      |   decode
 *   ||                        v
 *   VRaaaaaa-aabb-bbcc-cddd-eeeeeeeeeeee
 *               shift >>|
 *
 *   V: version nibble or character
 *   R: variant nibble or character
 * }
* *

* This table shows the slug prefixes for each UUID version: * *

 * VERSION PREFIX   EXAMPLE
 *    1       G     GxA1e7vco3Ib6_mjtptP3w
 *    2       K     KryezRARVgTHLQ3zJpAXIw
 *    3       O     O9JfSS1IqIabkEWC-uXWNA
 *    4       S     S5iPSZYDt7q2w0qiIFZVwQ
 *    5       W     WY-Uv6WAY5os7Gfv4ILnvQ
 *    6       a     aMKkEoaymw0FSQNJRDL7Gw
 * 
* *
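 * Editorial sketch (not part of the original source), matching the v4 row of
 * the table above:
 * {@code
 *   GUID128 uuid = new UUID128(0x1122334455664788L, 0x8899aabbccddeeffL); // v4-shaped
 *   String slug  = SlugCodec.INSTANCE.encode(uuid);  // 22 chars, starts with 'S'
 *   GUID128 back = SlugCodec.INSTANCE.decode(slug);  // equals uuid
 * }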

* If you don't like the change in the byte layout before the base-64-url * encoding, use the {@link Base64UrlCodec} instead of {@link SlugCodec} to * generate slugs. *

* {@link SlugCodec} and {@link NcnameCodec} are very similar. The difference * between the two is the bit shift they do with the original UUID to transform * it into a string. *

* In the case someone is interested in implementing this type of slug in * another language, the change in the bytes layout don't have to be done with * bit shifting. Since a base-16 character corresponds to a nibble, the layout * change could be easily done by moving characters instead of by shifting bits. * See {@code SlugCodecTest#moveCharacters()}. * * @see UUID * Slugs */ public final class SlugCodec implements GuidCodec { /** * A shared immutable instance using `base64url` */ public static final SlugCodec INSTANCE = new SlugCodec(); private final int length; private final BaseNCodec codec; /** * Default constructor. */ public SlugCodec() { this(Base64UrlCodec.INSTANCE); } /** * @param codec a base-n codec to be used (the default is base-64-url) */ public SlugCodec(BaseNCodec codec) { if (codec == null) { throw new IllegalArgumentException("Null codec"); } this.codec = codec; this.length = codec.getBase().getLength(); } /** * Get a Slug from a UUID. * * @param uuid a UUID * @return a Slug * @throws InvalidUuidException if the argument is invalid */ @Override public String encode(GUID128 uuid) { UuidValidator.validate(uuid); long long1 = uuid.getMostSignificantBits(); long long2 = uuid.getLeastSignificantBits(); long msb = 0; long lsb = 0; msb |= (long1 & 0x000000000000f000L) << 48; // move version nibble to bit positions 0, 1, 2, and 3 msb |= (long2 & 0xf000000000000000L) >>> 4; // move variant nibble to bit positions 4, 5, 6, and 7 msb |= (long1 & 0xffffffffffff0000L) >>> 8; msb |= (long1 & 0x0000000000000fffL) >>> 4; lsb |= (long1 & 0x000000000000000fL) << 60; lsb |= (long2 & 0x0fffffffffffffffL); return this.codec.encode(new UUID128(msb, lsb)); } /** * Get a UUID from a Slug. * * @param slug a Slug * @return a UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(String slug) { if (slug == null || slug.length() != this.length) { throw new InvalidUuidException("Invalid UUID Slug: \"" + slug + "\""); } GUID128 uuid = this.codec.decode(slug); long long1 = uuid.getMostSignificantBits(); long long2 = uuid.getLeastSignificantBits(); long msb = 0; long lsb = 0; msb |= (long1 & 0xf000000000000000L) >>> 48; // move version nibble to its original position msb |= (long2 & 0xf000000000000000L) >>> 60; // move variant nibble to its original position msb |= (long1 & 0x00ffffffffffff00L) << 8; msb |= (long1 & 0x00000000000000ffL) << 4; lsb |= (long1 & 0x0f00000000000000L) << 4; lsb |= (long2 & 0x0fffffffffffffffL); return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/TimeOrderedCodec.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.codec.other; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.GuidCodec; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.UuidUtil; import com.pinecone.ulf.util.guid.i128.util.UuidValidator; import java.util.UUID; /** * Codec for time-ordered UUIDs *

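 * Editorial sketch (not part of the original source) of the field
 * rearrangement performed by the conversion described below: both versions
 * carry the same 60-bit gregorian timestamp ts, but v6 stores its most
 * significant bits first, so v6 values sort by creation time:
 * {@code
 *   // v1 msb: ts[31..0] | ts[47..32] | version(0x1) | ts[59..48]
 *   // v6 msb: ts[59..12]             | version(0x6) | ts[11..0]
 * }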
* This codec converts time-based UUIDs (UUIDv1) to time-ordered UUIDs (UUIDv6). */ public class TimeOrderedCodec implements GuidCodec { /** * A shared immutable instance. */ public static final TimeOrderedCodec INSTANCE = new TimeOrderedCodec(); /** * Get a time-ordered UUID from a time-based UUID. * * @param uuid a time-based UUID * @return a time-ordered UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 encode(GUID128 uuid) { UuidValidator.validate(uuid); if (!UuidUtil.isTimeBased(uuid)) { throw new InvalidUuidException("Not a time-based UUID: " + uuid); } long timestamp = UuidUtil.getTimestamp(uuid); long msb = ((timestamp & 0x0ffffffffffff000L) << 4) // | (timestamp & 0x0000000000000fffL) // | 0x0000000000006000L; // set version 6 long lsb = uuid.getLeastSignificantBits(); return new UUID128(msb, lsb); } /** * Get a time-based UUID from a time-ordered UUID. * * @param uuid a time-ordered UUID * @return a time-based UUID * @throws InvalidUuidException if the argument is invalid */ @Override public GUID128 decode(GUID128 uuid) { UuidValidator.validate(uuid); if (!UuidUtil.isTimeOrdered(uuid)) { throw new InvalidUuidException("Not a time-ordered UUID: " + uuid); } long timestamp = UuidUtil.getTimestamp(uuid); long timeHigh = (timestamp & 0x0fff_0000_00000000L) >>> 48; long timeMid = (timestamp & 0x0000_ffff_00000000L) >>> 16; long timeLow = (timestamp & 0x0000_0000_ffffffffL) << 32; long version = 0x0000000000001000L; // Set version 1 // Combine the parts to form the Most Significant Bits (MSB) long msb = timeHigh | timeMid | timeLow | version; long lsb = uuid.getLeastSignificantBits(); return new UUID128(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidLocalDomain.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.enums; public enum UuidLocalDomain { /** * The principal domain, interpreted as POSIX UID domain on POSIX systems. */ LOCAL_DOMAIN_PERSON((byte) 0), /** * The group domain, interpreted as POSIX GID domain on POSIX systems. */ LOCAL_DOMAIN_GROUP((byte) 1), /** * The organization domain, site-defined. */ LOCAL_DOMAIN_ORG((byte) 2); private final byte value; UuidLocalDomain(byte value) { this.value = value; } /** * Get the byte value. 
* * @return a byte */ public byte getValue() { return this.value; } /** * Get the enum value. * * @param value a byte. * @return the enum */ public static UuidLocalDomain getLocalDomain(byte value) { for (UuidLocalDomain domain : UuidLocalDomain.values()) { if (domain.getValue() == value) { return domain; } } return null; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidNamespace.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.enums; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; public enum UuidNamespace { /** * Name space to be used when the name string is a fully-qualified domain name. */ NAMESPACE_DNS(new UUID128(0x6ba7b8109dad11d1L, 0x80b400c04fd430c8L)), /** * Name space to be used when the name string is a URL. */ NAMESPACE_URL(new UUID128(0x6ba7b8119dad11d1L, 0x80b400c04fd430c8L)), /** * Name space to be used when the name string is an ISO OID. */ NAMESPACE_OID(new UUID128(0x6ba7b8129dad11d1L, 0x80b400c04fd430c8L)), /** * Name space to be used when the name string is an X.500 DN (DER or text). */ NAMESPACE_X500(new UUID128(0x6ba7b8149dad11d1L, 0x80b400c04fd430c8L)); private final GUID128 value; UuidNamespace(GUID128 value) { this.value = value; } /** * Get the UUID value * * @return a UUID */ public GUID128 getValue() { return this.value; } /** * Get the enum value. * * @param value a UUID. 
* @return the enum */ public static UuidNamespace getNamespace(GUID value) { for (UuidNamespace namespace : UuidNamespace.values()) { if (namespace.getValue().equals(value)) { return namespace; } } return null; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidVariant.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.enums; public enum UuidVariant { /** * Reserved for NCS backward compatibility. */ VARIANT_RESERVED_NCS(0), /** * The variant specified in RFC 9562. */ VARIANT_STANDARD(2), /** * Reserved for Microsoft Corporation backward compatibility. */ VARIANT_RESERVED_MICROSOFT(6), /** * Reserved for future definition. */ VARIANT_RESERVED_FUTURE(7); private final int value; UuidVariant(int value) { this.value = value; } /** * Get the number value. * * @return a number */ public int getValue() { return this.value; } /** * Get the enum value. * * @param value a number. * @return the enum */ public static UuidVariant getVariant(int value) { for (UuidVariant variant : UuidVariant.values()) { if (variant.getValue() == value) { return variant; } } return null; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidVersion.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.enums; public enum UuidVersion { /** * An unknown version. */ VERSION_UNKNOWN(0), /** * The time-based version with gregorian epoch specified in RFC 9562. */ VERSION_TIME_BASED(1), /** * The DCE Security version, with embedded POSIX UIDs. */ VERSION_DCE_SECURITY(2), /** * The name-based version specified in RFC 9562 that uses MD5 hashing. */ VERSION_NAME_BASED_MD5(3), /** * The randomly or pseudo-randomly generated version specified in RFC 9562. */ VERSION_RANDOM_BASED(4), /** * The name-based version specified in RFC 9562 that uses SHA-1 hashing. */ VERSION_NAME_BASED_SHA1(5), /** * The time-ordered version with gregorian epoch proposed by Peabody and Davis. */ VERSION_TIME_ORDERED(6), /** * The time-ordered version with Unix epoch proposed by Peabody and Davis. */ VERSION_TIME_ORDERED_EPOCH(7), /** * The custom or free-form version proposed by Peabody and Davis. */ VERSION_CUSTOM(8); private final int value; UuidVersion(int value) { this.value = value; } /** * Get the number value. * * @return a number */ public int getValue() { return this.value; } /** * Get the enum value. * * @param value a number. * @return the enum */ public static UuidVersion getVersion(int value) { for (UuidVersion version : UuidVersion.values()) { if (version.getValue() == value) { return version; } } return null; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/exception/InvalidUuidException.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.exception; import java.util.Arrays; /** * Runtime exception to be used when an invalid UUID is received as argument. */ public final class InvalidUuidException extends RuntimeException { private static final long serialVersionUID = 1L; /** * Default constructor with a message. * * @param message a message */ public InvalidUuidException(String message) { super(message); } /** * Default constructor with a message and the cause. 
* * @param message a message * @param cause the cause */ public InvalidUuidException(String message, Throwable cause) { super(message, cause); } /** * Factory method for creating a runtime exception. * * @param obj an object that can, for example, a string of a char array. * @return a runtime exception */ public static InvalidUuidException newInstance(Object obj) { String string; if (obj == null) { string = null; } else if (obj instanceof char[]) { string = String.valueOf((char[]) obj); } else if (obj.getClass().isArray()) { string = Arrays.toString((byte[]) obj); } else { string = String.valueOf(obj); } if (string != null) { string = "\"" + string + "\""; } return new InvalidUuidException("Invalid UUID: " + string); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstCombFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import java.time.Clock; import java.time.Instant; import java.util.function.LongSupplier; import java.util.function.Supplier; /** * Abstract Factory for creating COMB GUIDs. *

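 * Editorial sketch (not part of the original source): the builder defined
 * below inherits the random-source setters and adds time-source setters,
 * which allows pinning both inputs in tests ({@code factoryBuilder} is a
 * placeholder for a concrete subclass builder):
 * {@code
 *   factoryBuilder.withClock(Clock.fixed(Instant.EPOCH, ZoneOffset.UTC));
 *   factoryBuilder.withRandomFunction(() -> 0x0123456789abcdefL);
 * }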
* COMB GUIDs combine a creation time and random bytes. */ public abstract class AbstCombFactory extends AbstRandomBasedFactory { /** * The instant function. */ protected Supplier instantFunction; /** * Constructor whith a version number and a builder. * * @param version a version number * @param builder a builder */ protected AbstCombFactory(UuidVersion version, Builder builder) { super(version, builder); this.instantFunction = builder.getInstantFunction(); } /** * Abstract builder for creating a COMB factory. * * @param factory type * @param builder type * @see AbstRandomBasedFactory.Builder */ public abstract static class Builder> extends AbstRandomBasedFactory.Builder { /** * The instant function. */ protected Supplier instantFunction; /** * Get the instant function. * * @return the builder */ protected Supplier getInstantFunction() { if (this.instantFunction == null) { this.instantFunction = () -> Instant.now(); } return this.instantFunction; } /** * Set the clock. * * @param clock a clock * @return the builder */ @SuppressWarnings("unchecked") public B withClock(Clock clock) { if (clock != null) { this.instantFunction = () -> clock.instant(); } return (B) this; } /** * Set the time function. * * The time is the number of milliseconds since 1970-01-01T00:00:00Z. * * @param timeFunction a function * @return the builder */ @SuppressWarnings("unchecked") public B withTimeFunction(LongSupplier timeFunction) { this.instantFunction = () -> Instant.ofEpochMilli(timeFunction.getAsLong()); return (B) this; } /** * Set the instant function. * * @param instantFunction a function * @return the builder */ @SuppressWarnings("unchecked") public B withInstantFunction(Supplier instantFunction) { this.instantFunction = instantFunction; return (B) this; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstNameBasedFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.factory; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidNamespace; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Objects; import java.util.UUID; import static com.pinecone.ulf.util.guid.i128.enums.UuidVersion.VERSION_NAME_BASED_MD5; import static com.pinecone.ulf.util.guid.i128.enums.UuidVersion.VERSION_NAME_BASED_SHA1; /** * Abstract factory for creating name-based unique identifiers (UUIDv3 and * UUIDv5). * * The name space is optional for compatibility with the JDK's UUID method for * generating UUIDv3, which is {@link UUID#nameUUIDFromBytes(byte[])}. * * @see UuidNamespace * @see RFC 9562 */ public abstract class AbstNameBasedFactory extends UuidFactory { /** * The namespace (optional). */ protected byte[] namespace; // can be null /** * The hash algorithm. */ protected final String algorithm; // MD5 or SHA-1 /** * The MD5 algorithm. */ protected static final String ALGORITHM_MD5 = "MD5"; /** * The SHA-1 algorithm. */ protected static final String ALGORITHM_SHA1 = "SHA-1"; /** * Protected constructor that receives the message digest algorithm and an * optional name space. * * @param version the version number (3 or 5) * @param algorithm a message digest algorithm (MD5 or SHA-1) * @param namespace a name space byte array (null or 16 bytes) */ protected AbstNameBasedFactory(UuidVersion version, String algorithm, byte[] namespace) { super(version); if (!VERSION_NAME_BASED_MD5.equals(version) && !VERSION_NAME_BASED_SHA1.equals(version)) { throw new IllegalArgumentException("Invalid UUID version"); } if (ALGORITHM_MD5.equals(algorithm) || ALGORITHM_SHA1.equals(algorithm)) { this.algorithm = algorithm; } else { throw new IllegalArgumentException("Invalid message digest algorithm"); } if (namespace != null) { if (namespace.length == 16) { // must be 16 bytes length this.namespace = namespace; } else { throw new IllegalArgumentException("Invalid namespace"); } } } /** * Returns a name-based UUID. * * @param name a byte array * @return a name-based UUID * @throws NullPointerException if name is null */ public GUID128 create(final byte[] name) { return (GUID128) this.create(this.namespace, nameBytes(name)); } /** * Returns a name-based UUID. * * The name string is encoded into a sequence of bytes using UTF-8. * * @param name a string * @return a name-based UUID * @throws NullPointerException if name is null */ public GUID128 create(final String name) { return (GUID128) this.create(this.namespace, nameBytes(name)); } /** * Returns a name-based UUID. * * @param namespace a name space UUID * @param name a byte array * @return a name-based UUID * @throws IllegalArgumentException if name is null */ public GUID128 create(final GUID128 namespace, final byte[] name) { return (GUID128) this.create(namespaceBytes(namespace), nameBytes(name)); } /** * Returns a name-based UUID. * * The name string is encoded into a sequence of bytes using UTF-8. 
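 * Editorial sketch (not part of the original source) of the hashing core
 * shared by all overloads: the optional 16-byte name space is fed to the
 * digest first, then the name; the first 16 hash bytes become the UUID, with
 * version and variant bits fixed up afterwards (namespaceBytes and nameBytes
 * are placeholders):
 * {@code
 *   MessageDigest hasher = MessageDigest.getInstance("SHA-1"); // or "MD5"
 *   hasher.update(namespaceBytes);          // optional name space prefix
 *   byte[] hash = hasher.digest(nameBytes); // name encoded as UTF-8
 * }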
* * @param namespace a name space UUID * @param name a string * @return a name-based UUID * @throws NullPointerException if name is null */ public GUID128 create(final GUID128 namespace, final String name) { return (GUID128) this.create(namespaceBytes(namespace), nameBytes(name)); } /** * Returns a name-based UUID. * * @param namespace a name space string * @param name a byte array * @return a name-based UUID * @throws NullPointerException if name is null * @throws InvalidUuidException if the name space is invalid * @see InvalidUuidException */ public GUID128 create(final String namespace, final byte[] name) { return (GUID128) this.create(namespaceBytes(namespace), nameBytes(name)); } /** * Returns a name-based UUID. *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param namespace a name space string * @param name a string * @return a name-based UUID * @throws NullPointerException if name is null * @throws InvalidUuidException if the name space is invalid * @see InvalidUuidException */ public GUID128 create(final String namespace, final String name) { return (GUID128) this.create(namespaceBytes(namespace), nameBytes(name)); } /** * Returns a name-based UUID. * * @param namespace a name space enumeration * @param name a byte array * @return a name-based UUID * @throws NullPointerException if name is null */ public GUID128 create(final UuidNamespace namespace, final byte[] name) { return (GUID128) this.create(namespaceBytes(namespace), nameBytes(name)); } /** * Returns a name-based UUID. *

* The name string is encoded into a sequence of bytes using UTF-8. * * @param namespace a name space enumeration * @param name a string * @return a name-based UUID * @throws NullPointerException if name is null */ public GUID128 create(final UuidNamespace namespace, final String name) { return (GUID128) this.create(namespaceBytes(namespace), nameBytes(name)); } @Override public GUID128 create() { return create(Parameters.builder().build()); } @Override public GUID128 create(Parameters parameters) { return (GUID128) this.create( parameters.getNamespace(), parameters.getName() ); } private Object create( final byte[] namespace, final byte[] name ) { Objects.requireNonNull(name, "Null name"); MessageDigest hasher; try { hasher = MessageDigest.getInstance(this.algorithm); } catch (NoSuchAlgorithmException e) { throw new IllegalArgumentException(e.getMessage()); } if (namespace != null) { // Prepend the name space hasher.update(namespace); } // Compute the hash of the name final byte[] hash = hasher.digest(name); final long msb = ByteUtil.toNumber(hash, 0, 8); final long lsb = ByteUtil.toNumber(hash, 8, 16); return this.toUuid(msb, lsb); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstRandomBasedFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.function.RandomFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultRandomFunction; import com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil; import java.security.SecureRandom; import java.util.Objects; import java.util.Random; import java.util.UUID; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.locks.ReentrantLock; import java.util.function.IntFunction; import java.util.function.LongSupplier; /** * Abstract factory for creating random-based unique identifiers (UUIDv4). * * @see RandomFunction */ public abstract class AbstRandomBasedFactory extends UuidFactory { /** * The random generator. */ protected final IRandom random; /** * The number of bytes of a UUID. */ protected static final int UUID_BYTES = 16; /** * The reentrant lock for synchronization. 
*/ protected final ReentrantLock lock = new ReentrantLock(); /** * Constructor with a version number and a builder * * @param version a version number * @param builder a builder */ protected AbstRandomBasedFactory(UuidVersion version, Builder builder) { super(version); this.random = builder.getRandom(); } @Override public GUID128 create(Parameters parameters) { return create(); // ignore parameters } /** * Abstract builder for creating a random-based factory. * * @param factory type * @param builder type */ protected abstract static class Builder> { /** * A random generator. */ protected IRandom random; /** * Get the random generator. * * @return a random generator */ protected IRandom getRandom() { if (this.random == null) { this.random = new SafeRandom(new DefaultRandomFunction()); } return this.random; } /** * Set the random generator with a fast algorithm. * * Use it to replace the {@link DefaultRandomFunction} with * {@link ThreadLocalRandom}. * * @return the generator */ @SuppressWarnings("unchecked") public B withFastRandom() { this.random = new FastRandom(); return (B) this; } /** * Set the random generator with a safe algorithm. * * Use it to replace the {@link DefaultRandomFunction} with * {@link SecureRandom}. * * @return the generator */ @SuppressWarnings("unchecked") public B withSafeRandom() { this.random = new SafeRandom(); return (B) this; } /** * Set the random generator. * * @param random a random * @return the builder */ @SuppressWarnings("unchecked") public B withRandom(Random random) { if (random != null) { if (random instanceof SecureRandom) { this.random = new SafeRandom(random); } else { this.random = new FastRandom(random); } } return (B) this; } /** * Set a random function which returns random numbers. * * @param randomFunction a function * @return the builder */ @SuppressWarnings("unchecked") public B withRandomFunction(LongSupplier randomFunction) { this.random = new FastRandom(randomFunction); return (B) this; } /** * Finishes the factory building. * * @return the build factory */ public abstract T build(); } /** * Interface for random generator. */ protected interface IRandom { /** * Return a random number. * * @return a number */ long nextLong(); /** * Return a random number. * * @param length the byte array length * @return a number */ long nextLong(int length); /** * Return a random array of bytes. * * @param length the byte array length * @return an array */ byte[] nextBytes(int length); } /** * A long random generator. */ protected static final class FastRandom implements IRandom { private final LongSupplier randomFunction; /** * Default constructor. */ public FastRandom() { this(newFastFunction(null)); } /** * Constructor with a random. * * @param random a random */ public FastRandom(Random random) { this(newFastFunction(Objects.requireNonNull(random))); } /** * Constructor with a function which returns random numbers. * * @param randomFunction a function */ public FastRandom(LongSupplier randomFunction) { this.randomFunction = Objects.requireNonNull(randomFunction); } @Override public long nextLong() { return randomFunction.getAsLong(); } @Override public long nextLong(int length) { byte[] bytes = nextBytes(length); return ByteUtil.toNumber(bytes); } @Override public byte[] nextBytes(int length) { int shift = 0; long random = 0; final byte[] bytes = new byte[length]; for (int i = 0; i < length; i++) { if (shift < Byte.SIZE) { shift = Long.SIZE; random = randomFunction.getAsLong(); } shift -= Byte.SIZE; // 56, 48, 42... 
bytes[i] = (byte) (random >>> shift); } return bytes; } /** * Returns a new random function. * * @param random a random * @return a function */ private static LongSupplier newFastFunction(Random random) { if (random != null) { return () -> random.nextLong(); } return () -> ThreadLocalRandom.current().nextLong(); } } /** * A byte random generator. */ protected static final class SafeRandom implements IRandom { private final IntFunction randomFunction; /** * Default constructor. */ public SafeRandom() { this(newSafeFunction(null)); } /** * Constructor with a random. * * @param random a random */ public SafeRandom(Random random) { this(newSafeFunction(Objects.requireNonNull(random))); } /** * Constructor with a function which returns random numbers. * * @param randomFunction a function */ public SafeRandom(IntFunction randomFunction) { this.randomFunction = Objects.requireNonNull(randomFunction); } @Override public long nextLong() { byte[] bytes = this.randomFunction.apply(Long.BYTES); return ByteUtil.toNumber(bytes); } public long nextLong(int length) { byte[] bytes = nextBytes(length); return ByteUtil.toNumber(bytes); } @Override public byte[] nextBytes(int length) { return this.randomFunction.apply(length); } /** * Returns a new random function. * * @param random a random * @return a function */ private static IntFunction newSafeFunction(Random random) { final Random entropy = random != null ? random : new SecureRandom(); return (final int length) -> { final byte[] bytes = new byte[length]; entropy.nextBytes(bytes); return bytes; }; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstTimeBasedFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.factory; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.function.ClockSeqFunction; import com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction; import com.pinecone.ulf.util.guid.i128.factory.function.TimeFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultClockSeqFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultNodeIdFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultTimeFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.HashNodeIdFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.MacNodeIdFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.RandomNodeIdFunction; import com.pinecone.ulf.util.guid.i128.factory.function.impl.WindowsTimeFunction; import com.pinecone.ulf.util.guid.i128.util.UuidTime; import com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil; import com.pinecone.ulf.util.guid.i128.util.internal.SettingsUtil; import java.time.Instant; import java.util.UUID; import java.util.concurrent.locks.ReentrantLock; /** * Abstract factory for creating time-based unique identifiers (UUIDv1, UUIDv2 * and UUIDv6). *

* The time stamp has 100-nanosecond resolution, starting from 1582-10-15, a date known as the Gregorian Epoch. The time stamp rolls over around AD 5235 (1582 + 2^60 / 365.25 / 24 / 60 / 60 / 10000000). *

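A quick sanity check of the roll-over arithmetic quoted above (editor's sketch; plain Java arithmetic, nothing from this library is needed):

long ticks = 1L << 60;                             // 2^60 ticks of 100 ns each
double seconds = ticks / 10_000_000.0;             // 10,000,000 ticks per second
double years = seconds / (365.25 * 24 * 60 * 60);  // ~3653 years
System.out.println(1582 + (long) years);           // prints 5235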
* The node identifier can be:
*   • A MAC address;
*   • A hash of host name, MAC and IP;
*   • A random number that always changes;
*   • A specific number chosen by someone.
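For illustration, each strategy above maps onto a builder call defined later in this file. The concrete factory type TimeBasedFactory and its static builder() method are assumptions made for this sketch; only the with*NodeId methods are taken from the Builder below:

// "TimeBasedFactory" and "builder()" are hypothetical; the with* methods exist in the Builder below
TimeBasedFactory f1 = TimeBasedFactory.builder().withMacNodeId().build();             // MAC address
TimeBasedFactory f2 = TimeBasedFactory.builder().withHashNodeId().build();            // hash of host name, MAC and IP
TimeBasedFactory f3 = TimeBasedFactory.builder().withRandomNodeId().build();          // always-changing random number
TimeBasedFactory f4 = TimeBasedFactory.builder().withNodeId(0x0000C0FFEE42L).build(); // chosen number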

* The node identifier used by this factory can be controlled by defining a system property 'uuidcreator.node' or an environment variable 'UUIDCREATOR_NODE'. The system property takes precedence over the environment variable. *

* Options accepted by the system property and the environment variable:
*   • The string "mac" for using the MAC address;
*   • The string "hash" for using a hash of host name, MAC and IP;
*   • The string "random" for using a random number that always changes;
*   • The string representation of a specific number between 0 and 2^48-1.

* If a property or variable is defined, all UUIDs generated by this factory * will be based on it. *

* Otherwise, if no property or variable is defined, a random node identifier is * generated once at instantiation. This is the default. *

* Example of system property definition:
*
* {@code
* # Append to VM arguments
* -Duuidcreator.node="mac"
* }

* Example of environment variable definition:
*
* {@code
* # Append to ~/.profile
* export UUIDCREATOR_NODE="mac"
* }
* * @see TimeFunction * @see NodeIdFunction * @see ClockSeqFunction * @see RFC 9562 */ public abstract class AbstTimeBasedFactory extends UuidFactory { /** * The time function. */ protected TimeFunction timeFunction; /** * The node function. */ protected NodeIdFunction nodeidFunction; /** * The clock sequence function. */ protected ClockSeqFunction clockseqFunction; private static final String NODE_MAC = "mac"; private static final String NODE_HASH = "hash"; private static final String NODE_RANDOM = "random"; /** * The reentrant lock for synchronization. */ protected final ReentrantLock lock = new ReentrantLock(); private static final long EPOCH_TIMESTAMP = TimeFunction.toUnixTimestamp(UuidTime.EPOCH_GREG); /** * A protected constructor that receives a builder object. * * @param version the version number (1, 2 or 6) * @param builder a builder object */ protected AbstTimeBasedFactory(UuidVersion version, Builder builder) { super(version); this.timeFunction = builder.getTimeFunction(); this.nodeidFunction = builder.getNodeIdFunction(); this.clockseqFunction = builder.getClockSeqFunction(); } /** * Returns a time-based UUID. * * @return a time-based UUID */ @Override public GUID128 create() { lock.lock(); try { // Get the time stamp final long timestamp = TimeFunction.toExpectedRange(this.timeFunction.getAsLong() - EPOCH_TIMESTAMP); // Get the node identifier final long nodeIdentifier = NodeIdFunction.toExpectedRange(this.nodeidFunction.getAsLong()); // Get the clock sequence final long clockSequence = ClockSeqFunction.toExpectedRange(this.clockseqFunction.applyAsLong(timestamp)); // Format the most significant bits final long msb = this.formatMostSignificantBits(timestamp); // Format the least significant bits final long lsb = this.formatLeastSignificantBits(nodeIdentifier, clockSequence); return new UUID128(msb, lsb); } finally { lock.unlock(); } } /** * Returns a time-based UUID. * * @return a time-based UUID */ @Override public GUID128 create(Parameters parameters) { return create(); // ignore arguments } /** * Returns the most significant bits of the UUID. *

* It implements the algorithm for generating UUIDv1. * * @param timestamp the number of 100-nanoseconds since 1970-01-01 (Unix epoch) * @return the MSB */ protected long formatMostSignificantBits(final long timestamp) { return ((timestamp & 0x0fff_0000_00000000L) >>> 48) // | ((timestamp & 0x0000_ffff_00000000L) >>> 16) // | ((timestamp & 0x0000_0000_ffffffffL) << 32) // | 0x0000000000001000L; // apply version 1 } /** * Returns the least significant bits of the UUID. * * @param nodeIdentifier a node identifier * @param clockSequence a clock sequence * @return the LSB */ protected long formatLeastSignificantBits(final long nodeIdentifier, final long clockSequence) { return ((((clockSequence << 48) | (nodeIdentifier & 0x0000ffffffffffffL)) // & 0x3fffffffffffffffL) // clear variant bits | 0x8000000000000000L); // apply variant bits } /** * Select the node identifier function. * * This method reads the system property 'uuidcreator.node' and the environment * variable 'UUIDCREATOR_NODE' to decide what node identifier function must be * used. * * 1. If it finds the string "mac", the generator will use the MAC address. * * 2. If it finds the string "hash", the generator will use the system data * hash. * * 3. If it finds the string "random", the generator will use a random number * that always changes. * * 4. If it finds the string representation of a specific number in octal, * hexadecimal or decimal format, the generator will use the number represented. * * 5. Else, a random number will be used by the generator. * * @return a node function */ protected static NodeIdFunction selectNodeIdFunction() { String string = SettingsUtil.getProperty(SettingsUtil.PROPERTY_NODE); if (NODE_MAC.equalsIgnoreCase(string)) { return new MacNodeIdFunction(); } if (NODE_HASH.equalsIgnoreCase(string)) { return new HashNodeIdFunction(); } if (NODE_RANDOM.equalsIgnoreCase(string)) { return new RandomNodeIdFunction(); } Long number = SettingsUtil.getNodeIdentifier(); if (number != null) { final long nodeid = NodeIdFunction.toExpectedRange(number); return () -> nodeid; } return new DefaultNodeIdFunction(); } /** * Select the time function. * * If the operating system is WINDOWS, it returns a function that is more * efficient for its typical time granularity (15.6ms). Otherwise, it returns * the default time function. * * @return a time function */ protected static TimeFunction selectTimeFunction() { // check if the operating system is WINDOWS final String os = System.getProperty("os.name"); if (os != null && os.toLowerCase().startsWith("win")) { return new WindowsTimeFunction(); } return new DefaultTimeFunction(); } /** * Abstract builder for creating a time-based factory. */ public abstract static class Builder> { /** * The time function. */ protected TimeFunction timeFunction; /** * The node function. */ protected NodeIdFunction nodeidFunction; /** * The clock sequence function. */ protected ClockSeqFunction clockseqFunction; /** * Get the time function. * * @return a function */ protected TimeFunction getTimeFunction() { if (this.timeFunction == null) { this.timeFunction = selectTimeFunction(); } return this.timeFunction; } /** * Get the node function. * * @return a function */ protected NodeIdFunction getNodeIdFunction() { if (this.nodeidFunction == null) { this.nodeidFunction = selectNodeIdFunction(); } return this.nodeidFunction; } /** * Get the clock sequence function. 
* * @return a function */ protected ClockSeqFunction getClockSeqFunction() { if (this.clockseqFunction == null) { this.clockseqFunction = new DefaultClockSeqFunction(); } return this.clockseqFunction; } /** * Set the time function. * * @param timeFunction a function * @return the builder */ @SuppressWarnings("unchecked") public B withTimeFunction(TimeFunction timeFunction) { this.timeFunction = timeFunction; return (B) this; } /** * Set the node function * * @param nodeidFunction a function * @return the builder */ @SuppressWarnings("unchecked") public B withNodeIdFunction(NodeIdFunction nodeidFunction) { this.nodeidFunction = nodeidFunction; return (B) this; } /** * Set the clock sequence function * * @param clockseqFunction a function * @return the builder */ @SuppressWarnings("unchecked") public B withClockSeqFunction(ClockSeqFunction clockseqFunction) { this.clockseqFunction = clockseqFunction; return (B) this; } /** * Set the fixed instant. * * @param instant an instant * @return the builder */ @SuppressWarnings("unchecked") public B withInstant(Instant instant) { final long timestamp = TimeFunction.toUnixTimestamp(instant); this.timeFunction = () -> timestamp; return (B) this; } /** * Set the fixed clock sequence. * * @param clockseq a clock sequence * @return the builder */ @SuppressWarnings("unchecked") public B withClockSeq(long clockseq) { final long clockSequence = ClockSeqFunction.toExpectedRange(clockseq); this.clockseqFunction = x -> clockSequence; return (B) this; } /** * Set a fixed clock sequence. * * @param clockseq a clock sequence * @return the builder */ @SuppressWarnings("unchecked") public B withClockSeq(byte[] clockseq) { final long clockSequence = ClockSeqFunction.toExpectedRange(ByteUtil.toNumber(clockseq)); this.clockseqFunction = x -> clockSequence; return (B) this; } /** * Set a fixed node. * * @param nodeid a node * @return the builder */ @SuppressWarnings("unchecked") public B withNodeId(long nodeid) { final long nodeIdentifier = NodeIdFunction.toExpectedRange(nodeid); this.nodeidFunction = () -> nodeIdentifier; return (B) this; } /** * Set a fixed node * * @param nodeid a node * @return the builder */ @SuppressWarnings("unchecked") public B withNodeId(byte[] nodeid) { final long nodeIdentifier = NodeIdFunction.toExpectedRange(ByteUtil.toNumber(nodeid)); this.nodeidFunction = () -> nodeIdentifier; return (B) this; } /** * Set the node function to MAC strategy. * * @return the builder */ @SuppressWarnings("unchecked") public B withMacNodeId() { this.nodeidFunction = new MacNodeIdFunction(); return (B) this; } /** * Set the node function to hash strategy. * * @return the builder */ @SuppressWarnings("unchecked") public B withHashNodeId() { this.nodeidFunction = new HashNodeIdFunction(); return (B) this; } /** * Set the node function to random strategy. * * @return the builder */ @SuppressWarnings("unchecked") public B withRandomNodeId() { this.nodeidFunction = new RandomNodeIdFunction(); return (B) this; } /** * Finish the factory building. 
* * @return the built factory */ public abstract T build(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/UuidFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.codec.StandardBinaryCodec; import com.pinecone.ulf.util.guid.i128.codec.StandardStringCodec; import com.pinecone.ulf.util.guid.i128.enums.UuidLocalDomain; import com.pinecone.ulf.util.guid.i128.enums.UuidNamespace; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import java.nio.charset.StandardCharsets; import java.time.Instant; import java.util.Arrays; import java.util.Objects; import java.util.UUID; /** * Abstract factory that is base for all UUID factories. */ public abstract class UuidFactory { /** * Version number. */ protected final UuidVersion version; /** * Version bit mask. */ protected final long versionMask; /** * Default Constructor. *

* The version used is {@link UuidVersion#VERSION_UNKNOWN}. */ public UuidFactory() { this.version = UuidVersion.VERSION_UNKNOWN; this.versionMask = (long) version.getValue() << 12; } /** * Constructor with a version number. * * @param version a version number */ public UuidFactory(UuidVersion version) { this.version = version; this.versionMask = (long) version.getValue() << 12; } /** * Returns the version number for this factory. * * @return the version number */ public UuidVersion getVersion() { return this.version; } /** * Create a UUID * * @return a UUID */ public abstract GUID128 create(); /** * Creates a UUID using parameters. * * @param parameters parameters object * @return a UUID */ public abstract GUID128 create(Parameters parameters); /** * Parameters object to be used with a {@link UuidFactory#create(Parameters)}. */ public static class Parameters { /** * Instant to be used. */ private final Instant instant; /** * Name space byte array. */ private final byte[] namespace; /** * Name byte array. */ private final byte[] name; /** * Local domain byte. */ private final byte localDomain; /** * Local identifier number. */ private final int localIdentifier; /** * Constructor using a builder. * * @param builder a builder */ public Parameters(Builder builder) { Objects.requireNonNull(builder); this.instant = builder.instant; this.namespace = builder.namespace; this.name = builder.name; this.localDomain = builder.localDomain; this.localIdentifier = builder.localIdentifier; } public Instant getInstant() { return this.instant; } /** * Get the name space bytes. * * @return a byte array */ public byte[] getNamespace() { return this.namespace; } /** * Get the name bytes. * * @return a byte array */ public byte[] getName() { return this.name; } /** * Get the local domain. * * @return the local domain */ public byte getLocalDomain() { return this.localDomain; } /** * Get the local identifier. * * @return the local identifier */ public int getLocalIdentifier() { return this.localIdentifier; } /** * Returns a new builder. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Parameters builder. */ public static class Builder { /** * Instant to be used. */ private Instant instant; /** * Name space byte array. */ private byte[] namespace = null; /** * Name byte array. */ private byte[] name = null; /** * Local domain byte. */ private byte localDomain; /** * Local identifier number. */ private int localIdentifier; private Builder() { } /** * Use the instant provided. * * @param instant an instant * @return the builder */ public Builder withInstant(Instant instant) { this.instant = instant; return this; } /** * Use the name space UUID. * * @param namespace a name space * @return the builder */ public Builder withNamespace(GUID128 namespace) { this.namespace = namespaceBytes(namespace); return this; } /** * Use the name space string. * * @param namespace a name space * @return the builder */ public Builder withNamespace(String namespace) { this.namespace = namespaceBytes(namespace); return this; } /** * Use the name space enum. * * @param namespace a name space * @return the builder */ public Builder withNamespace(UuidNamespace namespace) { this.namespace = namespaceBytes(namespace); return this; } /** * Use the name byte array. * * It makes a copy of the input byte array. * * @param name a name * @return the builder */ public Builder withName(byte[] name) { this.name = nameBytes(name); return this; } /** * Use the name string. 
* * The string is encoded into UTF-8 byte array. * * @param name a name * @return the builder */ public Builder withName(String name) { this.name = nameBytes(name); return this; } /** * Use the local domain. * * @param localDomain the local domain * @return the builder */ public Builder withLocalDomain(UuidLocalDomain localDomain) { this.localDomain = localDomain.getValue(); return this; } /** * Use the local domain. * * @param localDomain the local domain * @return the builder */ public Builder withLocalDomain(byte localDomain) { this.localDomain = localDomain; return this; } /** * Use the local identifier. * * @param localIdentifier the local identifier * @return the builder */ public Builder withLocalIdentifier(int localIdentifier) { this.localIdentifier = localIdentifier; return this; } /** * Finishes the parameters build. * * @return the build parameters. */ public Parameters build() { return new Parameters(this); } } } /** * Returns a copy of the input byte array. * * @param name a name string * @return a byte array * @throws IllegalArgumentException if the input is null */ protected static byte[] nameBytes(byte[] name) { Objects.requireNonNull(name, "Null name"); return Arrays.copyOf(name, name.length); } /** * Converts a name string into a byte array. * * @param name a name string * @return a byte array * @throws IllegalArgumentException if the input is null */ protected static byte[] nameBytes(String name) { Objects.requireNonNull(name, "Null name"); return name.getBytes(StandardCharsets.UTF_8); } /** * Converts a name space enumeration into a byte array. * * @param namespace a name space enumeration * @return a byte array */ protected static byte[] namespaceBytes(UuidNamespace namespace) { if (namespace != null) { return namespaceBytes(namespace.getValue()); } return null; // the name space can be null } /** * Converts a name space UUID into a byte array. * * @param namespace a name space UUID * @return a byte array */ protected static byte[] namespaceBytes(GUID128 namespace) { if (namespace != null) { return StandardBinaryCodec.INSTANCE.encode(namespace); } return null; // the name space can be null } /** * Converts a name space string into a byte array. * * @param namespace a name space string * @return a byte array * @throws InvalidUuidException if the name space is invalid */ protected static byte[] namespaceBytes(String namespace) { if (namespace != null) { return StandardBinaryCodec.INSTANCE.encode(StandardStringCodec.INSTANCE.decode(namespace)); } return null; // the name space can be null } /** * Creates a UUID from a pair of numbers. *

* It applies the version and variant numbers to the resulting UUID. * * @param msb the most significant bits * @param lsb the least significant bits * @return a UUID */ protected GUID128 toUuid(final long msb, final long lsb) { final long msb0 = (msb & 0xffffffffffff0fffL) | this.versionMask; // set version final long lsb0 = (lsb & 0x3fffffffffffffffL) | 0x8000000000000000L; // set variant return new UUID128(msb0, lsb0); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/ClockSeqFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function; import com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil; import java.util.function.LongUnaryOperator; /** * Function that must return a number between 0 and 16383 (2^14-1). *

* It receives as argument a number of 100-nanoseconds since 1970-01-01 (Unix * epoch). *

* Example:
*
* {@code
* // A function that returns new random clock sequences
* ClockSeqFunction f = t -> ClockSeqFunction.getRandom();
* }
* */ @FunctionalInterface public interface ClockSeqFunction extends LongUnaryOperator { /** * Returns a new random clock sequence in the range 0 to 16383 (2^14-1). * * @return a number in the range 0 to 16383 (2^14-1) */ static long getRandom() { return toExpectedRange(RandomUtil.newSecureRandom().nextLong()); } /** * Clears the leading bits so that the resulting number is within the range 0 to * 16383 (2^14-1). *

* The result is equivalent to {@code n % 2^14}. * * @param clockseq a clock sequence * @return a number in the range 0 to 16383 (2^14-1). */ static long toExpectedRange(final long clockseq) { return clockseq & 0x0000000000003fffL; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/NodeIdFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function; import com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil; import java.util.function.LongSupplier; /** * Function that must return a number between 0 and 2^48-1. *

* Example:
*
* {@code
* // A function that returns new random multicast node identifiers
* NodeIdFunction f = () -> NodeIdFunction.getMulticastRandom();
* }
* */ @FunctionalInterface public interface NodeIdFunction extends LongSupplier { /** * Returns a new random node identifier. * * @return a number in the range 0 to 2^48-1. */ static long getRandom() { return toExpectedRange(RandomUtil.newSecureRandom().nextLong()); } /** * Return a new random multicast node identifier. * * @return a number in the range 0 to 2^48-1. */ static long getMulticastRandom() { return toMulticast(getRandom()); } /** * Clears the leading bits so that the resulting number is in the range 0 to * 2^48-1. *

* The result is equivalent to {@code n % 2^48}. * * @param nodeid the node identifier * @return a number in the range 0 to 2^48-1. */ static long toExpectedRange(final long nodeid) { return nodeid & 0x0000_ffffffffffffL; } /** * Sets the multicast bit of a node identifier. *

* It also clears leading bits so that the resulting number is within the range * 0 to 2^48-1. * * @param nodeid the node identifier * @return a node identifier with the multicast bit set */ static long toMulticast(long nodeid) { return (nodeid & 0x0000_ffffffffffffL) | 0x0000_010000000000L; } /** * Checks if the multicast bit of a node identifier is set. * * @param nodeid a node identifier * @return true if the node identifier is multicast */ static boolean isMulticast(long nodeid) { return (nodeid & 0x0000_010000000000L) == 0x0000_010000000000L; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/RandomFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function; import java.util.function.IntFunction; /** * Function that must return an array of bytes with the given length. */ @FunctionalInterface public interface RandomFunction extends IntFunction { } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/TimeFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.factory.function; import com.pinecone.ulf.util.guid.i128.util.UuidTime; import java.time.Instant; import java.util.function.LongSupplier; /** * Function that must return a number of 100-nanoseconds since 1970-01-01 (Unix * epoch). *

* Example:
*
* {@code
* // A function that returns `Instant.now()` as a number of 100ns
* TimeFunction f = () -> TimeFunction.toUnixTimestamp(Instant.now());
* }
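A complementary sketch: freezing the timestamp at a known instant, e.g. for reproducible tests (this mirrors what Builder.withInstant(Instant) in AbstTimeBasedFactory does):

final long ts = TimeFunction.toUnixTimestamp(Instant.parse("2020-01-01T00:00:00Z"));
TimeFunction frozen = () -> ts;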

* In JDK 8, {@link Instant#now()} has millisecond precision, even though {@link Instant} has nanosecond resolution. In JDK 9+, {@link Instant#now()} has microsecond precision. * * @see Current time in microseconds in java * @see Increase the precision of the implementation of java.time.Clock.systemUTC() */ @FunctionalInterface public interface TimeFunction extends LongSupplier { /** * Converts an instant to a number of 100-nanoseconds since 1970-01-01 (Unix epoch). * * @param instant an instant * @return a number of 100-nanoseconds since 1970-01-01 (Unix epoch) */ static long toUnixTimestamp(final Instant instant) { return UuidTime.toUnixTimestamp(instant); } /** * Clears the leading bits so that the resulting number is in the range 0 to 2^60-1. *

* The result is equivalent to {@code n % 2^60}. * * @param timestamp a number of 100-nanoseconds since 1970-01-01 (Unix epoch) * @return a number in the range 0 to 2^60-1. */ static long toExpectedRange(final long timestamp) { return timestamp & 0x0_fffffffffffffffL; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultClockSeqFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.ClockSeqFunction; import java.util.SplittableRandom; import java.util.concurrent.atomic.AtomicInteger; /** * Function that returns a clock sequence. * * @see ClockSeqFunction */ public final class DefaultClockSeqFunction implements ClockSeqFunction { private AtomicInteger sequence; private long lastTimestamp = -1; /** * The pool of clock sequence numbers. */ protected static final ClockSeqPool POOL = new ClockSeqPool(); /** * Default constructor. */ public DefaultClockSeqFunction() { final int initial = POOL.random(); this.sequence = new AtomicInteger(initial); } @Override public long applyAsLong(final long timestamp) { if (timestamp > this.lastTimestamp) { this.lastTimestamp = timestamp; return this.sequence.get(); } this.lastTimestamp = timestamp; return this.next(); } /** * Get the next random clock sequence number. * * @return a number */ public int next() { if (this.sequence.incrementAndGet() > ClockSeqPool.POOL_MAX) { this.sequence.set(ClockSeqPool.POOL_MIN); } return this.sequence.updateAndGet(POOL::take); } /** * Nested class that manages a pool of 16384 clock sequence values. *

* The pool is implemented as an array of 2048 bytes (16384 bits). Each bit of * the array corresponds to a clock sequence value. *

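The byte/bit addressing used throughout the pool, spelled out for one value (editor's sketch of the arithmetic in setBit further below):

int value = 16383;           // the largest clock sequence
int byteIndex = value / 8;   // 2047, the last byte of the 2048-byte array
int bitIndex  = value % 8;   // 7, the highest bit of that byte
int mask      = 1 << bitIndex;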
* It is used to prevent two time-based factories from using the same clock sequence at the same time within a class loader. */ static final class ClockSeqPool { private final byte[] pool = new byte[2048]; private static final int POOL_SIZE = 16384; // 2^14 = 16384 /** * The minimum pool value, which is zero. */ public static final int POOL_MIN = 0x00000000; /** * The maximum pool value, which is 16383 (2^14-1). */ public static final int POOL_MAX = 0x00003fff; // 2^14-1 = 16383 /** * Take a value from the pool. *

* If the value to be taken is already in use, it is incremented until a free * value is found and returned. *

* If all pool values are in use, the pool is cleared and the last incremented value is returned. *

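The probing behaviour described above, as a sequence of calls (ClockSeqPool is package-private, so this is an illustrative sketch rather than client code):

ClockSeqPool pool = new ClockSeqPool();
int a = pool.take(5);   // returns 5: the value was free and is now marked used
int b = pool.take(5);   // returns 6: 5 is in use, so the pool probes forward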
* It does nothing to negative arguments. * * @param take value to be taken from the pool * @return the value to be borrowed if not used */ public synchronized int take(final int take) { int value = take; for (int i = 0; i < POOL_SIZE; i++) { if (setBit(value)) { return value; } value = ++value % POOL_SIZE; } clearPool(); setBit(value); return value; } /** * Take a random value from the pool. * * @return the random value to be borrowed if not used */ public synchronized int random() { // Choose a random number between 0 and 16383 int random = Math.abs(new SplittableRandom().nextInt()) % POOL_SIZE; return this.take(random); } /** * Set a bit from the byte array that represents the pool. *

* This operation corresponds to setting a value as used. *

* It returns false if the value is not free. *

* It does nothing to negative arguments. * * @param value the value to be taken from the pool * @return true if success */ private synchronized boolean setBit(int value) { if (value < 0) { return false; } final int byteIndex = value / 8; final int bitIndex = value % 8; final int mask = (0x00000001 << bitIndex); final boolean clear = (pool[byteIndex] & mask) == 0; if (clear) { pool[byteIndex] = (byte) (pool[byteIndex] | mask); return true; } return false; } /** * Check if a value is used out of the pool. * * @param value a value to be checked in the pool * @return true if the value is used */ public synchronized boolean isUsed(int value) { final int byteIndex = value / 8; final int bitIndex = value % 8; final int mask = (0x00000001 << bitIndex); boolean clear = (pool[byteIndex] & mask) == 0; return !clear; } /** * Check if a value is free in the pool. * * @param value a value to be checked in the pool * @return true if the value is free */ public synchronized boolean isFree(int value) { return !this.isUsed(value); } /** * Count the used values out of the pool * * @return the count of used values */ public synchronized int countUsed() { int counter = 0; for (int i = 0; i < POOL_SIZE; i++) { if (this.isUsed(i)) { counter++; } } return counter; } /** * Count the free values in the pool. * * @return the count of free values */ public synchronized int countFree() { return POOL_SIZE - this.countUsed(); } /** * Clear all bits of the byte array that represents the pool. *

* This corresponds to marking all pool values as free */ public synchronized void clearPool() { for (int i = 0; i < pool.length; i++) { pool[i] = 0; } } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultNodeIdFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction; /** * Function that returns a final random multicast node identifier. *

* The random value is generated once during instantiation. * * @see NodeIdFunction */ public final class DefaultNodeIdFunction implements NodeIdFunction { private final long nodeIdentifier; /** * Default constructor. */ public DefaultNodeIdFunction() { this.nodeIdentifier = NodeIdFunction.getMulticastRandom(); } @Override public long getAsLong() { return this.nodeIdentifier; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultRandomFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.RandomFunction; import com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil; /** * Function that returns an array of bytes with the given length. * * @see RandomFunction * @see RandomUtil */ public final class DefaultRandomFunction implements RandomFunction { @Override public byte[] apply(final int length) { return RandomUtil.nextBytes(length); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultTimeFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.TimeFunction; import java.time.Clock; import java.util.SplittableRandom; import static com.pinecone.ulf.util.guid.i128.util.UuidTime.TICKS_PER_MILLI; /** * Function that returns a number of 100-nanoseconds since 1970-01-01 (Unix * epoch). *

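Two consecutive calls never return the same tick under a well-behaved clock, because the counter increments even when clock.millis() repeats (editor's sketch using only members of this class):

TimeFunction f = new DefaultTimeFunction();
long t1 = f.getAsLong();
long t2 = f.getAsLong();
assert t2 > t1;   // strictly increasing within the same millisecond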
* It can advance 1ms or more ahead of system clock on heavy load. * * @see TimeFunction */ public final class DefaultTimeFunction implements TimeFunction { private final Clock clock; private long lastTime = -1; // let go up to 1 second ahead of system clock private static final long advanceMax = 1_000L; // start the counter with a random number between 0 and 9,999 private long counter = Math.abs(new SplittableRandom().nextLong()) % TICKS_PER_MILLI; // start the counter limit with a number between 10,000 and 19,999 private long counterMax = counter + TICKS_PER_MILLI; /** * Default constructor. */ public DefaultTimeFunction() { this.clock = Clock.systemUTC(); } /** * Default constructor with a {@link Clock} instance. * * @param clock a clock */ public DefaultTimeFunction(Clock clock) { this.clock = clock; } @Override public long getAsLong() { counter++; // always increment // get current system time long time = clock.millis(); // is it not too much ahead of system clock? if (advanceMax > Math.abs(lastTime - time)) { time = Math.max(lastTime, time); } // check time change if (time == lastTime) { // if the time repeats, // check the counter limit if (counter >= counterMax) { time++; // must go ahead of system clock // reset to a number between 0 and 9,999 counter = counter % TICKS_PER_MILLI; // reset to a number between 10,000 and 19,999 counterMax = counter + TICKS_PER_MILLI; } } else { // reset to a number between 0 and 9,999 counter = counter % TICKS_PER_MILLI; // reset to a number between 10,000 and 19,999 counterMax = counter + TICKS_PER_MILLI; } // save time for the next call lastTime = time; // simulate a high resolution clock return (time * TICKS_PER_MILLI) + counter; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/HashNodeIdFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction; import com.pinecone.ulf.util.guid.i128.util.MachineId; import com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil; /** * Function that returns a hash of host name, MAC and IP. *

* The hash is calculated once during instantiation. * * @see NodeIdFunction * @see MachineId */ public final class HashNodeIdFunction implements NodeIdFunction { private final long nodeIdentifier; /** * Default constructor. */ public HashNodeIdFunction() { final byte[] hash = MachineId.getMachineHash(); final long number = ByteUtil.toNumber(hash, 0, 6); this.nodeIdentifier = NodeIdFunction.toMulticast(number); } @Override public long getAsLong() { return this.nodeIdentifier; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/MacNodeIdFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction; import com.pinecone.ulf.util.guid.i128.util.internal.NetworkUtil; import java.net.NetworkInterface; import java.net.SocketException; import static com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil.toNumber; /** * Function that returns a MAC address. *

* The MAC address is obtained once during instantiation. *

* If no MAC address is found, it returns a random multicast node identifier. * * @see NodeIdFunction */ public final class MacNodeIdFunction implements NodeIdFunction { private final long nodeIdentifier; /** * Default constructor. */ public MacNodeIdFunction() { this.nodeIdentifier = getHardwareAddress(); } @Override public long getAsLong() { return this.nodeIdentifier; } private long getHardwareAddress() { try { NetworkInterface nic = NetworkUtil.nic(); if (nic != null) { return toNumber(nic.getHardwareAddress()); } } catch (SocketException e) { return NodeIdFunction.getMulticastRandom(); } return NodeIdFunction.getMulticastRandom(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/RandomNodeIdFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction; import com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil; /** * Function that returns a new random multicast node identifier. *

* The random value is generated with each new invocation. * * @see NodeIdFunction */ public final class RandomNodeIdFunction implements NodeIdFunction { @Override public long getAsLong() { return NodeIdFunction.toMulticast(RandomUtil.nextLong()); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/WindowsTimeFunction.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.function.impl; import com.pinecone.ulf.util.guid.i128.factory.function.TimeFunction; import java.time.Clock; import java.util.SplittableRandom; import static com.pinecone.ulf.util.guid.i128.util.UuidTime.TICKS_PER_MILLI; /** * Function that returns a number of 100-nanoseconds since 1970-01-01 (Unix * epoch). *

* This function is for WINDOWS systems. *

* On WINDOWS, the typical system time granularity is 15.625ms due to a default * 64Hz timer frequency. *
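A standalone sketch (illustrative names, not from these sources) of the rounding used further down in calculatedMillis(): the clock is rounded up to the next 16 ms boundary, and 100 ns resolution is then simulated with a counter.

    public class GranularitySketch {
        static final long GRANULARITY = 16;          // ms, just above Windows' 15.625 ms
        static final long TICKS_PER_MILLI = 10_000;  // 100 ns ticks per millisecond

        // Round up to the next multiple of GRANULARITY; at an exact multiple this
        // still adds 16 ms, which is why the result can run ahead of the clock.
        static long roundUp(long millis) {
            return millis + GRANULARITY - (millis % GRANULARITY);
        }

        public static void main(String[] args) {
            long counter = 42;  // stands in for the rotating per-tick counter
            System.out.println(roundUp(System.currentTimeMillis()) * TICKS_PER_MILLI + counter);
        }
    }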

* It can advance 16ms or more ahead of system clock on heavy load. * * @see TimeFunction */ public final class WindowsTimeFunction implements TimeFunction { private final Clock clock; private long lastTime = -1; // let go up to 1 second ahead of system clock private static final long advanceMax = 1_000L; // arbitrary granularity greater than 15ms private static final long GRANULARITY = 16; private static final long TICKS_PER_GRANULARITY = TICKS_PER_MILLI * GRANULARITY; // start the counter with a random number between 0 and 159,999 private long counter = Math.abs(new SplittableRandom().nextLong()) % TICKS_PER_GRANULARITY; // start the counter limit with a number between 160,000 and 319,999 private long counterMax = counter + TICKS_PER_GRANULARITY; /** * Default constructor. */ public WindowsTimeFunction() { this.clock = Clock.systemUTC(); } /** * Constructor with a clock. * * @param clock a clock */ public WindowsTimeFunction(Clock clock) { this.clock = clock; } @Override public long getAsLong() { counter++; // always increment // get calculated system time long time = calculatedMillis(); // is it not too much ahead of system clock? if (advanceMax > Math.abs(lastTime - time)) { time = Math.max(lastTime, time); } // check time change if (time == lastTime) { // if the time repeats, // check the counter limit if (counter >= counterMax) { time += GRANULARITY; // let it go forwards // reset to a number between 0 and 159,999 counter = counter % TICKS_PER_GRANULARITY; // reset to a number between 160,000 and 319,999 counterMax = counter + TICKS_PER_GRANULARITY; } } else { // reset to a number between 0 and 159,999 counter = counter % TICKS_PER_GRANULARITY; // reset to a number between 160,000 and 319,999 counterMax = counter + TICKS_PER_GRANULARITY; } // save time for the next call lastTime = time; // simulate a high resolution clock return (time * TICKS_PER_MILLI) + counter; } /** * Returns the calculated time in milliseconds. * * It can be 16ms ahead of system time due to time granularity. * * @return the calculated time */ private long calculatedMillis() { final long time = clock.millis(); return time + GRANULARITY - (time % GRANULARITY); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/PrefixCombFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.factory.nonstandard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory; import java.time.Clock; import java.util.Random; import java.util.UUID; import java.util.function.LongSupplier; /** * Concrete factory for creating Prefix COMB GUIDs. *

* A Prefix COMB GUID is a UUID that combines a creation time with random bits. *

* The creation millisecond is a 6-byte PREFIX at the MOST significant bits. *
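A minimal sketch of that layout (illustrative only, using java.util.UUID in place of GUID128 and ignoring the version/variant bits the real factory also sets):

    import java.util.Random;
    import java.util.UUID;

    public class PrefixCombSketch {
        public static void main(String[] args) {
            Random random = new Random();
            long time = System.currentTimeMillis();  // 48-bit millisecond prefix
            long msb = (time << 16) | (random.nextLong() & 0x000000000000ffffL);
            long lsb = random.nextLong();
            System.out.println(new UUID(msb, lsb));  // sorts roughly by creation time
        }
    }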

* The created UUID is a UUIDv4 for compatibility with RFC 9562. * * @see The Cost * of GUIDs as Primary Keys */ public final class PrefixCombFactory extends AbstCombFactory { /** * Default constructor. */ public PrefixCombFactory() { this(builder()); } /** * Constructor with a clock. * * @param clock a clock */ public PrefixCombFactory(Clock clock) { this(builder().withClock(clock)); } /** * Constructor with a random. * * @param random a random generator */ public PrefixCombFactory(Random random) { this(builder().withRandom(random)); } /** * Constructor with a random and a clock. * * @param random a random * @param clock a clock */ public PrefixCombFactory(Random random, Clock clock) { this(builder().withRandom(random).withClock(clock)); } /** * Constructor with a function which return random numbers. * * @param randomFunction a function */ public PrefixCombFactory(LongSupplier randomFunction) { this(builder().withRandomFunction(randomFunction)); } /** * Constructor with a function which a function which return random numbers and * a clock. * * @param randomFunction a function * @param clock a clock */ public PrefixCombFactory(LongSupplier randomFunction, Clock clock) { this(builder().withRandomFunction(randomFunction).withClock(clock)); } private PrefixCombFactory(Builder builder) { super(UuidVersion.VERSION_RANDOM_BASED, builder); } /** * Builder of factories. */ public static class Builder extends AbstCombFactory.Builder { @Override public PrefixCombFactory build() { return new PrefixCombFactory(this); } } /** * Returns a new builder. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Returns a Prefix COMB GUID. * * @return a UUIDv4 */ @Override public GUID128 create() { lock.lock(); try { final long time = instantFunction.get().toEpochMilli(); final long long1 = this.random.nextLong(2); final long long2 = this.random.nextLong(8); return make(time, long1, long2); } finally { lock.unlock(); } } private GUID128 make(final long time, final long long1, final long long2) { return toUuid((time << 16) | (long1 & 0x000000000000ffffL), long2); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/ShortPrefixCombFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.factory.nonstandard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory; import java.time.Clock; import java.util.Random; import java.util.UUID; import java.util.function.LongSupplier; /** * Concrete factory for creating Short Prefix COMB GUIDs. *

* A Short Prefix COMB GUID is a UUID that combines a creation time with random * bits. *

* The creation minute is a 2-byte PREFIX at the MOST significant bits. *

* The prefix wraps around every ~45 days (2^16 minutes / 60 / 24 ≈ 45.5 days). *
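The wrap-around arithmetic, spelled out as a runnable sketch (class name is made up):

    public class ShortPrefixWrapSketch {
        public static void main(String[] args) {
            long interval = 60_000;  // the default 1-minute interval in milliseconds
            long prefix = (System.currentTimeMillis() / interval) & 0xffffL;
            System.out.println("current 16-bit prefix bucket: " + prefix);
            // 2^16 one-minute buckets last 65536 / 60 / 24 ≈ 45.5 days:
            System.out.println("wraps after " + (65536.0 / 60 / 24) + " days");
        }
    }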

* The created UUID is a UUIDv4 for compatibility with RFC 9562. * * @see Sequential * UUID Generators */ public final class ShortPrefixCombFactory extends AbstCombFactory { /** * Interval in milliseconds. */ protected final int interval; /** * Default interval of 60 seconds in milliseconds. */ protected static final int DEFAULT_INTERVAL = 60_000; /** * Default constructor. */ public ShortPrefixCombFactory() { this(builder()); } /** * Constructor with a clock. * * @param clock a clock */ public ShortPrefixCombFactory(Clock clock) { this(builder().withClock(clock)); } /** * Constructor with a random. * * @param random a random generator */ public ShortPrefixCombFactory(Random random) { this(builder().withRandom(random)); } /** * Constructor with a random and a clock. * * @param random a random * @param clock a clock */ public ShortPrefixCombFactory(Random random, Clock clock) { this(builder().withRandom(random).withClock(clock)); } /** * Constructor with a function which return random numbers. * * @param randomFunction a function */ public ShortPrefixCombFactory(LongSupplier randomFunction) { this(builder().withRandomFunction(randomFunction)); } /** * Constructor with a function which a function which return random numbers and * a clock. * * @param randomFunction a function * @param clock a clock */ public ShortPrefixCombFactory(LongSupplier randomFunction, Clock clock) { this(builder().withRandomFunction(randomFunction).withClock(clock)); } private ShortPrefixCombFactory(Builder builder) { super(UuidVersion.VERSION_RANDOM_BASED, builder); this.interval = builder.getInterval(); } /** * A builder of factories. */ public static class Builder extends AbstCombFactory.Builder { private Integer interval; /** * Get the interval in milliseconds. * * @return the interval in milliseconds. */ protected int getInterval() { if (this.interval == null) { this.interval = DEFAULT_INTERVAL; } return this.interval; } /** * Set the interval in milliseconds. * * @param interval the interval in milliseconds * @return the builder */ public Builder withInterval(int interval) { this.interval = interval; return this; } @Override public ShortPrefixCombFactory build() { return new ShortPrefixCombFactory(this); } } /** * Returns a new builder. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Returns a Short Prefix COMB GUID. 
* * @return a UUIDv4 */ @Override public GUID128 create() { lock.lock(); try { final long time = instantFunction.get().toEpochMilli() / interval; final long long1 = this.random.nextLong(6); final long long2 = this.random.nextLong(8); return make(time, long1, long2); } finally { lock.unlock(); } } private GUID128 make(final long time, final long long1, final long long2) { return toUuid((time << 48) | (long1 & 0x0000ffffffffffffL), long2); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/ShortSuffixCombFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.nonstandard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory; import java.time.Clock; import java.util.Random; import java.util.UUID; import java.util.function.LongSupplier; /** * Concrete factory for creating Short Suffix COMB GUIDs. *

* A Short Suffix COMB GUID is a UUID that combines a creation time with random * bits. *

* The creation minute is a 2-byte SUFFIX at the LEAST significant bits. *

* The suffix wraps around every ~45 days (2^16 minutes / 60 / 24 ≈ 45.5 days). *

* The created UUID is a UUIDv4 for compatibility with RFC 9562. * * @see Sequential * UUID Generators */ public final class ShortSuffixCombFactory extends AbstCombFactory { /** * Interval in milliseconds. */ protected final int interval; /** * Default interval of 60 seconds in milliseconds. */ protected static final int DEFAULT_INTERVAL = 60_000; /** * Default constructor. */ public ShortSuffixCombFactory() { this(builder()); } /** * Constructor with a clock. * * @param clock a clock */ public ShortSuffixCombFactory(Clock clock) { this(builder().withClock(clock)); } /** * Constructor with a random. * * @param random a random generator */ public ShortSuffixCombFactory(Random random) { this(builder().withRandom(random)); } /** * Constructor with a random and a clock. * * @param random a random * @param clock a clock */ public ShortSuffixCombFactory(Random random, Clock clock) { this(builder().withRandom(random).withClock(clock)); } /** * Constructor with a function which return random numbers. * * @param randomFunction a function */ public ShortSuffixCombFactory(LongSupplier randomFunction) { this(builder().withRandomFunction(randomFunction)); } /** * Constructor with a function which a function which return random numbers and * a clock. * * @param randomFunction a function * @param clock a clock */ public ShortSuffixCombFactory(LongSupplier randomFunction, Clock clock) { this(builder().withRandomFunction(randomFunction).withClock(clock)); } private ShortSuffixCombFactory(Builder builder) { super(UuidVersion.VERSION_RANDOM_BASED, builder); this.interval = builder.getInterval(); } /** * Builder of factories. */ public static class Builder extends AbstCombFactory.Builder { private Integer interval; /** * Get the interval in milliseconds. * * @return the interval in milliseconds. */ protected int getInterval() { if (this.interval == null) { this.interval = DEFAULT_INTERVAL; } return this.interval; } /** * Set the interval in milliseconds. * * @param interval the interval in milliseconds * @return the builder */ public Builder withInterval(int interval) { this.interval = interval; return this; } @Override public ShortSuffixCombFactory build() { return new ShortSuffixCombFactory(this); } } /** * Returns a new builder. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Returns a Short Suffix COMB GUID. 
* * @return a UUIDv4 */ @Override public GUID128 create() { lock.lock(); try { final long time = instantFunction.get().toEpochMilli() / interval; final long long1 = this.random.nextLong(8); final long long2 = this.random.nextLong(6); return make(time, long1, long2); } finally { lock.unlock(); } } private GUID128 make(final long time, final long long1, final long long2) { return toUuid(long1, (((long2 & 0x0000ffff00000000L) << 16) | (time & 0xffffL) << 32) | (long2 & 0x00000000ffffffffL)); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/SuffixCombFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.nonstandard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory; import com.pinecone.ulf.util.guid.i128.factory.AbstRandomBasedFactory; import java.time.Clock; import java.util.Random; import java.util.UUID; import java.util.function.LongSupplier; /** * Concrete factory for creating Suffix COMB GUIDs. *

* A Suffix COMB GUID is a UUID that combines a creation time with random bits. *

* The creation millisecond is a 6-byte SUFFIX at the LEAST significant bits. *
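A minimal sketch of this layout and of reading the timestamp back out (illustrative only; java.util.UUID stands in for GUID128 and version/variant bits are ignored):

    import java.util.Random;
    import java.util.UUID;

    public class SuffixCombSketch {
        public static void main(String[] args) {
            Random random = new Random();
            long time = System.currentTimeMillis();
            long msb = random.nextLong();
            long lsb = (random.nextLong() << 48) | (time & 0x0000ffffffffffffL);
            UUID comb = new UUID(msb, lsb);
            long suffix = comb.getLeastSignificantBits() & 0x0000ffffffffffffL;
            System.out.println(comb + " -> " + suffix);  // suffix equals time
        }
    }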

* The created UUID is a UUIDv4 for compatibility with RFC 9562. * * @see AbstCombFactory * @see AbstRandomBasedFactory * @see The Cost * of GUIDs as Primary Keys */ public final class SuffixCombFactory extends AbstCombFactory { /** * Default constructor. */ public SuffixCombFactory() { this(builder()); } /** * Constructor with a clock. * * @param clock a clock */ public SuffixCombFactory(Clock clock) { this(builder().withClock(clock)); } /** * Constructor with a random. * * @param random a random generator */ public SuffixCombFactory(Random random) { this(builder().withRandom(random)); } /** * Constructor with a random and a clock. * * @param random a random * @param clock a clock */ public SuffixCombFactory(Random random, Clock clock) { this(builder().withRandom(random).withClock(clock)); } /** * Constructor with a function which return random numbers. * * @param randomFunction a function */ public SuffixCombFactory(LongSupplier randomFunction) { this(builder().withRandomFunction(randomFunction)); } /** * Constructor with a function which a function which return random numbers and * a clock. * * @param randomFunction a function * @param clock a clock */ public SuffixCombFactory(LongSupplier randomFunction, Clock clock) { this(builder().withRandomFunction(randomFunction).withClock(clock)); } private SuffixCombFactory(Builder builder) { super(UuidVersion.VERSION_RANDOM_BASED, builder); } /** * Builder of factories. */ public static class Builder extends AbstCombFactory.Builder { @Override public SuffixCombFactory build() { return new SuffixCombFactory(this); } } /** * Returns a new builder. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Returns a Suffix COMB GUID. * * @return a UUIDv4 */ @Override public GUID128 create() { lock.lock(); try { final long time = instantFunction.get().toEpochMilli(); final long long1 = this.random.nextLong(8); final long long2 = this.random.nextLong(2); return make(time, long1, long2); } finally { lock.unlock(); } } private GUID128 make(final long time, final long long1, final long long2) { return toUuid(long1, (long2 << 48) | (time & 0x0000ffffffffffffL)); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/DceSecurityFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.pinecone.ulf.util.guid.i128.factory.standard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidLocalDomain; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstTimeBasedFactory; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; /** * * Concrete factory for creating DCE Security unique identifiers (UUIDv2). * * @see UuidLocalDomain * @see DCE * Security UUIDs */ public final class DceSecurityFactory extends AbstTimeBasedFactory { private AtomicInteger counter; private final byte localDomain; /** * Default constructor. */ public DceSecurityFactory() { this(builder()); } private DceSecurityFactory(Builder builder) { super(UuidVersion.VERSION_DCE_SECURITY, builder); this.localDomain = builder.localDomain; this.counter = new AtomicInteger(); } /** * Returns a builder of DCE Security factory. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Concrete builder for creating a DCE Security factory. * * @see AbstTimeBasedFactory.Builder */ public static class Builder extends AbstTimeBasedFactory.Builder { private byte localDomain; /** * Set the local domain. * * @param localDomain the local domain * @return the builder */ public Builder withLocalDomain(UuidLocalDomain localDomain) { this.localDomain = localDomain.getValue(); return this; } /** * Set the local domain. * * @param localDomain the local domain * @return the builder */ public Builder withLocalDomain(byte localDomain) { this.localDomain = localDomain; return this; } @Override public DceSecurityFactory build() { return new DceSecurityFactory(this); } } /** * Returns a DCE Security unique identifier (UUIDv2). *

* A DCE Security UUID is a modified UUIDv1. *

* Steps of creation: *

  1. Create a Time-based UUIDv1;
  2. Replace the least significant 8 bits of the clock sequence with the local domain;
  3. Replace the least significant 32 bits of the time stamp with the local identifier (both replacements are sketched below).
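A sketch of the two bit replacements from steps 2 and 3 (illustrative only; the sample UUIDv1 halves are arbitrary values):

    public class DceEmbedSketch {
        public static void main(String[] args) {
            long msb = 0x1ec9414c232ab00aL, lsb = 0xb3c81d49f9aec141L;  // sample UUIDv1 halves
            int localIdentifier = 1001;  // e.g. a POSIX UID
            byte localDomain = 0;        // e.g. the "person" domain
            // step 3: time_low occupies the top 32 bits of the MSB; overwrite it
            msb = (msb & 0x00000000ffffffffL) | ((localIdentifier & 0xffffffffL) << 32);
            // step 2: clock_seq_low occupies bits 48..55 of the LSB; overwrite it
            lsb = (lsb & 0x0000ffffffffffffL) | ((localDomain & 0xffL) << 48);
            System.out.printf("%016x%016x%n", msb, lsb);
        }
    }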
* * @param localDomain a local domain * @param localIdentifier a local identifier * @return a DCE Security UUID */ public GUID128 create(byte localDomain, int localIdentifier) { // Create a UUIDv1 GUID128 uuid = super.create(); // Embed the local domain bits final long lsb = embedLocalDomain(uuid.getLeastSignificantBits(), localDomain, this.counter.incrementAndGet()); // Embed the local identifier bits final long msb = embedLocalIdentifier(uuid.getMostSignificantBits(), localIdentifier); return toUuid(msb, lsb); } /** * Returns a DCE Security unique identifier (UUIDv2). * * @param localDomain a local domain * @param localIdentifier a local identifier * @return a DCE Security UUID */ public GUID128 create(UuidLocalDomain localDomain, int localIdentifier) { return create(localDomain.getValue(), localIdentifier); } /** * Returns a DCE Security unique identifier (UUIDv2). *

* The local domain used by this method is the one defined via the builder: * *

{@code
	 * DceSecurityFactory factory = DceSecurityFactory.builder().withLocalDomain(UuidLocalDomain.LOCAL_DOMAIN_PERSON).build();
	 * }
* * @param localIdentifier a local identifier * @return a UUIDv2 */ public GUID128 create(int localIdentifier) { return create(this.localDomain, localIdentifier); } /** * Always throws an exception. *

* Overrides the method {@link AbstTimeBasedFactory#create()} to throw an * exception instead of returning a UUID. * * @throws UnsupportedOperationException always */ @Override public GUID128 create() { throw new UnsupportedOperationException("Unsupported operation for DCE Security UUID factory"); } /** * Returns a DCE Security unique identifier (UUIDv2). * * @return a UUIDv2 */ @Override public GUID128 create(Parameters parameters) { return create(parameters.getLocalDomain(), parameters.getLocalIdentifier()); } /** * Embeds the local identifier into the most significant bits. * * @param msb the MSB * @param localIdentifier the local identifier * @return the updated MSB */ private static long embedLocalIdentifier(long msb, int localIdentifier) { return (msb & 0x00000000ffffffffL) // clear time_low bits | ((localIdentifier & 0x00000000ffffffffL) << 32); } /** * Embeds the local domain bits in the least significant bits. * * @param lsb the LSB * @param localDomain a local domain * @param counter a counter value * @return the updated LSB */ private static long embedLocalDomain(long lsb, byte localDomain, long counter) { return (lsb & 0x0000ffffffffffffL) // clear clock_seq bits | ((localDomain & 0x00000000000000ffL) << 48) // | ((counter & 0x00000000000000ffL) << 56); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/NameBasedMd5Factory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.standard; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidNamespace; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstNameBasedFactory; import java.util.UUID; /** * Concrete factory for creating name-based unique identifiers using MD5 hashing * (UUIDv3). * * @see AbstNameBasedFactory */ public final class NameBasedMd5Factory extends AbstNameBasedFactory { /** * Default constructor. */ public NameBasedMd5Factory() { this((byte[]) null); } /** * Constructor with a namespace. * * @param namespace a namespace */ public NameBasedMd5Factory(GUID128 namespace) { this(namespaceBytes(namespace)); } /** * Constructor with a namespace.
* * @param namespace a namespace */ public NameBasedMd5Factory(String namespace) { this(namespaceBytes(namespace)); } /** * Constructor with a namespace. * * @param namespace a namespace */ public NameBasedMd5Factory(UuidNamespace namespace) { this(namespaceBytes(namespace)); } private NameBasedMd5Factory(byte[] namespace) { super(UuidVersion.VERSION_NAME_BASED_MD5, ALGORITHM_MD5, namespace); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/NameBasedSha1Factory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.standard; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidNamespace; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstNameBasedFactory; import java.util.UUID; /** * Concrete factory for creating name-based unique identifiers using SHA-1 * hashing (UUIDv5). * * @see AbstNameBasedFactory */ public final class NameBasedSha1Factory extends AbstNameBasedFactory { /** * Default constructor. */ public NameBasedSha1Factory() { this((byte[]) null); } /** * Constructor with a namespace. * * @param namespace a namespace */ public NameBasedSha1Factory(GUID128 namespace) { this(namespaceBytes(namespace)); } /** * Constructor with a namespace. * * @param namespace a namespace */ public NameBasedSha1Factory(String namespace) { this(namespaceBytes(namespace)); } /** * Constructor with a namespace. 
* * @param namespace a namespace */ public NameBasedSha1Factory(UuidNamespace namespace) { this(namespaceBytes(namespace)); } private NameBasedSha1Factory(byte[] namespace) { super(UuidVersion.VERSION_NAME_BASED_SHA1, ALGORITHM_SHA1, namespace); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/RandomBasedFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.standard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstRandomBasedFactory; import com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil; import java.util.Random; import java.util.UUID; import java.util.function.LongSupplier; /** * Concrete factory for creating random-based unique identifiers (UUIDv4). */ public final class RandomBasedFactory extends AbstRandomBasedFactory { /** * Default constructor. */ public RandomBasedFactory() { this(builder()); } /** * Constructor with a {@link Random} instance. * * @param random a {@link Random} instance */ public RandomBasedFactory(Random random) { this(builder().withRandom(random)); } /** * Constructor with a function which returns random number. * * @param randomSupplier a function */ public RandomBasedFactory(LongSupplier randomSupplier) { this(builder().withRandomFunction(randomSupplier)); } private RandomBasedFactory(Builder builder) { super(UuidVersion.VERSION_RANDOM_BASED, builder); } /** * Concrete builder for creating a random-based factory. * * @see AbstRandomBasedFactory.Builder */ public static class Builder extends AbstRandomBasedFactory.Builder { @Override public RandomBasedFactory build() { return new RandomBasedFactory(this); } } /** * Returns a builder of random-based factory. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Returns a random-based UUID. * * ### RFC 9562 - 4.4. Algorithms for Creating a UUID from Truly Random or * Pseudo-Random Numbers * * (1) Set the two most significant bits (bits 6 and 7) of the * clock_seq_hi_and_reserved to zero and one, respectively. * * (2) Set the four most significant bits (bits 12 through 15) of the * time_hi_and_version field to the 4-bit version number from Section 4.1.3. 
* * (3) Set all the other bits to randomly (or pseudo-randomly) chosen values. * * @return a random-based UUID */ @Override public GUID128 create() { lock.lock(); try { if (this.random instanceof SafeRandom) { final byte[] bytes = this.random.nextBytes(16); final long msb = ByteUtil.toNumber(bytes, 0, 8); final long lsb = ByteUtil.toNumber(bytes, 8, 16); return toUuid(msb, lsb); } else { final long msb = this.random.nextLong(); final long lsb = this.random.nextLong(); return toUuid(msb, lsb); } } finally { lock.unlock(); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/TimeBasedFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.standard; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstTimeBasedFactory; /** * Concrete factory for creating time-based unique identifiers (UUIDv1). * * @see AbstTimeBasedFactory */ public final class TimeBasedFactory extends AbstTimeBasedFactory { /** * Default constructor. */ public TimeBasedFactory() { this(builder()); } private TimeBasedFactory(Builder builder) { super(UuidVersion.VERSION_TIME_BASED, builder); } /** * Returns a builder of time-based factory. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Concrete builder for creating a time-based factory. 
* * @see AbstTimeBasedFactory.Builder */ public static class Builder extends AbstTimeBasedFactory.Builder { @Override public TimeBasedFactory build() { return new TimeBasedFactory(this); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/TimeOrderedEpochFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.standard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory; import com.pinecone.ulf.util.guid.i128.factory.nonstandard.PrefixCombFactory; import java.time.Clock; import java.time.Instant; import java.util.Objects; import java.util.Random; import java.util.UUID; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Supplier; /** * Concrete factory for creating Unix epoch time-ordered unique identifiers * (UUIDv7). *

* UUIDv7 is a new UUID version proposed by Peabody and Davis. It is similar to * Prefix COMB GUID and ULID. *

* This factory creates 3 types: *

  • Type 1 (default): this type is divided in 3 components, namely time, counter and random. The counter component is incremented by 1 when the time repeats. The random component is always randomized.
  • Type 2 (plus 1): this type is divided in 2 components, namely time and monotonic random. The monotonic random component is incremented by 1 when the time repeats (see the sketch after this list). This type of UUID is like a Monotonic ULID. It can be much faster than the other types.
  • Type 3 (plus n): this type is also divided in 2 components, namely time and monotonic random. The monotonic random component is incremented by a random positive integer between 1 and 2^32. This type of UUID is also like a Monotonic ULID, but with a random increment instead of 1.
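A minimal sketch of the "plus 1" rule (illustrative only; it omits the version/variant bits and the fine-grained locking of the real implementation):

    import java.security.SecureRandom;
    import java.util.UUID;

    public class Plus1Sketch {
        static final SecureRandom RANDOM = new SecureRandom();
        static long lastMillis = -1, msb, lsb;

        static synchronized UUID next() {
            long now = System.currentTimeMillis();
            if (now == lastMillis) {
                if (++lsb == 0) msb++;  // rand_b rolled over: carry into rand_a
            } else {
                lastMillis = now;
                msb = (now << 16) | (RANDOM.nextLong() & 0xfffL);  // time + rand_a
                lsb = RANDOM.nextLong();                           // rand_b
            }
            return new UUID(msb, lsb);
        }

        public static void main(String[] args) {
            System.out.println(next());
            System.out.println(next());  // same millisecond: previous value + 1
        }
    }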

* If the underlying runtime provides enough clock precision, the microseconds * are also injected in the UUID, specifically in the {@code rand_a} field, * which is the name RFC 9562 gives to the 12 bits right after the milliseconds * field, from left to right. Otherwise, these 12 bits are randomly generated. *
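The scaling is the same one used by microseconds() further down; as a standalone sketch (illustrative class name):

    import java.time.Instant;

    public class RandASketch {
        public static void main(String[] args) {
            Instant now = Instant.now();
            long nanosWithinMilli = now.getNano() % 1_000_000L;  // 0 .. 999_999
            long randA = (nanosWithinMilli << 12) / 1_000_000L;  // scaled to 0 .. 4095
            System.out.println("rand_a = " + randA);  // zero if the clock is millisecond-only
        }
    }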

* In JDK 11, we get 1 microsecond precision. However, in JDK 8, the maximum * precision we can get is 1 millisecond. On Windows, it is even worse because * the default precision is 15.625ms, due to the system clock's refresh rate of * 64Hz. * * @since 5.0.0 * @see PrefixCombFactory * @see ULID Specification * @see New * UUID formats * @see Revise * Universally Unique Identifier Definitions (uuidrev) */ public final class TimeOrderedEpochFactory extends AbstCombFactory { private final UuidFunction uuidFunction; private static final int INCREMENT_TYPE_DEFAULT = 0; // add 2^48 to `rand_b` private static final int INCREMENT_TYPE_PLUS_1 = 1; // just add 1 to `rand_b` private static final int INCREMENT_TYPE_PLUS_N = 2; // add a random n to `rand_b`, where 1 <= n <= 2^32 private static final long INCREMENT_MAX_DEFAULT = 0xffffffffL; // 2^32-1 private static final long versionBits = 0x000000000000f000L; private static final long variantBits = 0xc000000000000000L; private static final long upper16Bits = 0xffff000000000000L; private static final long upper48Bits = 0xffffffffffff0000L; /** * Default constructor. */ public TimeOrderedEpochFactory() { this(builder()); } /** * Constructor with a clock. * * @param clock a clock */ public TimeOrderedEpochFactory(Clock clock) { this(builder().withClock(clock)); } /** * Constructor with a random. * * @param random a random */ public TimeOrderedEpochFactory(Random random) { this(builder().withRandom(random)); } /** * Constructor with a random and a clock. * * @param random a random * @param clock a clock */ public TimeOrderedEpochFactory(Random random, Clock clock) { this(builder().withRandom(random).withClock(clock)); } /** * Constructor with a function which return random numbers. * * @param randomFunction a function */ public TimeOrderedEpochFactory(LongSupplier randomFunction) { this(builder().withRandomFunction(randomFunction)); } /** * Constructor with a function which a function which return random numbers and * a clock. * * @param randomFunction a function * @param clock a clock */ public TimeOrderedEpochFactory(LongSupplier randomFunction, Clock clock) { this(builder().withRandomFunction(randomFunction).withClock(clock)); } private TimeOrderedEpochFactory(Builder builder) { super(UuidVersion.VERSION_TIME_ORDERED_EPOCH, builder); switch (builder.getIncrementType()) { case INCREMENT_TYPE_PLUS_1: this.uuidFunction = new Plus1Function(random, instantFunction); break; case INCREMENT_TYPE_PLUS_N: this.uuidFunction = new PlusNFunction(random, instantFunction, builder.getIncrementMax()); break; case INCREMENT_TYPE_DEFAULT: default: this.uuidFunction = new DefaultFunction(random, instantFunction); } } /** * Concrete builder for creating a Unix epoch time-ordered factory. * * @see AbstCombFactory.Builder */ public static class Builder extends AbstCombFactory.Builder { private Integer incrementType; private Long incrementMax; /** * Set the increment type to PLUS 1. * * @return the builder */ public Builder withIncrementPlus1() { this.incrementType = INCREMENT_TYPE_PLUS_1; this.incrementMax = null; return this; } /** * Set the increment type to PLUS N. * * @return the builder */ public Builder withIncrementPlusN() { this.incrementType = INCREMENT_TYPE_PLUS_N; this.incrementMax = null; return this; } /** * Set the increment type to PLUS N and set the max increment. 
* * @param incrementMax a number * @return the builder */ public Builder withIncrementPlusN(long incrementMax) { this.incrementType = INCREMENT_TYPE_PLUS_N; this.incrementMax = incrementMax; return this; } /** * Get the increment type. * * @return a number */ protected int getIncrementType() { if (this.incrementType == null) { this.incrementType = INCREMENT_TYPE_DEFAULT; } return this.incrementType; } /** * Get the max increment. * * @return a number */ protected long getIncrementMax() { if (this.incrementMax == null) { this.incrementMax = INCREMENT_MAX_DEFAULT; } return this.incrementMax; } @Override public TimeOrderedEpochFactory build() { return new TimeOrderedEpochFactory(this); } } /** * Returns a builder of Unix epoch time-ordered factory. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Returns a time-ordered unique identifier (UUIDv7). * * @return a UUIDv7 */ @Override public GUID128 create() { UUID uuid = this.uuidFunction.apply(null); return toUuid(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); } public GUID128 createXorUint64LSB( long xorMask ) { UUID uuid = this.uuidFunction.apply(null); long msb = uuid.getMostSignificantBits(); long lsb = uuid.getLeastSignificantBits(); long modifiedLsb = lsb ^ xorMask; return toUuid(msb, modifiedLsb); } /** * Returns a time-ordered unique identifier (UUIDv7) for a given instant. *

* The random component is generated with each method invocation. * * @param parameters the parameters holding a given instant * @return a UUIDv7 */ @Override public GUID128 create(Parameters parameters) { Objects.requireNonNull(parameters.getInstant(), "Null instant"); UUID uuid = this.uuidFunction.apply(parameters.getInstant()); return toUuid(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); } static abstract class UuidFunction implements Function<Instant, UUID> { protected long msb = 0L; // most significant bits protected long lsb = 0L; // least significant bits protected final IRandom random; protected Supplier<Instant> instantFunction; protected final ReentrantLock lock = new ReentrantLock(); // let go up to 1 second ahead of system clock private static final long advanceMax = 1_000L; // let's try to detect the system clock precision protected static final int precision = precision(); protected static final int PRECISION_MILLISECOND = 1; protected static final int PRECISION_MICROSECOND = 2; protected static final long overflow = 0x0000000000000000L; public UuidFunction(IRandom random, Supplier<Instant> instantFunction) { this.random = random; this.instantFunction = instantFunction; // instantiate the internal state reset(this.instantFunction.get()); } @Override public UUID apply(final Instant instant) { lock.lock(); try { if (instant != null) { reset(instant); // user specified return new UUID(this.msb, this.lsb); } Instant now = instantFunction.get(); long lastTime = this.lastTime(); long time = now.toEpochMilli(); // is it not too much ahead of system clock? if (advanceMax > Math.abs(lastTime - time)) { time = Math.max(lastTime, time); } if (time == lastTime) { increment(now); } else { reset(now); } return new UUID(this.msb, this.lsb); } finally { lock.unlock(); } } /** * Increment the `rand_b` field. * * If the `rand_b` field rolls over, then `rand_a` should be incremented too. * * Note that as `unix_ts_ms` and `rand_a` are stored in the same `long` * variable, when `rand_a` rolls over, `unix_ts_ms` goes up automatically. * * To be implemented by each specific subclass. * * @param instant an instant */ abstract void increment(final Instant instant); /** * Reset the `unix_ts_ms` field with the current milliseconds. Also set the * `rand_a` and `rand_b` fields with random bits. * * If there's enough clock precision, inject the current microseconds into the * `rand_a` field instead of random bits. * * @param instant an instant */ void reset(final Instant instant) { this.msb = instant.toEpochMilli() << 16; this.lsb = random.nextLong(); if (precision == PRECISION_MILLISECOND) { // lack of precision: put random bits in `rand_a` this.msb = (msb & upper48Bits) | random.nextLong(2); } else { // set `rand_a` field microseconds(instant); } } /** * Injects microseconds into the `rand_a` field. *

* It only works when the underlying runtime provides at least microsecond * precision. Otherwise, this method won't change the value in `rand_a` field. * * @param instant an instant */ void microseconds(final Instant instant) { // do nothing if not enough precision if (precision == PRECISION_MILLISECOND) { return; } final long shift = 12; final long scale = 1_000_000L; final long nanos = instant.getNano(); final long randa = ((nanos % scale) << shift) / scale; // previous and next and timestamps final long prev = (msb & ~versionBits); final long next = (msb & upper48Bits) | (randa & 0x0fffL); // don't let the timestamp go backwards this.msb = (next > prev) ? next : prev; } long lastTime() { return this.msb >>> 16; } /** * Returns the instant precision detected. * * @param clock a custom clock instance * @return the precision */ static int precision() { Clock clock = Clock.systemUTC(); int best = 0; int loop = 3; // the best of 3 for (int i = 0; i < loop; i++) { int x = 0; int nanosecond = clock.instant().getNano(); if (nanosecond % 1_000_000 != 0) { x = PRECISION_MICROSECOND; } else { x = PRECISION_MILLISECOND; } best = Math.max(best, x); } return best; } } static final class DefaultFunction extends UuidFunction { public DefaultFunction(IRandom random, Supplier instantFunction) { super(random, instantFunction); } @Override void increment(final Instant instant) { // set `rand_a` field microseconds(instant); // add 2^48 to `rand_b` this.lsb = (this.lsb & upper16Bits); this.lsb = (this.lsb | variantBits) + (1L << 48); if (this.lsb == overflow) { // add 1 to `rand_a` if overflow occurs this.msb = (this.msb | versionBits) + 1L; } // then randomize the lower 48 bits this.lsb = (this.lsb & upper16Bits) | this.random.nextLong(6); } } static final class Plus1Function extends UuidFunction { public Plus1Function(IRandom random, Supplier instantFunction) { super(random, instantFunction); } @Override void increment(final Instant instant) { // set `rand_a` field microseconds(instant); // just add 1 to `rand_b` this.lsb = (this.lsb | variantBits) + 1L; if (this.lsb == overflow) { // add 1 to `rand_a` if overflow occurs this.msb = (this.msb | versionBits) + 1L; } } } static final class PlusNFunction extends UuidFunction { private final LongSupplier plusNFunction; public PlusNFunction(IRandom random, Supplier instantFunction, Long incrementMax) { super(random, instantFunction); this.plusNFunction = customPlusNFunction(random, incrementMax); } @Override void increment(final Instant instant) { // set `rand_a` field microseconds(instant); // add a random n to `rand_b`, where 1 <= n <= incrementMax this.lsb = (this.lsb | variantBits) + plusNFunction.getAsLong(); if (this.lsb == overflow) { // add 1 to `rand_a` if overflow occurs this.msb = (this.msb | versionBits) + 1L; } } private LongSupplier customPlusNFunction(IRandom random, Long incrementMax) { if (incrementMax == INCREMENT_MAX_DEFAULT) { if (random instanceof SafeRandom) { return () -> { // return n, where 1 <= n <= 2^32 return random.nextLong(Integer.BYTES) + 1; }; } else { return () -> { // return n, where 1 <= n <= 2^32 return (random.nextLong() >>> 32) + 1; }; } } else { final long positive = 0x7fffffffffffffffL; if (random instanceof SafeRandom) { // the minimum number of bits and bytes for incrementMax final int bits = (int) Math.ceil(Math.log(incrementMax) / Math.log(2)); final int size = ((bits - 1) / Byte.SIZE) + 1; return () -> { // return n, where 1 <= n <= incrementMax return ((random.nextLong(size) & positive) % incrementMax) + 1; }; } else 
{ return () -> { // return n, where 1 <= n <= incrementMax return ((random.nextLong() & positive) % incrementMax) + 1; }; } } } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/TimeOrderedFactory.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.factory.standard; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import com.pinecone.ulf.util.guid.i128.factory.AbstTimeBasedFactory; /** * Concrete factory for creating time-ordered unique identifiers (UUIDv6). *

* UUIDv6 is a new UUID version proposed by Peabody and Davis. *

* Warning: this can change in the future. * * @see AbstTimeBasedFactory * @see New * UUID formats * @see Revise * Universally Unique Identifier Definitions (uuidrev) */ public final class TimeOrderedFactory extends AbstTimeBasedFactory { /** * Default constructor. */ public TimeOrderedFactory() { this(builder()); } private TimeOrderedFactory(Builder builder) { super(UuidVersion.VERSION_TIME_ORDERED, builder); } /** * Returns the most significant bits of the UUID. *

* It implements the algorithm for generating UUIDv6. * * @param timestamp the number of 100-nanoseconds since 1970-01-01 (Unix epoch) * @return the MSB */ @Override protected long formatMostSignificantBits(final long timestamp) { return ((timestamp & 0x0ffffffffffff000L) << 4) // | (timestamp & 0x0000000000000fffL) // | 0x0000000000006000L; // apply version 6 } /** * Returns a builder of random-ordered factory. * * @return a builder */ public static Builder builder() { return new Builder(); } /** * Concrete builder for creating a time-ordered factory. * * @see AbstTimeBasedFactory.Builder */ public static class Builder extends AbstTimeBasedFactory.Builder { @Override public TimeOrderedFactory build() { return new TimeOrderedFactory(this); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/CombUtil.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util; import java.time.Instant; import java.util.UUID; /** * Utility for extracting time from COMB GUIDs. */ public final class CombUtil { private CombUtil() { } /** * Returns the prefix from a Prefix COMB. *
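The same bit moves, runnable in isolation (an illustrative sketch; the sample timestamp is an arbitrary 60-bit value, not a real Gregorian-epoch count):

    public class V6LayoutSketch {
        static long format(long ts) {
            return ((ts & 0x0ffffffffffff000L) << 4)  // high 48 timestamp bits
                    | (ts & 0x0000000000000fffL)      // low 12 timestamp bits
                    | 0x0000000000006000L;            // version 6 nibble
        }

        public static void main(String[] args) {
            long ts = 0x0123456789abcdefL & 0x0fffffffffffffffL;  // sample 60-bit timestamp
            System.out.printf("%016x%n", format(ts));
        }
    }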

* The value returned is equivalent to the number of milliseconds since * 1970-01-01 (Unix epoch). * * @param comb a Prefix COMB * @return the prefix (the Unix milliseconds) */ public static long getPrefix(UUID comb) { return (comb.getMostSignificantBits() >>> 16); } /** * Returns the suffix from a Suffix COMB. *

* The value returned is equivalent to the number of milliseconds since * 1970-01-01 (Unix epoch). * * @param comb a Suffix COMB * @return the suffix (the Unix milliseconds) */ public static long getSuffix(UUID comb) { return (comb.getLeastSignificantBits() & 0x0000ffffffffffffL); } /** * Returns the instant from a Prefix COMB. * * @param comb a Prefix COMB * @return {@link Instant} */ public static Instant getPrefixInstant(UUID comb) { long milliseconds = getPrefix(comb); return Instant.ofEpochMilli(milliseconds); } /** * Returns the instant from a Suffix COMB. * * @param comb a Suffix COMB * @return {@link Instant} */ public static Instant getSuffixInstant(UUID comb) { long milliseconds = getSuffix(comb); return Instant.ofEpochMilli(milliseconds); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/MachineId.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.util.internal.NetworkUtil; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.UUID; import static com.pinecone.ulf.util.guid.i128.util.UuidUtil.setVersion; import static com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil.toHexadecimal; import static com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil.toNumber; /** * Utility for generating machine ID. *
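A round trip through these accessors (illustrative sketch; java.util.UUID stands in for GUID128 and the non-timestamp bits are made-up values):

    import java.time.Instant;
    import java.util.UUID;

    public class CombUtilSketch {
        public static void main(String[] args) {
            long time = System.currentTimeMillis();
            UUID comb = new UUID((time << 16) | 0x4aaaL, 0x8bbbbbbbbbbbbbbbL);
            long prefix = comb.getMostSignificantBits() >>> 16;  // what getPrefix(comb) returns
            System.out.println(Instant.ofEpochMilli(prefix));    // the creation instant
        }
    }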

* It works in three steps (a usage sketch follows the list):

*   1. Create a string containing HOSTNAME, MAC and IP;
*   2. Create a hash of the string using SHA-256 algorithm;
*   3. Create the identifier using part of the resulting hash.
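*
* A minimal usage sketch (illustrative only; the actual values depend on the host's
* name, MAC and IP):
* <pre>{@code
* long id = MachineId.getMachineId();      // first 8 bytes of the SHA-256 hash, as a (possibly negative) long
* UUID uuid = MachineId.getMachineUuid();  // first 16 bytes of the hash, with version 4 applied
* String hex = MachineId.getMachineHexa(); // the 32-byte hash as a 64-char hexadecimal string
* }</pre>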
*/ public final class MachineId { private static Long id; private static UUID uuid; private static String hexa; private static byte[] hash; private static String string; private MachineId() { } /** * Returns a number generated from the machine hash. *

* It uses the first 8 bytes of the machine hash. * * Note that the return value can be negative. * * @return a number */ public static long getMachineId() { if (id == null) { id = getMachineId(getMachineHash()); } return id; } static long getMachineId(byte[] hash) { return toNumber(hash, 0, 8); } /** * Returns a UUID generated from the machine hash. *

* It uses the first 16 bytes of the machine hash. *

* The UUID version is 4. * * @return a UUID */ public static UUID getMachineUuid() { if (uuid == null) { uuid = getMachineUuid(getMachineHash()); } return uuid; } static UUID getMachineUuid(byte[] hash) { final long mostSigBits = toNumber(hash, 0, 8); final long leastSigBits = toNumber(hash, 8, 16); return setVersion(new UUID128(mostSigBits, leastSigBits), 4); } /** * Returns the machine hash in hexadecimal format. *

* The returned string has 64 chars. * * @return a string */ public static String getMachineHexa() { if (hexa == null) { hexa = getMachineHexa(getMachineHash()); } return hexa; } static String getMachineHexa(byte[] hash) { return toHexadecimal(hash); } /** * Returns the machine hash in a byte array. *

* The returned array has 32 bytes (256 bits). * * @return a byte array */ public static byte[] getMachineHash() { if (hash == null) { hash = getMachineHash(getMachineString()); } return Arrays.copyOf(hash, hash.length); } static byte[] getMachineHash(String string) { try { return MessageDigest.getInstance("SHA-256").digest(string.getBytes(StandardCharsets.UTF_8)); } catch (NoSuchAlgorithmException e) { throw new InternalError("Message digest algorithm not supported.", e); } } /** * Returns a string containing host name, MAC and IP. *

* Output format: "hostname 11-11-11-11-11-11 222.222.222.222". * * @return a string */ public static String getMachineString() { if (string == null) { string = NetworkUtil.getMachineString(); } return string; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidBuilder.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util; import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.util.UUID; /** * A UUID builder. *

* Usage:
*
* <pre>{@code
 * SecureRandom random = new SecureRandom();
 * UUID uuid = new UuidBuilder(4) // sets version 4 (random-based)
 * 		.put(random.nextLong()) // put the most significant 64 bits
 * 		.put(random.nextLong()) // put the least significant 64 bits
 * 		.build(); // return the built UUID
 * }</pre>
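*
* A hedged sketch of the no-version variant (build() only overrides the version and
* variant bits when a version number was given to the constructor):
* <pre>{@code
* UUID raw = new UuidBuilder() // no version number: all 128 bits are kept as given
* 		.put(0x0011223344556677L) // most significant 64 bits
* 		.put(0x8899aabbccddeeffL) // least significant 64 bits
* 		.build();
* }</pre>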
*/ public class UuidBuilder { private Integer version; // newly-created byte buffers are always BIG_ENDIAN private ByteBuffer buffer = ByteBuffer.allocate(16); /** * Instantiates a new builder without a version number. * */ public UuidBuilder() { this.version = null; } /** * Instantiates a new builder with a version number. * * @param version a value between 0 and 15 */ public UuidBuilder(int version) { if (version < 0x00L || version > 0xfL) { throw new IllegalArgumentException("Invalid version number"); } this.version = version; } /** * Puts 8 bytes containing the given long. * * @param value a long value * * @return This buffer * * @throws BufferOverflowException If there are fewer than 8 bytes remaining */ public synchronized UuidBuilder put(long value) { buffer.putLong(value); return this; } /** * Puts 4 bytes containing the given int. * * @param value an int value * * @return This buffer * * @throws BufferOverflowException If there are fewer than 4 bytes remaining */ public synchronized UuidBuilder put(int value) { buffer.putInt(value); return this; } /** * Puts 2 bytes containing the given short. * * @param value a short value * * @return This buffer * * @throws BufferOverflowException If there are fewer than 2 bytes remaining */ public synchronized UuidBuilder put(short value) { buffer.putShort(value); return this; } /** * Puts the given byte. * * @param value a byte value * * @return This buffer * * @throws BufferOverflowException If there are fewer than 1 bytes remaining */ public synchronized UuidBuilder put(byte value) { buffer.put(value); return this; } /** * Puts the given byte array. * * @param value a byte array * * @return This buffer * * @throws BufferOverflowException If there are fewer bytes remaining than the * array length */ public synchronized UuidBuilder put(byte[] array) { buffer.put(array); return this; } /** * Builds a UUID after all 16 bytes are filled. *

* This method ends the use of a builder. *

* Successive calls will always return the same UUID value. *

* Note: this method overrides bits 48 through 51 (version field) and bits 52 * through 63 (variant field), 6 bits total, to comply the UUID specification. * * @throws BufferUnderflowException If there are bytes remaining to be filled */ public synchronized UUID build() { validate(); buffer.rewind(); if (this.version != null) { // set the 4 most significant bits of the 7th byte (version field) final long msb = (buffer.getLong() & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // set the 2 most significant bits of the 9th byte to 1 and 0 (variant field) final long lsb = (buffer.getLong() & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; return new UUID(msb, lsb); } final long msb = buffer.getLong(); final long lsb = buffer.getLong(); return new UUID(msb, lsb); } private synchronized void validate() { if (buffer.hasRemaining()) { throw new BufferUnderflowException(); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidComparator.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util; import java.util.Comparator; import java.util.UUID; import java.util.function.ToIntBiFunction; /** * Comparator for UUIDs. *

* The default static method compares two time-based UUIDs by comparing the time * stamps first and then comparing the least significant bits as unsigned 64-bit * integers. If both UUIDs are not time-based then it compares them as unsigned * 128-bit integers. *

* The opaque static method compares two UUIDs as unsigned 128-bit integers. * It's the same as lexicographic sorting of UUID canonical strings. */ public final class UuidComparator implements Comparator<UUID> { private final ToIntBiFunction<UUID, UUID> comparator; private static final UuidComparator INSTANCE_DEFAULT = new UuidComparator(UuidComparator::defaultCompare); private static final UuidComparator INSTANCE_OPAQUE = new UuidComparator(UuidComparator::opaqueCompare); private UuidComparator(ToIntBiFunction<UUID, UUID> comparator) { this.comparator = comparator; } /** * Creates a default implementation of {@link UuidComparator}. * * @see UuidComparator#defaultCompare(UUID, UUID) */ public UuidComparator() { this(UuidComparator::defaultCompare); } /** * Returns a default implementation of {@link UuidComparator}. * * @return a {@link UuidComparator} * @see UuidComparator#defaultCompare(UUID, UUID) */ public static UuidComparator getDefaultInstance() { return INSTANCE_DEFAULT; } /** * Returns an opaque implementation of {@link UuidComparator}. * * @return an opaque {@link UuidComparator} * @see UuidComparator#opaqueCompare(UUID, UUID) */ public static UuidComparator getOpaqueInstance() { return INSTANCE_OPAQUE; } /** * Compares two UUIDs. *

* The default static method compares two time-based UUIDs by comparing the time * stamps first and then comparing the least significant bits as unsigned 64-bit * integers. If both UUIDs are not time-based then it compares them as unsigned * 128-bit integers. *

* The first of two UUIDs is greater than the second if the time stamp is * greater for the first UUID. If the time stamps are equal, the first of two * UUIDs is greater than the second if the most significant byte in which they * differ is greater for the first UUID. *

* It can be useful for these reasons: *

*   1. {@link UUID#compareTo(UUID)} doesn't work well for time-based UUIDs;
*   2. {@link UUID#compareTo(UUID)} can lead to unexpected behavior due to signed {@code long} comparison;
*   3. {@link UUID#compareTo(UUID)} throws {@link NullPointerException} if a {@code null} UUID is given.
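*
* A short sketch of the ordering (both literals below are version-1, variant-2 UUIDs,
* so the timestamp-first rule applies):
* <pre>{@code
* UUID a = UUID.fromString("00000000-0000-1000-8000-000000000000");
* UUID b = UUID.fromString("00000001-0000-1000-8000-000000000000");
* int cmp = UuidComparator.defaultCompare(a, b); // negative: 'a' has the older timestamp
* }</pre>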
* * @param uuid1 a {@code UUID} * @param uuid2 another {@code UUID} * @return -1, 0 or 1 as {@code u1} is less than, equal to, or greater than * {@code u2} */ public static int defaultCompare(UUID uuid1, UUID uuid2) { UUID u1 = uuid1 != null ? uuid1 : new UUID(0L, 0L); UUID u2 = uuid2 != null ? uuid2 : new UUID(0L, 0L); // time-based comparison is done by timestamp first if (isTimeBased(u1) && isTimeBased(u2)) { UUID rearranged1 = new UUID(u1.timestamp(), u1.getLeastSignificantBits()); UUID rearranged2 = new UUID(u2.timestamp(), u2.getLeastSignificantBits()); return opaqueCompare(rearranged1, rearranged2); } // unsigned 128 bit integers return opaqueCompare(u1, u2); } /** * Compares two UUIDs. *

* The opaque static method compares two UUIDs as unsigned 128-bit integers. * It's the same as lexicographic sorting of UUID canonical strings. *

* The first of two UUIDs is greater than the second if the most significant * byte in which they differ is greater for the first UUID. *

* The opaque method is faster than the default method as it does not check the * UUID version. *

* It's referred to as "opaque" just because it works like a "blind byte-to-byte * comparison". *

* It can be useful for these reasons: *

*   1. {@link UUID#compareTo(UUID)} can lead to unexpected behavior due to signed {@code long} comparison;
*   2. {@link UUID#compareTo(UUID)} throws {@link NullPointerException} if a {@code null} UUID is given.
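*
* For illustration (a sketch; the two values differ only in the most significant bit,
* which a signed comparison would order the other way around):
* <pre>{@code
* UUID u1 = new UUID(0x7fffffffffffffffL, 0L);
* UUID u2 = new UUID(0x8000000000000000L, 0L);
* int cmp = UuidComparator.opaqueCompare(u1, u2); // -1: as unsigned values, u1 < u2
* }</pre>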
* * @param uuid1 a {@code UUID} * @param uuid2 another {@code UUID} * @return -1, 0 or 1 as {@code u1} is less than, equal to, or greater than * {@code u2} */ public static int opaqueCompare(UUID uuid1, UUID uuid2) { UUID u1 = uuid1 != null ? uuid1 : new UUID(0L, 0L); UUID u2 = uuid2 != null ? uuid2 : new UUID(0L, 0L); // used to compare as UNSIGNED longs final long min = 0x8000000000000000L; final long a = u1.getMostSignificantBits() + min; final long b = u2.getMostSignificantBits() + min; if (a > b) return 1; else if (a < b) return -1; final long c = u1.getLeastSignificantBits() + min; final long d = u2.getLeastSignificantBits() + min; if (c > d) return 1; else if (c < d) return -1; return 0; } /** * Compares two UUIDs. * * @param uuid1 a {@code UUID} * @param uuid2 another {@code UUID} * @return -1, 0 or 1 as {@code u1} is less than, equal to, or greater than * {@code u2} * @see UuidComparator#defaultCompare(UUID, UUID) */ @Override public int compare(UUID uuid1, UUID uuid2) { return this.comparator.applyAsInt(uuid1, uuid2); } private static boolean isTimeBased(UUID uuid) { return uuid.version() == 1 && uuid.variant() == 2; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidTime.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util; import java.time.Instant; /** * Utility for UUID time stamps. *

* The UUID time stamp is a 60-bit number. *

* The UUID time stamp resolution is 100ns, i.e., the UUID clock ticks every * 100-nanosecond interval. *

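* For example (a sketch of the tick arithmetic; one second equals 10,000,000 ticks of 100ns):
* <pre>{@code
* Instant instant = Instant.parse("1970-01-01T00:00:01Z");
* long ticks = UuidTime.toUnixTimestamp(instant); // 10_000_000
* }</pre>
*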
* In JDK 8, {@link Instant#now()} has millisecond precision, in spite of * {@link Instant} has nanoseconds resolution. In JDK 9+,{@link Instant#now()} * has microsecond precision. * * @see Current time in * microseconds in java * @see Increase the * precision of the implementation of java.time.Clock.systemUTC() */ public final class UuidTime { /** * The Unix epoch. */ public static final Instant EPOCH_UNIX = Instant.parse("1970-01-01T00:00:00.000Z"); // 0s /** * The Gregorian epoch. */ public static final Instant EPOCH_GREG = Instant.parse("1582-10-15T00:00:00.000Z"); // -12219292800s /** * The Unix epoch in seconds. */ public static final long EPOCH_UNIX_SECONDS = EPOCH_UNIX.getEpochSecond(); /** * The Gregorian epoch in seconds. */ public static final long EPOCH_GREG_SECONDS = EPOCH_GREG.getEpochSecond(); /** * Number nanos per clock tick. */ public static final long NANOS_PER_TICK = 100; // 1 tick = 100ns /** * Number of clock ticks per millisecond. */ public static final long TICKS_PER_MILLI = 10_000; // 1ms = 10,000 ticks /** * Number of clock ticks per second. */ public static final long TICKS_PER_SECOND = 10_000_000; // 1s = 10,000,000 ticks private UuidTime() { } /** * Returns the number of 100ns since 1970-01-01 (Unix epoch). *

* It uses {@link Instant#now()} to get the current time. * * @return a number of 100ns since 1970-01-01 (Unix epoch). */ public static long getUnixTimestamp() { return toUnixTimestamp(Instant.now()); } /** * Returns the number of 100ns since 1582-10-15 (Gregorian epoch). *

* It uses {@link Instant#now()} to get the the current time. * * @return a number of 100ns since 1582-10-15 (Gregorian epoch). */ public static long getGregTimestamp() { return toGregTimestamp(Instant.now()); } /** * Converts a number of 100ns since 1582-10-15 (Gregorian epoch) into a number * of 100ns since 1970-01-01 (Unix epoch). * * @param gregTimestamp a number of 100ns since 1582-10-15 (Gregorian epoch) * @return a number of 100ns since 1970-01-01 (Unix epoch) */ public static long toUnixTimestamp(final long gregTimestamp) { return gregTimestamp + (EPOCH_GREG_SECONDS * TICKS_PER_SECOND); } /** * Converts a number of 100ns since 1970-01-01 (Unix epoch) into a number of * 100ns since 1582-10-15 (Gregorian epoch). * * @param unixTimestamp a number of 100ns since 1970-01-01 (Unix epoch) * @return a number of 100ns since 1582-10-15 (Gregorian epoch). */ public static long toGregTimestamp(final long unixTimestamp) { return unixTimestamp - (EPOCH_GREG_SECONDS * TICKS_PER_SECOND); } /** * Converts an {@link Instant} into a number of 100ns since 1970-01-01 (Unix * epoch). * * @param instant an instant * @return a number of 100ns since 1970-01-01 (Unix epoch). */ public static long toUnixTimestamp(final Instant instant) { final long seconds = instant.getEpochSecond() * TICKS_PER_SECOND; final long nanos = instant.getNano() / NANOS_PER_TICK; return seconds + nanos; } /** * Converts an {@link Instant} into a number of 100ns since 1582-10-15 * (Gregorian epoch). * * @param instant an instant * @return a number of 100ns since 1582-10-15 (Gregorian epoch). */ public static long toGregTimestamp(final Instant instant) { final long seconds = (instant.getEpochSecond() - EPOCH_GREG_SECONDS) * TICKS_PER_SECOND; final long nanos = instant.getNano() / NANOS_PER_TICK; return seconds + nanos; } /** * Converts a number of 100ns since 1970-01-01 (Unix epoch) into an * {@link Instant}. * * @param unixTimestamp a number of 100ns since 1970-01-01 (Unix epoch) * @return an instant */ public static Instant fromUnixTimestamp(final long unixTimestamp) { final long seconds = unixTimestamp / TICKS_PER_SECOND; final long nanos = (unixTimestamp % TICKS_PER_SECOND) * NANOS_PER_TICK; return Instant.ofEpochSecond(seconds, nanos); } /** * Converts a number of 100ns since 1582-10-15 (Gregorian epoch) into an * {@link Instant}. * * @param gregTimestamp a number of 100ns since 1582-10-15 (Gregorian epoch) * @return an instant */ public static Instant fromGregTimestamp(final long gregTimestamp) { final long seconds = (gregTimestamp / TICKS_PER_SECOND) + EPOCH_GREG_SECONDS; final long nanos = (gregTimestamp % TICKS_PER_SECOND) * NANOS_PER_TICK; return Instant.ofEpochSecond(seconds, nanos); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidUtil.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.UUID128; import com.pinecone.ulf.util.guid.i128.enums.UuidVariant; import com.pinecone.ulf.util.guid.i128.enums.UuidVersion; import java.time.Instant; import java.util.Objects; import java.util.UUID; /** * Utility for checking UUID version, extracting UUID details, etc. */ public final class UuidUtil { private static final String MESSAGE_NOT_A_TIME_BASED_UUID = "Not a time-based, time-ordered or DCE Security UUID: %s."; private static final String MESSAGE_NOT_A_TIME_ORDERED_EPOCH_UUID = "Not a time-ordered with Unix Epoch UUID: %s."; private static final String MESSAGE_NOT_A_DCE_SECURITY_UUID = "Not a DCE Security UUID: %s."; private UuidUtil() { } /** * Get a copy of a UUID. *

* It is just a convenience method for cloning UUIDs. * * @param uuid a UUID * @return a copy of a UUID */ public static GUID128 copy(GUID128 uuid) { return new UUID128(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()) { }; } /** * Checks whether the UUID is equal to the Nil UUID. *

* The Nil UUID is a special UUID that has all 128 bits set to zero. * * @param uuid a UUID * @return boolean true if it is the Nil UUID * @exception NullPointerException if null */ public static boolean isNil(GUID128 uuid) { Objects.requireNonNull(uuid, "Null UUID is not equal to Nil UUID"); return uuid.getMostSignificantBits() == 0L && uuid.getLeastSignificantBits() == 0L; } /** * Checks whether the UUID is equal to the Max UUID. *

* The Max UUID is special UUID that has all 128 bits set to one. * * @param uuid a UUID * @return boolean true if it is an RFC 9562 variant * @exception NullPointerException if null */ public static boolean isMax(GUID128 uuid) { Objects.requireNonNull(uuid, "Null UUID is not equal to Max UUID"); return uuid.getMostSignificantBits() == -1L && uuid.getLeastSignificantBits() == -1L; } /** * Get the UUID version. * * @param uuid a UUID * @return a {@link UuidVersion} * @see UuidVersion */ public static UuidVersion getVersion(GUID128 uuid) { return UuidVersion.getVersion(uuid.version()); } /** * Get the UUID version. * * @param uuid a UUID * @return a {@link UuidVariant} * @see UuidVariant */ public static UuidVariant getVariant(GUID128 uuid) { return UuidVariant.getVariant(uuid.variant()); } /** * Applies UUID version bits into the UUID * * @param uuid a UUID * @param version a version * @return a UUID */ public static UUID setVersion(GUID128 uuid, int version) { long msb = uuid.getMostSignificantBits(); long lsb = uuid.getLeastSignificantBits(); msb = (msb & 0xffffffffffff0fffL) | ((version & 0x0000000f) << 12); // apply version lsb = (lsb & 0x3fffffffffffffffL) | 0x8000000000000000L; // apply variant return new UUID(msb, lsb); } /** * Checks whether the UUID variant is the one defined by the RFC 9562. * * @param uuid a UUID * @return boolean true if it is an RFC 9562 variant */ public static boolean isStandard(GUID128 uuid) { return isVariant(uuid, UuidVariant.VARIANT_STANDARD); } /** * Checks whether the UUID variant is reserved NCS. * * @param uuid a UUID * @return boolean true if it is an reserved NCS variant */ public static boolean isReservedNcs(GUID128 uuid) { return isVariant(uuid, UuidVariant.VARIANT_RESERVED_NCS); } /** * Checks whether the UUID variant is reserved Microsoft. * * @param uuid a UUID * @return boolean true if it is an reserved Microsoft variant */ public static boolean isReservedMicrosoft(GUID128 uuid) { return isVariant(uuid, UuidVariant.VARIANT_RESERVED_MICROSOFT); } /** * Checks whether the UUID variant is reserved future. * * @param uuid a UUID * @return boolean true if it is an reserved future variant */ public static boolean isReservedFuture(GUID128 uuid) { return isVariant(uuid, UuidVariant.VARIANT_RESERVED_FUTURE); } /** * Checks whether the UUID version 4. * * @param uuid a UUID * @return boolean true if it is a random UUID */ public static boolean isRandomBased(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_RANDOM_BASED); } /** * Checks whether the UUID version 3. * * @param uuid a UUID * @return boolean true if it is a name-based UUID */ public static boolean isNameBasedMd5(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_NAME_BASED_MD5); } /** * Checks whether the UUID version 5. * * @param uuid a UUID * @return boolean true if it is a name-based UUID */ public static boolean isNameBasedSha1(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_NAME_BASED_SHA1); } /** * Checks whether the UUID version 1. * * @param uuid a UUID * @return boolean true if it is a time-based UUID */ public static boolean isTimeBased(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_TIME_BASED); } /** * Checks whether the UUID version 6. * * @param uuid a UUID * @return boolean true if it is a time-ordered UUID */ public static boolean isTimeOrdered(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_TIME_ORDERED); } /** * Checks whether the UUID version 7. 
* * @param uuid a UUID * @return boolean true if it is a time-ordered with Unix Epoch UUID */ public static boolean isTimeOrderedEpoch(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_TIME_ORDERED_EPOCH); } /** * Checks whether the UUID version 2. * * @param uuid a UUID * @return boolean true if it is a DCE Security UUID */ public static boolean isDceSecurity(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_DCE_SECURITY); } /** * Checks whether the UUID version 8. * * @param uuid a UUID * @return boolean true if it is a custom UUID */ public static boolean isCustom(GUID128 uuid) { return isVersion(uuid, UuidVersion.VERSION_CUSTOM); } /** * Returns the instant from a time-based, time-ordered or DCE Security UUID. * * @param uuid a UUID * @return {@link Instant} * @throws IllegalArgumentException if the input is not a time-based, * time-ordered or DCE Security UUID. */ public static Instant getInstant(GUID128 uuid) { if (isTimeOrderedEpoch(uuid)) { final long unixTimestamp = getUnixTimestamp(uuid); return UuidTime.fromUnixTimestamp(unixTimestamp); } else { final long gregTimestamp = getGregTimestamp(uuid); return UuidTime.fromGregTimestamp(gregTimestamp); } } /** * Returns the time stamp from a time-based, time-ordered or DCE Security UUID. *

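* For instance (a sketch; 'uuid' is assumed to be an existing time-based GUID128):
* <pre>{@code
* long ticks = UuidUtil.getTimestamp(uuid); // 100ns ticks since the Gregorian epoch
* Instant when = UuidUtil.getInstant(uuid); // the same point in time as an Instant
* }</pre>
*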
* The value returned by this method is the number of 100-nanos since 1582-10-15 * (Gregorian epoch). * * @param uuid a UUID * @return long the timestamp * @throws IllegalArgumentException if the input is not a time-based, * time-ordered or DCE Security UUID. */ public static long getTimestamp(GUID128 uuid) { if (isTimeOrderedEpoch(uuid)) { return UuidTime.toGregTimestamp(getUnixTimestamp(uuid)); } else { return getGregTimestamp(uuid); } } private static long getUnixTimestamp(GUID128 uuid) { if (UuidUtil.isTimeOrderedEpoch(uuid)) { return getTimeOrderedEpochTimestamp(uuid.getMostSignificantBits()); } else { throw new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_ORDERED_EPOCH_UUID, uuid.toString())); } } private static long getGregTimestamp(GUID128 uuid) { if (UuidUtil.isTimeBased(uuid)) { return getTimeBasedTimestamp(uuid.getMostSignificantBits()); } else if (UuidUtil.isTimeOrdered(uuid)) { return getTimeOrderedTimestamp(uuid.getMostSignificantBits()); } else if (UuidUtil.isDceSecurity(uuid)) { return getTimeBasedTimestamp(uuid.getMostSignificantBits() & 0x00000000ffffffffL); } else { throw new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_BASED_UUID, uuid.toString())); } } /** * Get the node identifier from a time-based, time-ordered or DCE Security UUID. * * @param uuid a UUID * @return long the node identifier * @throws IllegalArgumentException if the input is not a time-based, * time-ordered or DCE Security UUID. */ public static long getNodeIdentifier(GUID128 uuid) { if (!(UuidUtil.isTimeBased(uuid) || UuidUtil.isTimeOrdered(uuid) || UuidUtil.isDceSecurity(uuid))) { throw new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_BASED_UUID, uuid.toString())); } return uuid.getLeastSignificantBits() & 0x0000ffffffffffffL; } /** * Get the clock sequence from a time-based, time-ordered or DCE Security UUID. * * @param uuid a UUID * @return int the clock sequence * @throws IllegalArgumentException if the input is not a time-based, * time-ordered or DCE Security UUID. */ public static int getClockSequence(GUID128 uuid) { if (!(UuidUtil.isTimeBased(uuid) || UuidUtil.isTimeOrdered(uuid)) || UuidUtil.isDceSecurity(uuid)) { throw new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_BASED_UUID, uuid.toString())); } if (UuidUtil.isDceSecurity(uuid)) { return (int) (uuid.getLeastSignificantBits() >>> 56) & 0x0000003f; } return (int) (uuid.getLeastSignificantBits() >>> 48) & 0x00003fff; } /** * Get the local domain number from a DCE Security UUID. * * @param uuid a UUID * @return the local domain * @throws IllegalArgumentException if the input is not a DCE Security UUID. */ public static byte getLocalDomain(GUID128 uuid) { if (!UuidUtil.isDceSecurity(uuid)) { throw new IllegalArgumentException(String.format(MESSAGE_NOT_A_DCE_SECURITY_UUID, uuid.toString())); } return (byte) ((uuid.getLeastSignificantBits() & 0x00ff000000000000L) >>> 48); } /** * Get the local identifier number from a DCE Security UUID. * * @param uuid a UUID * @return the local identifier * @throws IllegalArgumentException if the input is not a DCE Security UUID. */ public static int getLocalIdentifier(GUID128 uuid) { if (!UuidUtil.isDceSecurity(uuid)) { throw new IllegalArgumentException(String.format(MESSAGE_NOT_A_DCE_SECURITY_UUID, uuid.toString())); } return (int) (uuid.getMostSignificantBits() >>> 32); } /** * Check the UUID variant. 
* * @param uuid a UUID * @param variant a variant * @return true if the the the variant is correct * @exception NullPointerException if null */ private static boolean isVariant(GUID128 uuid, UuidVariant variant) { Objects.requireNonNull(uuid, "Null UUID"); return (uuid.variant() == variant.getValue()); } /** * Check the UUID version. * * @param uuid a UUID * @param variant a version * @return true if the the the version is correct * @exception NullPointerException if null */ private static boolean isVersion(GUID128 uuid, UuidVersion version) { Objects.requireNonNull(uuid, "Null UUID"); return isStandard(uuid) && (uuid.version() == version.getValue()); } private static long getTimeBasedTimestamp(long msb) { long hii = (msb & 0xffffffff00000000L) >>> 32; long mid = (msb & 0x00000000ffff0000L) << 16; long low = (msb & 0x0000000000000fffL) << 48; return (hii | mid | low); } private static long getTimeOrderedTimestamp(long msb) { long himid = (msb & 0xffffffffffff0000L) >>> 4; long low = (msb & 0x0000000000000fffL); return (himid | low); } private static long getTimeOrderedEpochTimestamp(long msb) { // 100ns ticks since 1970 final long ticksPerMilli = 10_000; // 1ms = 10,000 ticks return ((msb & 0xffffffffffff0000L) >>> 16) * ticksPerMilli; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidValidator.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util; import com.pinecone.framework.util.id.GUID; import com.pinecone.ulf.util.guid.i128.codec.base.Base16Codec; import com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException; import com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray; import java.util.UUID; /** * Utility for UUID validation. *

* Using it is much faster than using a regular expression. *

* Examples of valid string formats: *

*   • 12345678-abcd-abcd-abcd-123456789abc (32 hexadecimal chars and 4 hyphens, lower case)
*   • 12345678-ABCD-ABCD-ABCD-123456789ABC (32 hexadecimal chars and 4 hyphens, UPPER CASE)
*   • 12345678abcdabcdabcd123456789abc (32 hexadecimal chars, lower case, without hyphens)
*   • 12345678ABCDABCDABCD123456789ABC (32 hexadecimal chars, UPPER CASE, without hyphens)
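*
* A short usage sketch (the second call also pins the expected version number, 4 here):
* <pre>{@code
* boolean ok = UuidValidator.isValid("12345678-abcd-4bcd-8bcd-123456789abc"); // true
* boolean ok4 = UuidValidator.isValid("12345678-abcd-4bcd-8bcd-123456789abc", 4); // true: version 4, variant 2
* }</pre>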
*/ public final class UuidValidator { private static final ByteArray MAP = Base16Codec.INSTANCE.getBase().getMap(); private UuidValidator() { } /** * Checks if the UUID is valid. * * @param uuid a UUID * @return true if valid, false if invalid */ public static boolean isValid(final GUID uuid) { return uuid != null; } /** * Checks if the UUID is valid. * * @param uuid a UUID * @param version a version number * @return true if valid, false if invalid */ public static boolean isValid(final GUID uuid, int version) { return uuid != null && isVersion(uuid, version); } /** * Checks if the UUID byte array is valid. * * @param uuid a UUID byte array * @return true if valid, false if invalid */ public static boolean isValid(final byte[] uuid) { return uuid != null && uuid.length == 16; } /** * Checks if the UUID byte array is valid. * * @param uuid a UUID byte array * @param version a version number * @return true if valid, false if invalid */ public static boolean isValid(final byte[] uuid, int version) { return uuid != null && uuid.length == 16 && isVersion(uuid, version); } /** * Checks if the UUID string is valid. * * @param uuid a UUID string * @return true if valid, false if invalid */ public static boolean isValid(final String uuid) { return uuid != null && uuid.length() != 0 && isParseable(uuid.toCharArray()); } /** * Checks if the UUID string is valid. * * @param uuid a UUID string * @param version a version number * @return true if valid, false if invalid */ public static boolean isValid(final String uuid, int version) { return uuid != null && uuid.length() != 0 && isParseable(uuid.toCharArray(), version); } /** * Checks if the UUID char array is valid. * * @param uuid a UUID char array * @return true if valid, false if invalid */ public static boolean isValid(final char[] uuid) { return uuid != null && uuid.length != 0 && isParseable(uuid); } /** * Checks if the UUID char array is valid. * * @param uuid a UUID char array * @param version a version number * @return true if valid, false if invalid */ public static boolean isValid(final char[] uuid, int version) { return uuid != null && uuid.length != 0 && isParseable(uuid, version); } /** * Checks if the UUID is valid. * * @param uuid a UUID * @throws InvalidUuidException if the argument is invalid */ public static void validate(final GUID uuid) { if (uuid == null) { throw InvalidUuidException.newInstance(null); } } /** * Checks if the UUID is valid. * * @param uuid a UUID * @param version a version number * @throws InvalidUuidException if the argument is invalid */ public static void validate(final GUID uuid, int version) { if (uuid == null || !isVersion(uuid, version)) { throw InvalidUuidException.newInstance(uuid); } } /** * Checks if the UUID byte array is valid. * * @param uuid a UUID byte array * @throws InvalidUuidException if the argument is invalid */ public static void validate(final byte[] uuid) { if (uuid == null || uuid.length != 16) { throw InvalidUuidException.newInstance(uuid); } } /** * Checks if the UUID byte array is valid. * * @param uuid a UUID byte array * @param version a version number * @throws InvalidUuidException if the argument is invalid */ public static void validate(final byte[] uuid, int version) { if (uuid == null || uuid.length != 16 || !isVersion(uuid, version)) { throw InvalidUuidException.newInstance(uuid); } } /** * Checks if the UUID string is a valid. 
* * @param uuid a UUID string * @throws InvalidUuidException if the argument is invalid */ public static void validate(final String uuid) { if (uuid == null || !isParseable(uuid.toCharArray())) { throw InvalidUuidException.newInstance(uuid); } } /** * Checks if the UUID string is a valid. * * @param uuid a UUID string * @param version a version number * @throws InvalidUuidException if the argument is invalid */ public static void validate(final String uuid, int version) { if (uuid == null || !isParseable(uuid.toCharArray(), version)) { throw InvalidUuidException.newInstance(uuid); } } /** * Checks if the UUID char array is valid. * * @param uuid a UUID char array * @throws InvalidUuidException if the argument is invalid */ public static void validate(final char[] uuid) { if (uuid == null || !isParseable(uuid)) { throw InvalidUuidException.newInstance(uuid); } } /** * Checks if the UUID char array is valid. * * @param uuid a UUID char array * @param version a version number * @throws InvalidUuidException if the argument is invalid */ public static void validate(final char[] uuid, int version) { if (uuid == null || !isParseable(uuid, version)) { throw InvalidUuidException.newInstance(uuid); } } private static final int[] DASH_POSITIONS = {8, 13, 18, 23}; private static final int WITH_DASH_UUID_LENGTH = 36; private static final int WITHOUT_DASH_UUID_LENGTH = 32; private static final int MAX_DASH_COUNT = 4; /** * Checks if the UUID char array can be parsed. * * @param chars a char array * @return true if valid, false if invalid */ protected static boolean isParseable(final char[] chars) { int dashCount = 0; for (int i = 0; i < chars.length; i++) { if (chars[i] > MAP.length() || MAP.get(chars[i]) == -1) { if (chars[i] == '-') { dashCount++; continue; } return false; // invalid character! } } if (chars.length == WITH_DASH_UUID_LENGTH && dashCount == MAX_DASH_COUNT) { // check if the hyphens positions are correct return chars[DASH_POSITIONS[0]] == '-' && chars[DASH_POSITIONS[1]] == '-' && chars[DASH_POSITIONS[2]] == '-' && chars[DASH_POSITIONS[3]] == '-'; } return chars.length == WITHOUT_DASH_UUID_LENGTH && dashCount == 0; } /** * Checks if the UUID char array can be parsed. * * @param chars a char array * @param version a version number * @return true if valid, false if invalid */ protected static boolean isParseable(final char[] chars, int version) { return isVersion(chars, version) && isParseable(chars); } /** * Checks the version number of a UUID. * * @param uuid a UUID * @param version a version number * @return true if the UUID version is equal to the expected version number */ protected static boolean isVersion(GUID uuid, int version) { // boolean versionOk = ((version & ~0xf) == 0) && (uuid.version() == version); // boolean variantOk = uuid.variant() == 2; // RFC 9562 // return versionOk && variantOk; return true; } /** * Checks the version number of a UUID byte array. * * @param bytes a byte array * @param version a version number * @return true if the UUID version is equal to the expected version number */ protected static boolean isVersion(byte[] bytes, int version) { boolean versionOk = ((version & ~0xf) == 0) && (((bytes[6] & 0xff) >>> 4) == version); boolean variantOk = ((bytes[8] & 0xff) >>> 6) == 2; // RFC 9562 return versionOk && variantOk; } /** * Checks the version number of a UUID char array. 
* * @param chars a string * @param version a version number * @return true if the UUID version is equal to the expected version number */ protected static boolean isVersion(char[] chars, int version) { // valid if between 0x0 and 0xf if ((version & ~0xf) != 0) { return false; } int ver = 0; // version index int var = 0; // variant index switch (chars.length) { case 32: // without hyphen ver = 12; var = 16; break; case 36: // with hyphen ver = 14; var = 19; break; default: return false; } final char[] lower = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; final char[] upper = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' }; boolean versionOk = ((version & ~0xf) == 0) && (chars[ver] == lower[version] || chars[ver] == upper[version]); boolean variantOk = chars[var] == '8' || chars[var] == '9' // || chars[var] == 'a' || chars[var] == 'b' || chars[var] == 'A' || chars[var] == 'B'; return versionOk && variantOk; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/immutable/ByteArray.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util.immutable; import java.util.Arrays; /** * Immutable array of bytes. */ public final class ByteArray { private final byte[] array; private ByteArray(byte[] a) { array = Arrays.copyOf(a, a.length); } /** * Creates an instance of this class. * * @param a an array of bytes * @return a new instance */ public static ByteArray from(byte[] a) { return new ByteArray(a); } /** * Return the byte at a position. * * @param index the position * @return a byte */ public byte get(int index) { return array[index]; } /** * Returns the array length * * @return the length */ public int length() { return this.array.length; } /** * Returns copy of the array. 
* * @return an array of bytes */ public byte[] array() { return Arrays.copyOf(array, array.length); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(array); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ByteArray other = (ByteArray) obj; return Arrays.equals(array, other.array); } @Override public String toString() { return "ByteArray [array=" + Arrays.toString(array) + "]"; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/immutable/CharArray.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util.immutable; import java.util.Arrays; /** * Immutable array of chars. */ public final class CharArray { private final char[] array; private CharArray(char[] a) { array = Arrays.copyOf(a, a.length); } /** * Creates an instance of this class. * * @param a an array of chars. * @return a new instance */ public static CharArray from(char[] a) { return new CharArray(a); } /** * Return the char at a position. * * @param index the position * @return a char */ public char get(int index) { return array[index]; } /** * Returns the array length. * * @return the length */ public int length() { return this.array.length; } /** * Returns copy of the array. 
* * @return an array of chars */ public char[] array() { return array.clone(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(array); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; CharArray other = (CharArray) obj; return Arrays.equals(array, other.array); } @Override public String toString() { return "CharArray [array=" + Arrays.toString(array) + "]"; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/ByteUtil.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util.internal; /** * Utility class that contains many static methods for byte handling. */ public final class ByteUtil { private ByteUtil() { } /** * Get a number from a given array of bytes. * * @param bytes a byte array * @return a long */ public static long toNumber(final byte[] bytes) { return toNumber(bytes, 0, bytes.length); } /** * Get a number from a given array of bytes. * * @param bytes a byte array * @param start first byte of the array * @param end last byte of the array (exclusive) * @return a long */ public static long toNumber(final byte[] bytes, final int start, final int end) { long result = 0; for (int i = start; i < end; i++) { result = (result << 8) | (bytes[i] & 0xffL); } return result; } /** * Get a hexadecimal string from given array of bytes. * * @param bytes byte array * @return a string */ public static String toHexadecimal(final byte[] bytes) { final char[] chars = new char[bytes.length * 2]; for (int i = 0, j = 0; i < bytes.length; i++, j += 2) { final int v = bytes[i] & 0xff; chars[j] = toHexChar(v >>> 4); chars[j + 1] = toHexChar(v & 0x0f); } return new String(chars); } /** * Get a hexadecimal from a number value. * * @param number a number * @return a char */ private static char toHexChar(final int number) { if (number >= 0x00 && number <= 0x09) { // ASCII codes from 0 to 9 return (char) (0x30 + number); } else if (number >= 0x0a && number <= 0x0f) { // ASCII codes from 'a' to 'f' return (char) (0x57 + number); } return 0; } /** * Converts an array of bytes into an array of integers. Each integer is formed by combining 4 bytes * from the input array. 
This method assumes that the input byte array is at least 16 bytes long. * The conversion is done by treating each set of 4 bytes as a single integer, with the first byte being the most significant. * * @param bytes An array of bytes to be converted into integers. This array should be at least 16 bytes long. * @return An array of 4 integers, where each integer is formed by combining 4 bytes from the input array. */ public static int[] toInts(byte[] bytes) { int[] ints = new int[4]; ints[0] |= (bytes[0x0] & 0xff) << 24; ints[0] |= (bytes[0x1] & 0xff) << 16; ints[0] |= (bytes[0x2] & 0xff) << 8; ints[0] |= (bytes[0x3] & 0xff); ints[1] |= (bytes[0x4] & 0xff) << 24; ints[1] |= (bytes[0x5] & 0xff) << 16; ints[1] |= (bytes[0x6] & 0xff) << 8; ints[1] |= (bytes[0x7] & 0xff); ints[2] |= (bytes[0x8] & 0xff) << 24; ints[2] |= (bytes[0x9] & 0xff) << 16; ints[2] |= (bytes[0xa] & 0xff) << 8; ints[2] |= (bytes[0xb] & 0xff); ints[3] |= (bytes[0xc] & 0xff) << 24; ints[3] |= (bytes[0xd] & 0xff) << 16; ints[3] |= (bytes[0xe] & 0xff) << 8; ints[3] |= (bytes[0xf] & 0xff); return ints; } /** * Converts an array of integers into an array of bytes. Each integer is decomposed into 4 bytes, * with the most significant byte being placed first. This method produces a byte array of length 16, * assuming the input array contains exactly 4 integers. The conversion is performed by shifting * and masking operations to extract each byte from the integers. * * @param ints An array of integers to be converted into bytes. This array should contain exactly 4 integers. * @return A byte array of length 16, where each group of 4 bytes represents one of the integers from the input array. */ public static byte[] fromInts(int[] ints) { byte[] bytes = new byte[16]; bytes[0x0] = (byte) (ints[0] >>> 24); bytes[0x1] = (byte) (ints[0] >>> 16); bytes[0x2] = (byte) (ints[0] >>> 8); bytes[0x3] = (byte) (ints[0]); bytes[0x4] = (byte) (ints[1] >>> 24); bytes[0x5] = (byte) (ints[1] >>> 16); bytes[0x6] = (byte) (ints[1] >>> 8); bytes[0x7] = (byte) (ints[1]); bytes[0x8] = (byte) (ints[2] >>> 24); bytes[0x9] = (byte) (ints[2] >>> 16); bytes[0xa] = (byte) (ints[2] >>> 8); bytes[0xb] = (byte) (ints[2]); bytes[0xc] = (byte) (ints[3] >>> 24); bytes[0xd] = (byte) (ints[3] >>> 16); bytes[0xe] = (byte) (ints[3] >>> 8); bytes[0xf] = (byte) (ints[3]); return bytes; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/JavaVersionUtil.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util.internal; public class JavaVersionUtil { /** * Returns the java major version number. * * @see JDK Releases * @return major version number */ public static int getJavaVersion() { try { String property = System.getProperty("java.version"); if (property != null) { String[] version = property.split("\\."); if (version[0].equals("1")) { return Integer.parseInt(version[1]); } else { return Integer.parseInt(version[0]); } } else { return 8; } } catch (NumberFormatException | IndexOutOfBoundsException e) { return 8; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/NetworkUtil.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util.internal; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.net.UnknownHostException; import java.util.Enumeration; /** * Utility class that returns host name, MAC and IP. */ public final class NetworkUtil { private static String hostname; private static String mac; private static String ip; private NetworkUtil() { } /** * Returns the host name if found. *

* Sequence of HOSTNAME search: *

*   1. Try to find the HOSTNAME variable in the LINUX environment;
*   2. Try to find the COMPUTERNAME variable in the WINDOWS environment;
*   3. Try to find the host name by calling {@code InetAddress.getLocalHost().getHostName()} (the expensive way);
*   4. If no host name is found, return {@code null}.
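*
* A hedged sketch of a caller guarding against the {@code null} fallback ("unknown-host"
* is a hypothetical default, not part of this class):
* <pre>{@code
* String host = NetworkUtil.hostname();
* if (host == null) {
*     host = "unknown-host";
* }
* }</pre>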
* * @return a string containing the host name */ public static synchronized String hostname() { if (hostname != null) { return hostname; } // try to find HOSTNAME on LINUX hostname = System.getenv("HOSTNAME"); if (hostname != null && !hostname.isEmpty()) { return hostname; } // try to find COMPUTERNAME on WINDOWS hostname = System.getenv("COMPUTERNAME"); if (hostname != null && !hostname.isEmpty()) { return hostname; } try { // try to find HOSTNAME for the local host hostname = InetAddress.getLocalHost().getHostName(); if (hostname != null && !hostname.isEmpty()) { return hostname; } } catch (UnknownHostException e) { return null; } // not found return null; } /** * Returns the MAC address if found. *

* Output format: "00-00-00-00-00-00" (in upper case) * * @param nic a network interface * @return a string containing the MAC address */ public static synchronized String mac(NetworkInterface nic) { if (mac != null) { return mac; } try { if (nic != null && nic.getHardwareAddress() != null) { byte[] ha = nic.getHardwareAddress(); String[] hex = new String[ha.length]; for (int i = 0; i < ha.length; i++) { hex[i] = String.format("%02X", ha[i]); } mac = String.join("-", hex); return mac; } } catch (SocketException e) { return null; } // not found return null; } /** * Returns the IP address if found. *

* Output format: "0.0.0.0" (if IPv4) * * @param nic a network interface * @return a string containing the IP address */ public static synchronized String ip(NetworkInterface nic) { if (ip != null) { return ip; } if (nic != null) { Enumeration ips = nic.getInetAddresses(); if (ips.hasMoreElements()) { ip = ips.nextElement().getHostAddress(); return ip; } } // not found return null; } /** * Returns a string containing host name, MAC and IP. *

* Output format: "hostname 11-11-11-11-11-11 222.222.222.222" * * @return a string containing the host name, MAC and IP */ public static synchronized String getMachineString() { NetworkInterface nic = nic(); String hostname = NetworkUtil.hostname(); String mac = NetworkUtil.mac(nic); String ip = NetworkUtil.ip(nic); return String.join(" ", hostname, mac, ip); } /** * Returns a network interface. *

* It tries to return the network interface associated with the host name.
*
* If that network interface is not found, it tries to return the first network
* interface that satisfies these conditions (see the sketch below):
*   • it is up and running;
*   • it is not loopback;
*   • it is not virtual;
*   • it has a hardware address.
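*
* A sketch of that acceptability test (it mirrors the private {@code acceptable(NetworkInterface)}
* check; the real method also guards against {@code SocketException}):
* {@code
*   boolean ok = nic != null && nic.isUp() && !nic.isLoopback() && !nic.isVirtual()
*             && nic.getHardwareAddress() != null
*             && nic.getHardwareAddress().length == 6;
* }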

* If no acceptable network interface is found, it returns null. * * @return a network interface. */ public static synchronized NetworkInterface nic() { try { InetAddress ip = null; NetworkInterface nic = null; Enumeration enu = null; // try to find the network interface for the host name ip = InetAddress.getByName(hostname()); nic = NetworkInterface.getByInetAddress(ip); if (acceptable(nic)) { return nic; } // try to find the first network interface enu = NetworkInterface.getNetworkInterfaces(); while (enu.hasMoreElements()) { nic = enu.nextElement(); if (acceptable(nic)) { return nic; } } } catch (UnknownHostException | SocketException e) { return null; } // NIC not found return null; } /** * Checks if the network interface is acceptable. * * @param nic a network interface * @return true if acceptable */ private static synchronized boolean acceptable(NetworkInterface nic) { try { if (nic != null && nic.isUp() && !nic.isLoopback() && !nic.isVirtual()) { byte[] mac = nic.getHardwareAddress(); if (mac != null && mac.length == 6) { return true; } } } catch (SocketException e) { return false; } return false; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/RandomUtil.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util.internal; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.Random; import java.util.concurrent.locks.ReentrantLock; /** * Utility class that provides random generator services. *

* The current implementation uses a pool of {@link SecureRandom}.
*
* The pool size depends on the number of processors available, up to a maximum
* of 32. The minimum is 4.
*
* The pool items are deleted very often to avoid holding them for too long.
* They are also deleted to avoid holding more instances than threads running.
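*
* A minimal sketch of the pool-size rule above (variable names are illustrative):
* {@code
*   int processors = Runtime.getRuntime().availableProcessors();
*   int poolSize = Math.max(4, Math.min(32, processors)); // clamped to [4, 32]
* }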

* The PRNG algorithm can be specified by system property or environment * variable. See {@link RandomUtil#newSecureRandom()}. */ public final class RandomUtil { private RandomUtil() { } /** * Returns a random 64-bit number. * * @return a number */ public static long nextLong() { return SecureRandomPool.nextLong(); } /** * Returns an array of random bytes. * * @param length the array length * @return a byte array */ public static byte[] nextBytes(final int length) { return SecureRandomPool.nextBytes(length); } /** * Returns a new instance of {@link SecureRandom}. *

* It tries to create an instance with the algorithm name specified in the
* system property `uuidcreator.securerandom` or in the environment variable
* `UUIDCREATOR_SECURERANDOM`. If the algorithm name is not supported by the
* runtime, it returns an instance with the default algorithm.
*
* It can be useful to use SHA1PRNG or DRBG as a non-blocking source of
* random bytes. The SHA1PRNG algorithm is the default on operating systems that
* don't have '/dev/random', e.g., on Windows. The DRBG algorithm is available
* in JDK 9+.
*
* To control the algorithm used by this method, set the system property
* `uuidcreator.securerandom` or the environment variable
* `UUIDCREATOR_SECURERANDOM` as in the examples below.
*
* System property:
*
* {@code
	 * # Use the algorithm SHA1PRNG for SecureRandom
	 * -Duuidcreator.securerandom="SHA1PRNG"
	 * 
	 * # Use the algorithm DRBG for SecureRandom (JDK9+)
	 * -Duuidcreator.securerandom="DRBG"
	 * }
*
* Environment variable:
*
* {@code
	 * # Use the algorithm SHA1PRNG for SecureRandom
	 * export UUIDCREATOR_SECURERANDOM="SHA1PRNG"
	 * 
	 * # Use the algorithm DRBG for SecureRandom (JDK9+)
	 * export UUIDCREATOR_SECURERANDOM="DRBG"
	 * }
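*
* The same selection logic, sketched in plain code (it mirrors the method body below; it
* falls back to the default algorithm when the configured name is unsupported):
* {@code
*   String algorithm = SettingsUtil.getSecureRandom();
*   SecureRandom random;
*   try {
*       random = (algorithm != null) ? SecureRandom.getInstance(algorithm) : new SecureRandom();
*   } catch (NoSuchAlgorithmException e) {
*       random = new SecureRandom();
*   }
* }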
* * @return a new {@link SecureRandom}. */ public static SecureRandom newSecureRandom() { String algorithm = SettingsUtil.getSecureRandom(); if (algorithm != null) { try { return SecureRandom.getInstance(algorithm); } catch (NoSuchAlgorithmException e) { return new SecureRandom(); } } return new SecureRandom(); } private static class SecureRandomPool { private static final Random random = new Random(); private static final int POOL_SIZE = processors(); private static final Random[] POOL = new Random[POOL_SIZE]; private static final ReentrantLock lock = new ReentrantLock(); private SecureRandomPool() { } public static long nextLong() { return ByteUtil.toNumber(nextBytes(Long.BYTES)); } public static byte[] nextBytes(final int length) { final byte[] bytes = new byte[length]; current().nextBytes(bytes); // every now and then if (bytes.length > 0 && bytes[0x00] == 0) { // delete a random item from the pool delete(random.nextInt(POOL_SIZE)); } return bytes; } private static Random current() { // calculate the pool index given the current thread ID final int index = (int) Thread.currentThread().getId() % POOL_SIZE; lock.lock(); try { // lazy loading instance if (POOL[index] == null) { POOL[index] = RandomUtil.newSecureRandom(); } return POOL[index]; } finally { lock.unlock(); } } private static void delete(int index) { lock.lock(); try { POOL[index] = null; } finally { lock.unlock(); } } private static int processors() { final int min = 4; final int max = 32; // get the number of processors from the runtime final int processors = Runtime.getRuntime().availableProcessors(); if (processors < min) { return min; } else if (processors > max) { return max; } return processors; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/SettingsUtil.java ================================================ /* * MIT License * * Copyright (c) 2018-2025 Fabio Lima * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.pinecone.ulf.util.guid.i128.util.internal; /** * Utility class that reads system properties and environment variables. *

* List of system properties:
*   • uuidcreator.node
*   • uuidcreator.securerandom
*
* List of environment variables:
*   • UUIDCREATOR_NODE
*   • UUIDCREATOR_SECURERANDOM
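*
* For example (the value is illustrative):
* {@code
*   // -Duuidcreator.node=0x1A   or   export UUIDCREATOR_NODE=0x1A
*   Long node = SettingsUtil.getNodeIdentifier(); // Long.decode("0x1A") == 26L
* }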

* System properties has prevalence over environment variables. */ public final class SettingsUtil { /** * The property name prefix. */ protected static final String PROPERTY_PREFIX = "uuidcreator"; /** * The property name for the node number. */ public static final String PROPERTY_NODE = "node"; /** * The property name for the secure random algorithm. */ public static final String PROPERTY_SECURERANDOM = "securerandom"; /** * Default constructor. */ protected SettingsUtil() { } /** * Get the node identifier. * * @return a number */ public static Long getNodeIdentifier() { String value = getProperty(PROPERTY_NODE); if (value == null) { return null; } try { return Long.decode(value); } catch (NumberFormatException e) { return null; } } /** * Set the node identifier * * @param node a number */ public static void setNodeIdentifier(Long node) { String value = Long.toString(node); setProperty(PROPERTY_NODE, value); } /** * Get the secure random algorithm. * * @return a string */ public static String getSecureRandom() { return getProperty(PROPERTY_SECURERANDOM); } /** * Set the secure random algorithm * * @param algorithm a string */ public static void setSecureRandom(String algorithm) { setProperty(PROPERTY_SECURERANDOM, algorithm); } /** * Get a property. * * @param name the name * @return a string */ public static String getProperty(String name) { String fullName = getPropertyName(name); String value = System.getProperty(fullName); if (!isEmpty(value)) { return value; } fullName = getEnvinronmentName(name); value = System.getenv(fullName); if (!isEmpty(value)) { return value; } return null; } /** * Set a property. * * @param key the key * @param value the value */ public static void setProperty(String key, String value) { System.setProperty(getPropertyName(key), value); } /** * Clear a property. * * @param key the key */ public static void clearProperty(String key) { System.clearProperty(getPropertyName(key)); } /** * Get a property name. * * @param key a key * @return a string */ protected static String getPropertyName(String key) { return String.join(".", PROPERTY_PREFIX, key); } /** * Get an environment variable name. * * @param key a key * @return a string */ protected static String getEnvinronmentName(String key) { return String.join("_", PROPERTY_PREFIX, key).toUpperCase().replace(".", "_"); } private static boolean isEmpty(String value) { return value == null || value.isEmpty(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/BitsAllocator.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64; import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.commons.lang.builder.ToStringStyle; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.Assert; /** * Allocate 64 bits for the UID(long)
* sign (fixed 1bit) -> deltaSecond -> workerId -> sequence(within the same second) * * @author yutianbao */ public class BitsAllocator implements Pinenut { /** * Total 64 bits */ public static final int TOTAL_BITS = 1 << 6; /** * Bits for [sign-> second-> workId-> sequence] */ private int signBits = 1; private final int timestampBits; private final int workerIdBits; private final int sequenceBits; /** * Max value for workId & sequence */ private final long maxDeltaSeconds; private final long maxWorkerId; private final long maxSequence; /** * Shift for timestamp & workerId */ private final int timestampShift; private final int workerIdShift; /** * Constructor with timestampBits, workerIdBits, sequenceBits
* The highest bit used for sign, so 63 bits for timestampBits, workerIdBits, sequenceBits */ public BitsAllocator( int timestampBits, int workerIdBits, int sequenceBits ) { // make sure allocated 64 bits int allocateTotalBits = signBits + timestampBits + workerIdBits + sequenceBits; Assert.isTrue(allocateTotalBits == TOTAL_BITS, "allocate not enough 64 bits"); // initialize bits this.timestampBits = timestampBits; this.workerIdBits = workerIdBits; this.sequenceBits = sequenceBits; // initialize max value this.maxDeltaSeconds = ~(-1L << timestampBits); this.maxWorkerId = ~(-1L << workerIdBits); this.maxSequence = ~(-1L << sequenceBits); // initialize shift this.timestampShift = workerIdBits + sequenceBits; this.workerIdShift = sequenceBits; } /** * Allocate bits for UID according to delta seconds & workerId & sequence
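* A worked example, assuming the (1, 28, 22, 13) bit split used by {@code GuidAllocator72V2}
* (so timestampShift = 22 + 13 = 35 and workerIdShift = 13; the input values are illustrative):
* {@code
*   // allocate(deltaSeconds = 5, workerId = 3, sequence = 7)
*   // = (5L << 35) | (3L << 13) | 7L
* }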
* Note that: The highest bit will always be 0 for sign * * @param deltaSeconds * @param workerId * @param sequence * @return */ public long allocate( long deltaSeconds, long workerId, long sequence ) { return (deltaSeconds << timestampShift) | (workerId << workerIdShift) | sequence; } /** * Getters */ public int getSignBits() { return signBits; } public int getTimestampBits() { return timestampBits; } public int getWorkerIdBits() { return workerIdBits; } public int getSequenceBits() { return sequenceBits; } public long getMaxDeltaSeconds() { return maxDeltaSeconds; } public long getMaxWorkerId() { return maxWorkerId; } public long getMaxSequence() { return maxSequence; } public int getTimestampShift() { return timestampShift; } public int getWorkerIdShift() { return workerIdShift; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GUID64.java ================================================ package com.pinecone.ulf.util.guid.i64; import com.pinecone.framework.util.Bytes; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.Identification; import com.pinecone.framework.util.id.IllegalIdentificationException; public class GUID64 implements GUID { public static final long SignBits = 1; public static final long TimestampBits = 29; public static final long WorkerIdBits = 21; public static final long SequenceBits = 13; public static final int Sizeof = 8; protected long guid; public GUID64() { } public GUID64( String hexID64 ) { this.parse( hexID64 ); } public GUID64( long guid ) { this.guid = guid; } public long getSequence() { long totalBits = BitsAllocator.TOTAL_BITS; return (this.guid << (totalBits - GUID64.SequenceBits)) >>> (totalBits - GUID64.SequenceBits); } public long getWorkerId() { long totalBits = BitsAllocator.TOTAL_BITS; return (this.guid << (GUID64.TimestampBits + GUID64.SignBits)) >>> (totalBits - GUID64.WorkerIdBits); } public long getDeltaSeconds() { return this.guid >>> (GUID64.WorkerIdBits + GUID64.SequenceBits); } protected void parseByStringParts( String[] parts ) throws IllegalIdentificationException { try{ // 将十六进制字符串转换为十进制整数 long deltaSeconds = Long.parseLong(parts[0], 16); long workerId = Long.parseLong(parts[1], 16); long sequence = Long.parseLong(parts[2], 16); long deltaSecondsPart = deltaSeconds << (GUID64.WorkerIdBits + GUID64.SequenceBits); long workerIdPart = workerId << GUID64.SequenceBits; this.guid = deltaSecondsPart | workerIdPart | sequence; } catch ( RuntimeException e ) { throw new IllegalIdentificationException( e ); } } @Override public Identification parse( String hexID64 ) throws IllegalIdentificationException { // 分离UUID的各个部分 String[] parts = hexID64.split("-"); this.parseByStringParts( parts ); return this; } @Override public String toString(){ String deltaSecondsHex = String.format( "%07x", this.getDeltaSeconds() ); String workerIdHex = String.format( "%06x", this.getWorkerId() ); String sequenceHex = String.format( "%04x", this.getSequence() ); return deltaSecondsHex + "-" + workerIdHex + "-" + sequenceHex; } @Override public String toJSONString() { return "\"" + this.toString() + "\""; } @Override public boolean equals( Object obj ) { if( !super.equals(obj) ) { if( obj instanceof GUID64 ) { return this.guid == ((GUID64) obj).guid; } } return false; } @Override public int hashCode() { return Long.hashCode( this.guid ); } 
@Override public long hashCode64() { return this.guid; } @Override public int intVal() { return (int) this.guid; } @Override public long longVal() { return this.guid; } @Override public byte[] toBytesBE() { return Bytes.int64ToBytesBE( this.guid ); } @Override public byte[] toBytesLE() { return Bytes.int64ToBytesLE( this.guid ); } @Override public int sizeof() { return Sizeof; } @Override public int compareTo( Identification that ) { GUID64 val; if ( that instanceof GUID64 ) { val = (GUID64) that; } else { throw new IllegalArgumentException( "Not GUID64" ); } return Long.compare( this.guid, val.guid ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GUID72.java ================================================ package com.pinecone.ulf.util.guid.i64; import com.pinecone.framework.util.id.IllegalIdentificationException; public class GUID72 extends GUID64 { public static final int Sizeof = 9; // 8 bytes for GUID64 + 1 byte for nanoSeed private byte nanoSeed; public GUID72() { } public GUID72( String hexID72 ) { this.parse( hexID72 ); } public GUID72( long guid64, byte nanoSeed ) { super( guid64 ); this.nanoSeed = nanoSeed; } public int getNanoSeed() { return this.nanoSeed; } public void setNanoSeed( byte nanoSeed ) { this.nanoSeed = nanoSeed; } @Override public GUID72 parse( String hexID72 ) throws IllegalIdentificationException { //Debug.trace( "解析字符串"+hexID72 ); try{ String[] parts = hexID72.split("-"); this.parseByStringParts( parts ); this.nanoSeed = (byte) Integer.parseInt( parts[3], 16 ); } catch ( NumberFormatException | IndexOutOfBoundsException e ) { throw new IllegalIdentificationException( e ); } return this; } @Override public byte[] toBytesLE() { byte[] b = new byte[9]; b[0] = (byte) this.guid; b[1] = (byte) (this.guid >> 8); b[2] = (byte) (this.guid >> 16); b[3] = (byte) (this.guid >> 24); b[4] = (byte) (this.guid >> 32); b[5] = (byte) (this.guid >> 40); b[6] = (byte) (this.guid >> 48); b[7] = (byte) (this.guid >> 56); b[8] = this.nanoSeed; return b; } @Override public byte[] toBytesBE() { byte[] b = new byte[9]; b[0] = (byte) (this.guid >> 56); b[1] = (byte) (this.guid >> 48); b[2] = (byte) (this.guid >> 40); b[3] = (byte) (this.guid >> 32); b[4] = (byte) (this.guid >> 24); b[5] = (byte) (this.guid >> 16); b[6] = (byte) (this.guid >> 8); b[7] = (byte) this.guid; b[8] = this.nanoSeed; return b; } @Override public String toString() { String nanoSeedHex = String.format( "%02x", this.nanoSeed ); return super.toString() + "-" + nanoSeedHex; } @Override public int sizeof() { return Sizeof; } @Override public String toJSONString() { return "\"" + this.toString() + "\""; } @Override public boolean equals( Object obj ) { boolean b = false; if( obj instanceof GUID72 ) { b = this.nanoSeed == ((GUID72) obj).nanoSeed; } return super.equals(obj) && b; } @Override public int hashCode() { return Long.hashCode( this.guid ) ^ Byte.hashCode( this.nanoSeed ); } @Override public long hashCode64() { return super.hashCode64() ^ Byte.hashCode( this.nanoSeed ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GuidAllocator64.java ================================================ package com.pinecone.ulf.util.guid.i64; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.id.GuidGenerateException; import com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner; public interface 
GuidAllocator64 extends GuidAllocator { long nextGUIDi64() throws GuidGenerateException; String explain( long guid64 ); GUID nextGUID64(); void setWorkerIdAssigner( WorkerIdAssigner workerIdAssigner ); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GuidAllocator72.java ================================================ package com.pinecone.ulf.util.guid.i64; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.id.GuidGenerateException; import com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner; public interface GuidAllocator72 extends GuidAllocator64 { GUID nextGUID72(); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GuidAllocator72V2.java ================================================ package com.pinecone.ulf.util.guid.i64; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidGenerateException; import com.pinecone.ulf.util.guid.i64.utils.DateUtils; import com.pinecone.ulf.util.guid.i64.worker.GenericDisposableWorkerIdAssigner; import com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import java.time.LocalDateTime; import java.time.temporal.ChronoUnit; import java.util.Date; import java.util.concurrent.TimeUnit; public class GuidAllocator72V2 implements GuidAllocator72, InitializingBean { private static final Logger LOGGER = LoggerFactory.getLogger(GuidAllocator72V2.class); /** Bits allocate */ protected int timeBits = 28; protected int workerBits = 22; protected int seqBits = 13; /** Customer epoch, unit as second. For example 2016-05-20 (ms: 1463673600000)*/ // TODO protected String epochStr = "2024-10-01"; protected long epochSeconds = TimeUnit.MILLISECONDS.toSeconds( DateUtils.parseByDayPattern( this.epochStr ).getTime() ); /** Stable fields after spring bean initializing */ protected BitsAllocator bitsAllocator; protected long workerId; /** Volatile fields caused by nextId() */ protected long sequence = 0L; protected long lastSecond = -1L; /** Spring property */ protected WorkerIdAssigner workerIdAssigner; public GuidAllocator72V2() { this( new GenericDisposableWorkerIdAssigner() ); } public GuidAllocator72V2(WorkerIdAssigner idAssigner ) { this.workerIdAssigner = idAssigner; this.afterPropertiesSet(); } @Override public void afterPropertiesSet() { // initialize bits allocator this.bitsAllocator = new BitsAllocator(this.timeBits, this.workerBits, this.seqBits); // initialize worker id this.workerId = this.workerIdAssigner.assignWorkerId(); if ( this.workerId > this.bitsAllocator.getMaxWorkerId() ) { throw new IllegalStateException( "Worker id " + this.workerId + " exceeds the max " + this.bitsAllocator.getMaxWorkerId() ); } LOGGER.info( "Initialized bits(1, {}, {}, {}) for workerID:{}", this.timeBits, this.workerBits, this.seqBits, this.workerId ); } @Override public long nextGUIDi64() throws GuidGenerateException { try { return this.nextId(); } catch ( Exception e ) { LOGGER.error("Generate unique id exception. 
", e); throw new GuidGenerateException(e); } } @Override public String explain( long guid64 ) { long totalBits = BitsAllocator.TOTAL_BITS; long signBits = this.bitsAllocator.getSignBits(); long timestampBits = this.bitsAllocator.getTimestampBits(); long workerIdBits = this.bitsAllocator.getWorkerIdBits(); long sequenceBits = this.bitsAllocator.getSequenceBits(); // parse UID long sequence = (guid64 << (totalBits - sequenceBits)) >>> (totalBits - sequenceBits); long workerId = (guid64 << (timestampBits + signBits)) >>> (totalBits - workerIdBits); long deltaSeconds = guid64 >>> (workerIdBits + sequenceBits); Date thatTime = new Date(TimeUnit.SECONDS.toMillis(epochSeconds + deltaSeconds)); String thatTimeStr = DateUtils.formatByDateTimePattern(thatTime); // format as string return String.format("{\"UID\":\"%d\",\"timestamp\":\"%s\",\"workerId\":\"%d\",\"sequence\":\"%d\"}", guid64, thatTimeStr, workerId, sequence); } // @Override // public GUID64 parseGUID64(long uid) { // long totalBits = BitsAllocator.TOTAL_BITS; // long signBits = bitsAllocator.getSignBits(); // long timestampBits = bitsAllocator.getTimestampBits(); // long workerIdBits = bitsAllocator.getWorkerIdBits(); // long sequenceBits = bitsAllocator.getSequenceBits(); // // // parse UID // long sequence = (uid << (totalBits - sequenceBits)) >>> (totalBits - sequenceBits); // long workerId = (uid << (timestampBits + signBits)) >>> (totalBits - workerIdBits); // long deltaSeconds = uid >>> (workerIdBits + sequenceBits); // // Date thatTime = new Date(TimeUnit.SECONDS.toMillis(epochSeconds + deltaSeconds)); // String thatTimeStr = DateUtils.formatByDateTimePattern(thatTime); // // // format as string // return new GUID64(sequence, workerId, deltaSeconds); // } // @Override public GUID nextGUID() { return this.nextGUID72(); } @Override public GUID parse( String hexId ) { return new GUID72( hexId ); } @Override public GUID nextGUID72() { //先获取GUID64 long guid64 = this.nextGUIDi64(); //Debug.trace( guid64 ); //获取纳秒种子 LocalDateTime now = LocalDateTime.now(); long nanoseconds = now.toLocalTime().truncatedTo( ChronoUnit.NANOS ).getNano(); int truncatedNanos = (int) (nanoseconds % 256L); // 截取为8位 //String nanoSeed = String.format("%02x", truncatedNanos); return new GUID72( guid64, (byte) truncatedNanos ); } @Override public GUID nextGUID64() { return new GUID64( this.nextGUIDi64() ); } /** * Get UID * * @return UID * @throws GuidGenerateException in the case: Clock moved backwards; Exceeds the max timestamp */ protected synchronized long nextId() { long currentSecond = getCurrentSecond(); // Clock moved backwards, refuse to generate uid if (currentSecond < this.lastSecond) { long refusedSeconds = this.lastSecond - currentSecond; throw new GuidGenerateException("Clock moved backwards. 
Refusing for %d seconds", refusedSeconds); } // At the same second, increase sequence if (currentSecond == this.lastSecond) { this.sequence = ( this.sequence + 1 ) & this.bitsAllocator.getMaxSequence(); // Exceed the max sequence, we wait the next second to generate uid if ( this.sequence == 0 ) { currentSecond = this.getNextSecond( this.lastSecond ); } // At the different second, sequence restart from zero } else { this.sequence = 0L; } this.lastSecond = currentSecond; // Allocate bits for UID return this.bitsAllocator.allocate(currentSecond - epochSeconds, this.workerId, this.sequence); } /** * Get next millisecond */ private long getNextSecond( long lastTimestamp ) { long timestamp = getCurrentSecond(); while (timestamp <= lastTimestamp) { timestamp = getCurrentSecond(); } return timestamp; } /** * Get current second */ private long getCurrentSecond() { long currentSecond = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()); if (currentSecond - epochSeconds > this.bitsAllocator.getMaxDeltaSeconds()) { throw new GuidGenerateException("Timestamp bits is exhausted. Refusing UID generate. Now: " + currentSecond); } return currentSecond; } @Override public void setWorkerIdAssigner( WorkerIdAssigner workerIdAssigner ) { this.workerIdAssigner = workerIdAssigner; } public void setTimeBits ( int timeBits ) { if (timeBits > 0) { this.timeBits = timeBits; } } public void setWorkerBits ( int workerBits ) { if (workerBits > 0) { this.workerBits = workerBits; } } public void setSeqBits ( int seqBits ) { if (seqBits > 0) { this.seqBits = seqBits; } } public void setEpochStr ( String epochStr ) { if ( StringUtils.isNotBlank(epochStr) ) { this.epochStr = epochStr; this.epochSeconds = TimeUnit.MILLISECONDS.toSeconds(DateUtils.parseByDayPattern(epochStr).getTime()); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/DateUtils.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.pinecone.ulf.util.guid.i64.utils; import org.apache.commons.lang.time.DateFormatUtils; import java.text.ParseException; import java.util.Calendar; import java.util.Date; /** * DateUtils provides date formatting, parsing * * @author yutianbao */ public abstract class DateUtils extends org.apache.commons.lang.time.DateUtils { /** * Patterns */ public static final String DAY_PATTERN = "yyyy-MM-dd"; public static final String DATETIME_PATTERN = "yyyy-MM-dd HH:mm:ss"; public static final String DATETIME_MS_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS"; public static final Date DEFAULT_DATE = DateUtils.parseByDayPattern("1970-01-01"); /** * Parse date by 'yyyy-MM-dd' pattern * * @param str * @return */ public static Date parseByDayPattern(String str) { return parseDate(str, DAY_PATTERN); } /** * Parse date by 'yyyy-MM-dd HH:mm:ss' pattern * * @param str * @return */ public static Date parseByDateTimePattern(String str) { return parseDate(str, DATETIME_PATTERN); } /** * Parse date without Checked exception * * @param str * @param pattern * @return * @throws RuntimeException when ParseException occurred */ public static Date parseDate(String str, String pattern) { try { return parseDate(str, new String[]{pattern}); } catch (ParseException e) { throw new RuntimeException(e); } } /** * Format date into string * * @param date * @param pattern * @return */ public static String formatDate(Date date, String pattern) { return DateFormatUtils.format(date, pattern); } /** * Format date by 'yyyy-MM-dd' pattern * * @param date * @return */ public static String formatByDayPattern(Date date) { if (date != null) { return DateFormatUtils.format(date, DAY_PATTERN); } else { return null; } } /** * Format date by 'yyyy-MM-dd HH:mm:ss' pattern * * @param date * @return */ public static String formatByDateTimePattern(Date date) { return DateFormatUtils.format(date, DATETIME_PATTERN); } /** * Get current day using format date by 'yyyy-MM-dd HH:mm:ss' pattern * * @return * @author yebo */ public static String getCurrentDayByDayPattern() { Calendar cal = Calendar.getInstance(); return formatByDayPattern(cal.getTime()); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/DockerUtils.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.pinecone.ulf.util.guid.i64.utils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * DockerUtils * * @author yutianbao */ public abstract class DockerUtils { private static final Logger LOGGER = LoggerFactory.getLogger(DockerUtils.class); /** Environment param keys */ private static final String ENV_KEY_HOST = "JPAAS_HOST"; private static final String ENV_KEY_PORT = "JPAAS_HTTP_PORT"; private static final String ENV_KEY_PORT_ORIGINAL = "JPAAS_HOST_PORT_8080"; /** Docker host & port */ private static String DOCKER_HOST = ""; private static String DOCKER_PORT = ""; /** Whether is docker */ private static boolean IS_DOCKER; static { retrieveFromEnv(); } /** * Retrieve docker host * * @return empty string if not a docker */ public static String getDockerHost() { return DOCKER_HOST; } /** * Retrieve docker port * * @return empty string if not a docker */ public static String getDockerPort() { return DOCKER_PORT; } /** * Whether a docker * * @return */ public static boolean isDocker() { return IS_DOCKER; } /** * Retrieve host & port from environment */ private static void retrieveFromEnv() { // retrieve host & port from environment DOCKER_HOST = System.getenv(ENV_KEY_HOST); DOCKER_PORT = System.getenv(ENV_KEY_PORT); // not found from 'JPAAS_HTTP_PORT', then try to find from 'JPAAS_HOST_PORT_8080' if (StringUtils.isBlank(DOCKER_PORT)) { DOCKER_PORT = System.getenv(ENV_KEY_PORT_ORIGINAL); } boolean hasEnvHost = StringUtils.isNotBlank(DOCKER_HOST); boolean hasEnvPort = StringUtils.isNotBlank(DOCKER_PORT); // docker can find both host & port from environment if (hasEnvHost && hasEnvPort) { IS_DOCKER = true; // found nothing means not a docker, maybe an actual machine } else if (!hasEnvHost && !hasEnvPort) { IS_DOCKER = false; } else { LOGGER.error("Missing host or port from env for Docker. host:{}, port:{}", DOCKER_HOST, DOCKER_PORT); throw new RuntimeException( "Missing host or port from env for Docker. host:" + DOCKER_HOST + ", port:" + DOCKER_PORT); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/EnumUtils.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64.utils; import com.pinecone.framework.util.Assert; /** * EnumUtils provides the operations for {@link ValuedEnum} such as Parse, value of... 
* * @author yutianbao */ public abstract class EnumUtils { /** * Parse the bounded value into ValuedEnum * * @param clz * @param value * @return */ public static , V> T parse(Class clz, V value) { Assert.notNull(clz, "clz can not be null"); if (value == null) { return null; } for (T t : clz.getEnumConstants()) { if (value.equals(t.value())) { return t; } } return null; } /** * Null-safe valueOf function * * @param * @param enumType * @param name * @return */ public static > T valueOf(Class enumType, String name) { if (name == null) { return null; } return Enum.valueOf(enumType, name); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/NamingThreadFactory.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64.utils; import org.apache.commons.lang.ClassUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.Thread.UncaughtExceptionHandler; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicLong; /** * Named thread in ThreadFactory. If there is no specified name for thread, it * will auto detect using the invoker classname instead. * * @author yutianbao */ public class NamingThreadFactory implements ThreadFactory { private static final Logger LOGGER = LoggerFactory.getLogger(NamingThreadFactory.class); /** * Thread name pre */ private String name; /** * Is daemon thread */ private boolean daemon; /** * UncaughtExceptionHandler */ private UncaughtExceptionHandler uncaughtExceptionHandler; /** * Sequences for multi thread name prefix */ private final ConcurrentHashMap sequences; /** * Constructors */ public NamingThreadFactory() { this(null, false, null); } public NamingThreadFactory(String name) { this(name, false, null); } public NamingThreadFactory(String name, boolean daemon) { this(name, daemon, null); } public NamingThreadFactory(String name, boolean daemon, UncaughtExceptionHandler handler) { this.name = name; this.daemon = daemon; this.uncaughtExceptionHandler = handler; this.sequences = new ConcurrentHashMap(); } @Override public Thread newThread(Runnable r) { Thread thread = new Thread(r); thread.setDaemon(this.daemon); // If there is no specified name for thread, it will auto detect using the invoker classname instead. // Notice that auto detect may cause some performance overhead String prefix = this.name; if (StringUtils.isBlank(prefix)) { prefix = getInvoker(2); } thread.setName(prefix + "-" + getSequence(prefix)); // no specified uncaughtExceptionHandler, just do logging. 
if (this.uncaughtExceptionHandler != null) { thread.setUncaughtExceptionHandler(this.uncaughtExceptionHandler); } else { thread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() { public void uncaughtException(Thread t, Throwable e) { LOGGER.error("unhandled exception in thread: " + t.getId() + ":" + t.getName(), e); } }); } return thread; } /** * Get the method invoker's class name * * @param depth * @return */ private String getInvoker(int depth) { Exception e = new Exception(); StackTraceElement[] stes = e.getStackTrace(); if (stes.length > depth) { return ClassUtils.getShortClassName(stes[depth].getClassName()); } return getClass().getSimpleName(); } /** * Get sequence for different naming prefix * * @param invoker * @return */ private long getSequence(String invoker) { AtomicLong r = this.sequences.get(invoker); if (r == null) { r = new AtomicLong(0); AtomicLong previous = this.sequences.putIfAbsent(invoker, r); if (previous != null) { r = previous; } } return r.incrementAndGet(); } /** * Getters & Setters */ public String getName() { return name; } public void setName(String name) { this.name = name; } public boolean isDaemon() { return daemon; } public void setDaemon(boolean daemon) { this.daemon = daemon; } public UncaughtExceptionHandler getUncaughtExceptionHandler() { return uncaughtExceptionHandler; } public void setUncaughtExceptionHandler(UncaughtExceptionHandler handler) { this.uncaughtExceptionHandler = handler; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/NetUtils.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64.utils; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.util.Enumeration; /** * NetUtils * * @author yutianbao */ public abstract class NetUtils { /** * Pre-loaded local address */ public static InetAddress localAddress; static { try { localAddress = getLocalInetAddress(); } catch (SocketException e) { throw new RuntimeException("fail to get local ip."); } } /** * Retrieve the first validated local ip address(the Public and LAN ip addresses are validated). 
* * @return the local address * @throws SocketException the socket exception */ public static InetAddress getLocalInetAddress() throws SocketException { // enumerates all network interfaces Enumeration enu = NetworkInterface.getNetworkInterfaces(); while (enu.hasMoreElements()) { NetworkInterface ni = enu.nextElement(); if (ni.isLoopback()) { continue; } Enumeration addressEnumeration = ni.getInetAddresses(); while (addressEnumeration.hasMoreElements()) { InetAddress address = addressEnumeration.nextElement(); // ignores all invalidated addresses if (address.isLinkLocalAddress() || address.isLoopbackAddress() || address.isAnyLocalAddress()) { continue; } return address; } } throw new RuntimeException("No validated local address!"); } /** * Retrieve local address * * @return the string local address */ public static String getLocalAddress() { return localAddress.getHostAddress(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/PaddedAtomicLong.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64.utils; import java.util.concurrent.atomic.AtomicLong; /** * Represents a padded {@link AtomicLong} to prevent the FalseSharing problem

* The CPU cache line is commonly 64 bytes; here is a sample of a cache line after padding:
* 64 bytes = 8 bytes (object reference) + 6 * 8 bytes (padded long) + 8 bytes (a long value) * * @author yutianbao */ public class PaddedAtomicLong extends AtomicLong { private static final long serialVersionUID = -3415778863941386253L; /** Padded 6 long (48 bytes) */ public volatile long p1, p2, p3, p4, p5, p6 = 7L; /** * Constructors from {@link AtomicLong} */ public PaddedAtomicLong() { super(); } public PaddedAtomicLong(long initialValue) { super(initialValue); } /** * To prevent GC optimizations for cleaning unused padded references */ public long sumPaddingToPreventOptimization() { return p1 + p2 + p3 + p4 + p5 + p6; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/ValuedEnum.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64.utils; /** * {@code ValuedEnum} defines an enumeration which is bounded to a value, you * may implements this interface when you defines such kind of enumeration, that * you can use {@link EnumUtils} to simplify parse and valueOf operation. * * @author yutianbao */ public interface ValuedEnum { T value(); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/GenericDisposableWorkerIdAssigner.java ================================================ package com.pinecone.ulf.util.guid.i64.worker; import com.pinecone.ulf.util.guid.i64.utils.DockerUtils; import com.pinecone.ulf.util.guid.i64.utils.NetUtils; import com.pinecone.ulf.util.guid.i64.worker.entity.WorkerNodeEntity; import org.apache.commons.lang.math.RandomUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicInteger; public class GenericDisposableWorkerIdAssigner implements WorkerIdAssigner { private static final Logger LOGGER = LoggerFactory.getLogger(GenericDisposableWorkerIdAssigner.class); // 使用 ConcurrentHashMap 保存已分配的 worker ID,键是 worker ID,值是 WorkerNodeEntity private static final ConcurrentHashMap WORKER_NODES = new ConcurrentHashMap<>(); // 使用 AtomicInteger 作为 worker ID 的分配器 private static final AtomicInteger NEXT_ID = new AtomicInteger(0); /** * 基于内存中的列表分配 worker ID。 * * @return 分配的 worker ID */ @Override public long assignWorkerId() { // 构建 worker 节点实体 // todo 要将实体类确定下来,再根据其他逻辑生成wordId,目前为测试版本 WorkerNodeEntity workerNodeEntity = this.buildWorkerNode(); // 从 NEXT_ID 获取下一个可用的 worker ID int id = NEXT_ID.getAndIncrement(); // 将 worker ID 和对应的 WorkerNodeEntity 存入内存中的列表 WORKER_NODES.put((long) id, workerNodeEntity); // 使用 ThreadLocalRandom 生成介于 0(包括)和 1000(不包括)之间的随机整数 return ThreadLocalRandom.current().nextInt( 0, 1000 ); } /** * 根据 IP 和端口构建 worker 节点实体 */ private WorkerNodeEntity buildWorkerNode() { WorkerNodeEntity workerNodeEntity = new WorkerNodeEntity(); if ( 
DockerUtils.isDocker() ) { workerNodeEntity.setType(WorkerNodeType.CONTAINER.value()); workerNodeEntity.setHostName(DockerUtils.getDockerHost()); workerNodeEntity.setPort(DockerUtils.getDockerPort()); } else { workerNodeEntity.setType(WorkerNodeType.ACTUAL.value()); workerNodeEntity.setHostName(NetUtils.getLocalAddress()); workerNodeEntity.setPort(System.currentTimeMillis() + "-" + RandomUtils.nextInt(100000)); } return workerNodeEntity; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/WorkerIdAssigner.java ================================================ package com.pinecone.ulf.util.guid.i64.worker; import com.pinecone.framework.system.prototype.Pinenut; public interface WorkerIdAssigner extends Pinenut { long assignWorkerId(); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/WorkerNodeType.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64.worker; import com.pinecone.ulf.util.guid.i64.utils.ValuedEnum; /** * WorkerNodeType *

  • CONTAINER: Such as Docker
  • ACTUAL: Actual machine * * @author yutianbao */ public enum WorkerNodeType implements ValuedEnum { CONTAINER(1), ACTUAL(2); /** * Lock type */ private final Integer type; /** * Constructor with field of type */ private WorkerNodeType(Integer type) { this.type = type; } @Override public Integer value() { return type; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/entity/WorkerNodeEntity.java ================================================ /* * Copyright (c) 2017 Baidu, Inc. All Rights Reserve. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pinecone.ulf.util.guid.i64.worker.entity; import com.pinecone.ulf.util.guid.i64.worker.WorkerNodeType; import org.apache.commons.lang.builder.ReflectionToStringBuilder; import org.apache.commons.lang.builder.ToStringStyle; import java.util.Date; /** * Entity for M_WORKER_NODE * * @author yutianbao */ public class WorkerNodeEntity { /** * Entity unique id (table unique) */ private long id; /** * Type of CONTAINER: HostName, ACTUAL : IP. */ private String hostName; /** * Type of CONTAINER: Port, ACTUAL : Timestamp + Random(0-10000) */ private String port; /** * type of {@link WorkerNodeType} */ private int type; /** * Worker launch date, default now */ private Date launchDate = new Date(); /** * Created time */ private Date created; /** * Last modified */ private Date modified; /** * Getters & Setters */ public long getId() { return id; } public void setId(long id) { this.id = id; } public String getHostName() { return hostName; } public void setHostName(String hostName) { this.hostName = hostName; } public String getPort() { return port; } public void setPort(String port) { this.port = port; } public int getType() { return type; } public void setType(int type) { this.type = type; } public Date getLaunchDate() { return launchDate; } public void setLaunchDateDate(Date launchDate) { this.launchDate = launchDate; } public Date getCreated() { return created; } public void setCreated(Date created) { this.created = created; } public Date getModified() { return modified; } public void setModified(Date modified) { this.modified = modified; } @Override public String toString() { return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/ArchMultiScopeFactory.java ================================================ package com.pinecone.ulf.util.lang; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.system.executum.TaskManager; import com.pinecone.framework.util.lang.ArchDynamicFactory; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.name.Name; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; public abstract class ArchMultiScopeFactory 
extends ArchDynamicFactory implements MultiScopeFactory { protected TaskManager mTaskManager ; protected MultiTraitClassLoader mTraitClassLoader ; protected Map, Object> mInstanceSingletons ; protected ArchMultiScopeFactory( TaskManager taskManager, ClassLoader classLoader, MultiTraitClassLoader traitClassLoader, ClassScope classScope ) { super( classLoader, classScope ); this.mTaskManager = taskManager ; this.mTraitClassLoader = traitClassLoader ; this.mInstanceSingletons = new ConcurrentHashMap<>(); } @Override public void putInstanceSingleton( Class clazz, T obj ) { this.mInstanceSingletons.put( clazz, obj ); } @Override public void removeInstanceSingleton(Class clazz) { this.mInstanceSingletons.remove( clazz ); } @Override public int instanceSingletonSize() { return this.mInstanceSingletons.size(); } @Override public MultiTraitClassLoader getTraitClassLoader() { return this.mTraitClassLoader; } @Override protected Object beforeInstantiate( Class that, Class[] stereotypes, Object[] args ) { return this.mInstanceSingletons.get( that ); } @Override public Object newInstance ( Class that, Class[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { return super.newInstance( that, stereotypes, args ); } @Override public Object spawn (Name name, Object... args ) throws InvocationTargetException { return this.spawn( name, null, args ); } @Override public Object spawn ( Name name, Class[] stereotypes, Object... args ) throws InvocationTargetException { List > classes = this.mTraitClassLoader.loads( name ); Exception lastExp = null; if( !classes.isEmpty() ){ for ( Class c : classes ) { try { return this.newInstance( c, stereotypes, args ); } catch ( Exception e ) { lastExp = e; } } } throw new InvocationTargetException( lastExp, String.format( "%s::spawn, what-> Spawning in all scopes, has compromised.", this.className() ) ); } @Override public List popping ( Name name, Object... args ) { return this.popping( name, null, args ); } @Override public List popping ( Name name, Class[] stereotypes, Object... args ) { List > classes = this.mTraitClassLoader.loads( name ); // Try load by explicit name, saving times. List list = new ArrayList<>(); if( !classes.isEmpty() ){ for ( Class c : classes ) { try { Object o = this.newInstance( c, stereotypes, args ); if( o != null ) { list.add( o ) ; } } catch ( Exception e ) { this.handleIgnoreException( e ); } } } return list; } protected void handleIgnoreException( Exception e ) throws ProvokeHandleException { // Just ignore them. 
e.printStackTrace(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/ArchMultiScopeLoader.java ================================================ package com.pinecone.ulf.util.lang; import com.pinecone.framework.unit.LinkedTreeMap; import com.pinecone.framework.unit.LinkedTreeSet; import com.pinecone.framework.util.lang.ArchClassScopeLoader; import com.pinecone.framework.util.lang.ClassScanner; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.ScopedPackage; import com.pinecone.framework.util.name.MultiScopeName; import com.pinecone.framework.util.name.Name; import javassist.ClassPool; import javassist.CtClass; import javassist.NotFoundException; import javassist.bytecode.annotation.Annotation; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; public abstract class ArchMultiScopeLoader extends ArchClassScopeLoader implements MultiTraitClassLoader { protected ClassScanner mClassScanner ; protected ClassPool mClassPool ; protected Map mLoadedClassesPool ; protected Set mVisitedClasses ; protected HierarchyClassInspector mClassInspector ; protected ArchMultiScopeLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool, ClassScanner classScanner, HierarchyClassInspector classInspector ) { super( classScope, classLoader ); this.mClassPool = classPool; this.mLoadedClassesPool = new LinkedTreeMap<>(); this.mVisitedClasses = new LinkedTreeSet<>(); this.mClassScanner = classScanner; this.mClassInspector = classInspector; } @Override public Class load( Name simpleName ) throws ClassNotFoundException { try{ Class c = this.loadByName( simpleName ); if( c != null ) { return c; } } catch ( ClassNotFoundException e ) { this.handleIgnoreException( e ); } return this.loadInClassTrait( simpleName ); } @Override public List > loads( Name simpleName ) { List > classes = this.loadsByName( simpleName ); this.loadsInClassTrait0( simpleName, false, classes ); return classes; } @Override public Class loadByName( Name simpleName ) throws ClassNotFoundException { return (Class) this.loads0( simpleName, true ); } @Override @SuppressWarnings( "unchecked" ) public List > loadsByName( Name simpleName ) { try{ return (List >) this.loads0( simpleName, false ); } catch ( ClassNotFoundException e ) { return null; // This should never be happened. 
} } @Override public Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException { return (Class)this.loadsInClassTrait0( simpleName, true, null ); } @Override @SuppressWarnings( "unchecked" ) public List > loadsInClassTrait( Name simpleName ) { return (List >)this.loadsInClassTrait0( simpleName, false, new ArrayList<>() ); } protected abstract boolean isAnnotationQualified( Annotation that, String szName ); protected Object loadsInClassTrait0( Name simpleName, boolean bOnlyFirst, List > batch ) { this.updateScope(); for( Map.Entry kv : this.mLoadedClassesPool.entrySet() ) { CtClass pc = kv.getValue(); Annotation[] annotations = this.mClassInspector.queryVisibleAnnotations( pc ); if( annotations != null ) { for( Annotation annotation : annotations ) { if( this.isAnnotationQualified( annotation, simpleName.getName() ) ) { try{ Class c = this.mClassLoader.loadClass( kv.getKey() ); if( bOnlyFirst ) { return c; } else { batch.add( (Class)c ); } } catch ( ClassNotFoundException e ) { this.handleIgnoreException( e ); } } } } } return batch; } @Override protected List expandNamespace( Name name ) { if( name instanceof MultiScopeName) { return ((MultiScopeName) name).getFullNames(); } return List.of( name.getFullName() ) ; } @Override protected void registerDefaultFilters() { } @Override protected abstract Class loadSingleByFullClassName( String szFullClassName ); @Override public MultiTraitClassLoader updateScope() { try{ List candidates = new ArrayList<>(); for ( ScopedPackage scope : this.mClassScope.getAllScopes() ) { String szPkgName = scope.packageName(); if( this.mVisitedClasses.contains( szPkgName ) ) { continue; } else { this.mVisitedClasses.add( szPkgName ); } try { this.mClassScanner.scan( szPkgName, true, candidates ); } catch ( IOException e ) { this.handleIgnoreException( e ); } } for( String ns : candidates ) { this.mLoadedClassesPool.put( ns, this.mClassPool.get( ns ) ); } } catch ( NotFoundException e ) { this.handleIgnoreException( e ); } return this; } @Override public void clearCache() { this.mLoadedClassesPool.clear(); this.mVisitedClasses.clear(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/GenericPreloadClassInspector.java ================================================ package com.pinecone.ulf.util.lang; import javassist.ClassPool; import javassist.CtClass; import javassist.CtMethod; import javassist.NotFoundException; import javassist.bytecode.AnnotationsAttribute; import javassist.bytecode.BadBytecode; import javassist.bytecode.ClassFile; import javassist.bytecode.MethodInfo; import javassist.bytecode.SignatureAttribute; import javassist.bytecode.annotation.Annotation; public class GenericPreloadClassInspector implements HierarchyClassInspector { protected ClassPool mClassPool; public GenericPreloadClassInspector( ClassPool classPool ) { this.mClassPool = classPool; } @Override public CtClass preloadClass( String szClassName ) throws NotFoundException { return this.mClassPool.get( szClassName ); } @Override public boolean isImplementedDirectly( CtClass clazz, Class interf ) throws NotFoundException { CtClass[] interfaces = clazz.getInterfaces(); for ( CtClass iface : interfaces ) { if ( iface.getName().equals( interf.getName() ) ) { return true; } } return false; } @Override public boolean isImplemented( CtClass clazz, Class interf ) throws NotFoundException { String szInterfaceName = interf.getName(); while ( clazz != null && !clazz.getName().equals( Object.class.getName() ) ) { 
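            // Walk the superclass chain: at each level, test the directly declared interfaces and their super-interfaces.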
CtClass[] interfaces = clazz.getInterfaces(); for ( CtClass iface : interfaces ) { if ( this.isInterfaceExtended( iface, szInterfaceName ) ) { return true; } } clazz = clazz.getSuperclass(); } return false; } @Override public boolean isExtendedDirectly( CtClass clazz, Class parent ) throws NotFoundException { CtClass superClass = clazz.getSuperclass(); if ( superClass != null && superClass.getName().equals( parent.getName() ) ) { return true; } if( clazz.isInterface() ) { return this.isImplementedDirectly( clazz, parent ); } return false; } @Override public boolean isExtended( CtClass clazz, Class parent ) throws NotFoundException { if( clazz.isInterface() ) { return this.isInterfaceExtended( clazz, parent.getName() ); } while ( clazz != null && !clazz.getName().equals( Object.class.getName() ) ) { CtClass superClass = clazz.getSuperclass(); if (superClass != null && superClass.getName().equals(parent.getName())) { return true; } clazz = clazz.getSuperclass(); } return false; } private boolean isInterfaceExtended( CtClass clazz, String interfaceName ) throws NotFoundException { if ( clazz == null ) { return false; } CtClass[] interfaces = clazz.getInterfaces(); for ( CtClass interfaceClass : interfaces ) { if ( interfaceClass.getName().equals( interfaceName ) ) { return true; } if ( this.isInterfaceExtended( interfaceClass, interfaceName ) ) { return true; } } CtClass superClass = clazz.getSuperclass(); if ( superClass != null ) { return this.isInterfaceExtended( superClass, interfaceName ); } return false; } @Override public Annotation[] queryVisibleAnnotations( CtClass clazz ) { ClassFile classFile = clazz.getClassFile(); AnnotationsAttribute visible = (AnnotationsAttribute) classFile.getAttribute( AnnotationsAttribute.visibleTag ); if ( visible != null ) { return visible.getAnnotations(); } return null; } @Override public boolean hasOwnAnnotation( CtClass clazz, Class annotationClass ) { Annotation[] annotations = this.queryVisibleAnnotations( clazz ); if( annotations == null ) { return false; } for ( Annotation annotation : annotations ) { if ( annotation.getTypeName().equals( annotationClass.getName() ) ) { return true; } } return false; } @Override public boolean hasOwnAnnotations( CtClass clazz, Class[] annotationClasses ) { Annotation[] annotations = this.queryVisibleAnnotations( clazz ); if( annotations == null ) { return false; } return this.hasOwnAnnotations( annotations, annotationClasses ); } @Override public boolean hasOwnMethod( CtClass clazz, String methodName ) { try { clazz.getDeclaredMethod( methodName ); return true; } catch ( NotFoundException e ) { return false; } } @Override public boolean hasOwnMethods( CtClass clazz, String[] methodNames ) { for ( String methodName : methodNames ) { if ( !this.hasOwnMethod(clazz, methodName) ) { return false; } } return true; } protected boolean hasOwnAnnotations( Annotation[] annotations, Class[] annotationClasses ) { for ( Class annotationClass : annotationClasses ) { boolean found = false; for ( Annotation annotation : annotations ) { if ( annotation.getTypeName().equals( annotationClass.getName() ) ) { found = true; break; } } if (!found) { return false; } } return true; } public boolean methodHasAnnotations( CtMethod method, Class[] annotationClasses ) { MethodInfo methodInfo = method.getMethodInfo(); AnnotationsAttribute attr = (AnnotationsAttribute) methodInfo.getAttribute( AnnotationsAttribute.visibleTag ); if ( attr == null ) { return false; } return this.hasOwnAnnotations( attr.getAnnotations(), annotationClasses ); } 
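    // Name-based variant of the Class-token overload above: every name in annotationNames must be present.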
    public boolean methodHasAnnotations( CtMethod method, String[] annotationNames ) {
        MethodInfo methodInfo = method.getMethodInfo();
        AnnotationsAttribute attr = (AnnotationsAttribute) methodInfo.getAttribute( AnnotationsAttribute.visibleTag );
        if ( attr == null ) {
            return false;
        }
        for ( String annotationName : annotationNames ) {
            boolean found = false;
            for ( Annotation annotation : attr.getAnnotations() ) {
                if ( annotation.getTypeName().equals( annotationName ) ) {
                    found = true;
                    break;
                }
            }
            if ( !found ) {
                return false;
            }
        }
        return true; // Every requested annotation was found.
    }

    public boolean methodHasAnnotation( CtMethod method, Class annotationClass ) {
        return this.methodHasAnnotation( method, annotationClass.getName() );
    }

    public boolean methodHasAnnotation( CtMethod method, String annotationName ) {
        MethodInfo methodInfo = method.getMethodInfo();
        AnnotationsAttribute attr = (AnnotationsAttribute) methodInfo.getAttribute( AnnotationsAttribute.visibleTag );
        if ( attr != null ) {
            for ( Annotation annotation : attr.getAnnotations() ) {
                if ( annotation.getTypeName().equals( annotationName ) ) {
                    return true;
                }
            }
        }
        return false;
    }

    @Override
    public boolean hasOwnField( CtClass clazz, String fieldName ) {
        try {
            clazz.getDeclaredField( fieldName );
            return true;
        }
        catch ( NotFoundException e ) {
            return false;
        }
    }

    @Override
    public boolean hasOwnFields( CtClass clazz, String[] fieldNames ) {
        for ( String fieldName : fieldNames ) {
            if ( !this.hasOwnField( clazz, fieldName ) ) {
                return false;
            }
        }
        return true;
    }

    public static String[] parseGenericParameterTypes( CtMethod method ) throws NotFoundException, BadBytecode {
        SignatureAttribute.MethodSignature methodSignature = GenericPreloadClassInspector.getMethodSignature( method );
        if ( methodSignature == null ) {
            CtClass[] ps = method.getParameterTypes();
            String[] result = new String[ ps.length ];
            for ( int i = 0; i < ps.length; ++i ) {
                result[ i ] = ps[ i ].getName();
            }
            return result;
        }
        SignatureAttribute.Type[] paramTypes = methodSignature.getParameterTypes();
        String[] result = new String[ paramTypes.length ];
        for ( int i = 0; i < paramTypes.length; ++i ) {
            result[ i ] = GenericPreloadClassInspector.typeToString( paramTypes[ i ] );
        }
        return result;
    }

    public static String parseGenericReturnType( CtMethod method ) throws NotFoundException, BadBytecode {
        SignatureAttribute.MethodSignature methodSignature = GenericPreloadClassInspector.getMethodSignature( method );
        if ( methodSignature == null ) {
            return method.getReturnType().getName();
        }
        return GenericPreloadClassInspector.typeToString( methodSignature.getReturnType() );
    }

    public static String[] evalGenericParameterTypes( CtMethod method ) {
        try {
            return GenericPreloadClassInspector.parseGenericParameterTypes( method );
        }
        catch ( NotFoundException | BadBytecode e ) {
            return null;
        }
    }

    public static String getGenericReturnType( CtMethod method ) {
        try {
            SignatureAttribute.MethodSignature methodSignature = GenericPreloadClassInspector.getMethodSignature( method );
            if ( methodSignature == null ) {
                return null;
            }
            return GenericPreloadClassInspector.typeToString( methodSignature.getReturnType() );
        }
        catch ( BadBytecode e ) {
            return null;
        }
    }

    public static String evalGenericReturnType( CtMethod method ) {
        try {
            return GenericPreloadClassInspector.parseGenericReturnType( method );
        }
        catch ( NotFoundException | BadBytecode e ) {
            return null;
        }
    }

    protected static SignatureAttribute.MethodSignature getMethodSignature( CtMethod method ) throws BadBytecode {
        SignatureAttribute signature = (SignatureAttribute) method.getMethodInfo().getAttribute(
SignatureAttribute.tag ); if ( signature == null ) { return null; } return SignatureAttribute.toMethodSignature( signature.getSignature() ); } public static String typeToString( SignatureAttribute.Type type ) { if ( type instanceof SignatureAttribute.ClassType ) { SignatureAttribute.ClassType classType = (SignatureAttribute.ClassType) type; if ( classType.getTypeArguments() != null && classType.getTypeArguments().length > 0 ) { StringBuilder sb = new StringBuilder(classType.getName()); sb.append( "<" ); for ( int i = 0; i < classType.getTypeArguments().length; ++i ) { if ( i > 0 ) { sb.append( ", " ); } sb.append(classType.getTypeArguments()[i].toString()); } sb.append( ">" ); return sb.toString(); } return classType.getName(); } return type.toString(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/HierarchyClassInspector.java ================================================ package com.pinecone.ulf.util.lang; import javassist.CtClass; import javassist.NotFoundException; public interface HierarchyClassInspector extends PreloadClassInspector { default boolean isImplementedDirectly( String szClassName, Class interf ) throws NotFoundException { return this.isImplementedDirectly( this.preloadClass( szClassName ), interf ); } boolean isImplementedDirectly(CtClass clazz, Class interf ) throws NotFoundException ; default boolean isImplemented( String szClassName, Class interf ) throws NotFoundException { return this.isImplemented( this.preloadClass( szClassName ), interf ); } boolean isImplemented( CtClass clazz, Class interf ) throws NotFoundException ; default boolean isExtendedDirectly( String szClassName, Class interf ) throws NotFoundException { return this.isExtendedDirectly( this.preloadClass( szClassName ), interf ); } boolean isExtendedDirectly( CtClass clazz, Class parent ) throws NotFoundException ; default boolean isExtended( String szClassName, Class interf ) throws NotFoundException { return this.isExtended( this.preloadClass( szClassName ), interf ); } boolean isExtended( CtClass clazz, Class parent ) throws NotFoundException ; } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/MultiScopeFactory.java ================================================ package com.pinecone.ulf.util.lang; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.lang.DynamicFactory; import com.pinecone.framework.util.name.Name; import com.pinecone.framework.util.name.ScopeName; import java.lang.reflect.InvocationTargetException; import java.util.List; public interface MultiScopeFactory extends DynamicFactory { MultiTraitClassLoader getTraitClassLoader(); default Object spawn( String name, Class[] stereotypes, Object... args ) throws InvocationTargetException { return this.spawn( new ScopeName(name), stereotypes, args ); } Object spawn( Name name, Class[] stereotypes, Object... args ) throws InvocationTargetException; default Object spawn( String name, Object... args ) throws InvocationTargetException { return this.spawn( new ScopeName(name), args ); } Object spawn( Name name, Object... args ) throws InvocationTargetException; default List popping( String name, Class[] stereotypes, Object... args ) { return this.popping( new ScopeName(name), stereotypes, args ); } List popping( Name name, Class[] stereotypes, Object... args ); default List popping( String name, Object... 
args ) { return this.popping( new ScopeName(name), args ); } List popping( Name name, Object... args ); void putInstanceSingleton ( Class clazz, T obj ); void removeInstanceSingleton ( Class clazz ); int instanceSingletonSize(); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/MultiTraitClassLoader.java ================================================ package com.pinecone.ulf.util.lang; import com.pinecone.framework.util.lang.MultiClassScopeLoader; import com.pinecone.framework.util.name.Name; import java.util.List; public interface MultiTraitClassLoader extends TraitClassLoader, MultiClassScopeLoader { List > loads( Name name ) ; List > loadsByName( Name simpleName ); List > loadsInClassTrait( Name simpleName ) ; } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/PooledClassCandidateScanner.java ================================================ package com.pinecone.ulf.util.lang; import com.pinecone.framework.util.lang.*; import javassist.ClassPool; import java.io.IOException; public class PooledClassCandidateScanner extends ClassCandidateScanner { protected ClassPool mClassPool; public PooledClassCandidateScanner ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) { super( searchScope, classLoader, iteratorsFactory ); this.mClassPool = classPool; } public PooledClassCandidateScanner ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) { this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool ); } public PooledClassCandidateScanner ( ClassScope searchScope, ClassLoader classLoader ) { this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() ); } public void setClassPool ( ClassPool classPool ) { this.mClassPool = classPool; } @Override protected boolean filter( String szClassName ) { try{ for ( TypeFilter filter : this.mIncludeFilters ) { if ( filter.match( szClassName, this.mClassPool ) ) { return false; } } for ( TypeFilter filter : this.mExcludeFilters ) { if ( filter.match( szClassName, this.mClassPool ) ) { return true; } } } catch ( IOException e ) { return true; } return false; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/PreloadClassInspector.java ================================================ package com.pinecone.ulf.util.lang; import com.pinecone.framework.system.prototype.Pinenut; import javassist.CtClass; import javassist.NotFoundException; import javassist.bytecode.annotation.Annotation; public interface PreloadClassInspector extends Pinenut { CtClass preloadClass( String szClassName ) throws NotFoundException ; default boolean hasOwnAnnotation( String szClassName, Class interf ) throws NotFoundException { return this.hasOwnAnnotation( this.preloadClass( szClassName ), interf ); } Annotation[] queryVisibleAnnotations( CtClass clazz ); boolean hasOwnAnnotation( CtClass clazz, Class annotationClass ) ; default boolean hasOwnAnnotations( String szClassName, Class[] annotationClasses ) throws NotFoundException { return this.hasOwnAnnotations( this.preloadClass( szClassName ), annotationClasses ); } boolean hasOwnAnnotations( CtClass clazz, Class[] annotationClasses ); default boolean hasOwnMethod( String szClassName, String methodName ) throws 
NotFoundException {
        return this.hasOwnMethod( this.preloadClass( szClassName ), methodName );
    }
    boolean hasOwnMethod( CtClass clazz, String methodName );

    default boolean hasOwnField( String szClassName, String fieldName ) throws NotFoundException {
        return this.hasOwnField( this.preloadClass( szClassName ), fieldName );
    }
    boolean hasOwnField( CtClass clazz, String fieldName );

    default boolean hasOwnMethods( String szClassName, String[] methodNames ) throws NotFoundException {
        return this.hasOwnMethods( this.preloadClass( szClassName ), methodNames );
    }
    boolean hasOwnMethods( CtClass clazz, String[] methodNames );

    default boolean hasOwnFields( String szClassName, String[] fieldNames ) throws NotFoundException {
        return this.hasOwnFields( this.preloadClass( szClassName ), fieldNames );
    }
    boolean hasOwnFields( CtClass clazz, String[] fieldNames );
}


================================================
FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/SimpleAnnotationExcludeFilter.java
================================================
package com.pinecone.ulf.util.lang;

import com.pinecone.framework.util.lang.TypeFilter;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.NotFoundException;

import java.io.IOException;

public class SimpleAnnotationExcludeFilter implements TypeFilter {
    protected HierarchyClassInspector mClassInspector;
    protected Class mAnnotationType;

    public SimpleAnnotationExcludeFilter( HierarchyClassInspector inspector, Class annotationType ) {
        this.mClassInspector = inspector;
        this.mAnnotationType = annotationType;
    }

    @Override
    public boolean match( String szClassName, Object pool ) throws IOException {
        try {
            CtClass clz = ( (ClassPool) pool ).get( szClassName );
            return !this.mClassInspector.hasOwnAnnotation( clz, this.mAnnotationType );
        }
        catch ( NotFoundException e ) {
            return true;
        }
    }
}


================================================
FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/TraitClassLoader.java
================================================
package com.pinecone.ulf.util.lang;

import com.pinecone.framework.util.lang.ClassScopeLoader;
import com.pinecone.framework.util.name.Name;

public interface TraitClassLoader extends ClassScopeLoader {
    @Override
    Class load( Name simpleName ) throws ClassNotFoundException;

    // Directly by its name.
    Class loadByName( Name simpleName ) throws ClassNotFoundException;

    // Scanning the class's annotations, methods or other traits.
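    // (Trait resolution is annotation-driven; see ArchMultiScopeLoader.loadsInClassTrait0.)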
Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException ; TraitClassLoader updateScope(); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/BeanProtobufDecoder.java ================================================ package com.pinecone.ulf.util.protobuf; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.pinecone.framework.system.prototype.Pinenut; public interface BeanProtobufDecoder extends Pinenut { Map decodeMap( Class clazz, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ); default Map decodeMap( Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ){ return this.decodeMap( LinkedHashMap.class, descriptor, dynamicMessage, exceptedKeys, options ); } T decodeBean( Class clazz, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ); T decode( Class clazz, String genericLabel, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ); default T decode( Class clazz, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ) { return this.decode( clazz, null, descriptor, dynamicMessage, exceptedKeys, options ); } static boolean isNullMessage( DynamicMessage dynamicMessage, Descriptors.Descriptor descriptor ) { return dynamicMessage.getAllFields().isEmpty() && !descriptor.getFields().isEmpty(); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/BeanProtobufEncoder.java ================================================ package com.pinecone.ulf.util.protobuf; import java.util.Collection; import java.util.Map; import java.util.Set; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.pinecone.framework.system.prototype.Pinenut; public interface BeanProtobufEncoder extends Pinenut { BeanProtobufEncoder DefaultEncoder = new GenericBeanProtobufEncoder(); Descriptors.Descriptor transform( Object dynamicObject, Set exceptedKeys, Options options ); Descriptors.Descriptor transform( Map dynamicObject, Set exceptedKeys, Options options ); DescriptorProtos.FieldDescriptorProto.Builder transform( Collection dynamicObject, Class elementType, String key, int fieldNumber, Options options ); //Descriptors.Descriptor transform( Object[] dynamicObject, Set exceptedKeys, Options options ); Descriptors.Descriptor transformBean( Class clazz, Object dynamicObject, Set exceptedKeys, Options options ); Descriptors.Descriptor transform( Class clazz, Object dynamicObject, Set exceptedKeys, Options options ); default Descriptors.Descriptor transform( Class clazz, Object dynamicObject, Set exceptedKeys ) { return this.transform( clazz, null, dynamicObject, exceptedKeys ); } default Descriptors.Descriptor transform( Class clazz, String componentGenericLabel, Object dynamicObject, Set exceptedKeys ) { Descriptors.Descriptor primitiveDesc = this.transformPrimitive( clazz ); if( primitiveDesc != null ) { return primitiveDesc; } Descriptors.Descriptor repeatedDesc = this.transformRepeated( clazz, componentGenericLabel ); if( repeatedDesc != null ) { return repeatedDesc; } return this.transform( clazz, dynamicObject, exceptedKeys, 
Options.DefaultOptions );
    }

    default Descriptors.Descriptor transformRepeated( Class clazz, String componentGenericLabel ) {
        if( RepeatedWrapper.isSupportedRepeated( clazz ) ) {
            if ( clazz.isArray() ) {
                return RepeatedWrapper.transform( clazz, clazz.getComponentType(), this );
            }
            else {
                Class dependenceComponentType = ProtobufUtils.loadSingleGenericType( this.getClass(), componentGenericLabel );
                if ( dependenceComponentType == null ) {
                    throw new IllegalArgumentException( "Null-valued argument (" + componentGenericLabel + ") cannot be transformed." );
                }
                return RepeatedWrapper.transform( clazz, dependenceComponentType, this );
            }
        }
        return null;
    }

    default Descriptors.Descriptor transformPrimitive( Class clazz ) {
        if( PrimitiveWrapper.isSupportedPrimitive( clazz ) ) {
            return PrimitiveWrapper.transform( clazz );
        }
        return null;
    }

    DescriptorProtos.FieldDescriptorProto.Type reinterpret( Class type );

    DynamicMessage encode( Descriptors.Descriptor descriptor, Object dynamicObject, Set exceptedKeys, Options options );

    DynamicMessage encodeBean( Descriptors.Descriptor descriptor, Object dynamicObject, Set exceptedKeys, Options options );

    DynamicMessage encode( Descriptors.Descriptor descriptor, Map dynamicObject, Set exceptedKeys, Options options );
}


================================================
FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/DescriptorNameNormalizer.java
================================================
package com.pinecone.ulf.util.protobuf;

import com.pinecone.framework.system.prototype.Pinenut;

public interface DescriptorNameNormalizer extends Pinenut {
    String normalize( String bad );
}


================================================
FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/FieldProtobufDecoder.java
================================================
package com.pinecone.ulf.util.protobuf;

import java.util.Map;
import java.util.Set;

import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import com.pinecone.framework.lang.field.FieldEntity;

public interface FieldProtobufDecoder extends BeanProtobufDecoder {
    Map.Entry[] decodeEntries( Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options );

    void decodeEntries( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options );

    Object[] decodeValues( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options );
}


================================================
FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/FieldProtobufEncoder.java
================================================
package com.pinecone.ulf.util.protobuf;

import java.util.Map;
import java.util.Set;

import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import com.pinecone.framework.lang.field.FieldEntity;

public interface FieldProtobufEncoder extends BeanProtobufEncoder {
    Descriptors.Descriptor transform( Map.Entry[] fields, String szEntityName, Set exceptedKeys, Options options );

    DynamicMessage encode( Descriptors.Descriptor descriptor, Map.Entry[] fields, Set exceptedKeys, Options options );

    Descriptors.Descriptor transform( FieldEntity[] fields, String szEntityName, Set exceptedKeys, Options options );

    DynamicMessage encode( Descriptors.Descriptor descriptor, FieldEntity[] fields, Set exceptedKeys, Options options );
}

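Taken together, the codec interfaces above support a reflective bean-to-protobuf round trip. A minimal usage sketch, assuming a hypothetical `User` JavaBean (illustrative, not part of the library) with a public no-arg constructor and standard getter/setter pairs; the generic signatures are assumptions, since this extract strips type parameters:

    // Hypothetical bean: public class User { private String name; private int age; /* + getters/setters */ }
    User user = new User();
    user.setName( "Ada" );
    user.setAge( 36 );

    // 1. Derive a protobuf Descriptor from the bean's public getters.
    Descriptors.Descriptor desc = BeanProtobufEncoder.DefaultEncoder.transform( user, null, Options.DefaultOptions );

    // 2. Encode the bean as a DynamicMessage against that descriptor; msg.toByteArray() yields the wire form.
    DynamicMessage msg = BeanProtobufEncoder.DefaultEncoder.encode( desc, user, null, Options.DefaultOptions );

    // 3. Decode back into a bean with the GenericBeanProtobufDecoder defined below
    //    (or request a Map type to receive the fields as a map instead).
    User decoded = new GenericBeanProtobufDecoder().decodeBean( User.class, desc, msg, null, Options.DefaultOptions );

================================================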
FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/FileDescriptorFormater.java ================================================ package com.pinecone.ulf.util.protobuf; import com.pinecone.framework.system.prototype.Pinenut; public interface FileDescriptorFormater extends Pinenut { String format( Class type ); } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericBeanProtobufDecoder.java ================================================ package com.pinecone.ulf.util.protobuf; import java.lang.reflect.Array; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.pinecone.framework.system.stereotype.JavaBeans; import com.pinecone.framework.unit.Units; public class GenericBeanProtobufDecoder implements BeanProtobufDecoder { @Override @SuppressWarnings( "unchecked" ) public T decode( Class clazz, String genericLabel, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ) { if( PrimitiveWrapper.isSupportedPrimitive( clazz ) ) { return (T) dynamicMessage.getField( descriptor.findFieldByName( PrimitiveWrapper.FieldName ) ); } else if( RepeatedWrapper.isSupportedRepeated( clazz ) ) { Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName( RepeatedWrapper.FieldName ); Object val = dynamicMessage.getField( fieldDescriptor ); Object ret = this.decodeRepeated( val, fieldDescriptor, options, clazz, genericLabel ); return clazz.cast( ret ); } else if( Map.class.isAssignableFrom( clazz ) ) { if( clazz.isInterface() && Map.class.isAssignableFrom( clazz ) ) { clazz = options.getDefaultMapType(); } return clazz.cast( this.decodeMap( clazz, descriptor, dynamicMessage, exceptedKeys, options ) ); } return clazz.cast( this.decodeBean( clazz, descriptor, dynamicMessage, exceptedKeys, options ) ); } @Override public Map decodeMap( Class clazz, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ) { if ( descriptor == null || dynamicMessage == null ) { return null; } Map result; if( clazz.isInterface() && Map.class.isAssignableFrom( clazz ) ) { result = Units.newInstance( options.getDefaultMapType() ); } else { result = Units.newInstance( clazz ); } for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) { try { String fieldName = fieldDescriptor.getName(); // Skip excluded keys if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) { continue; } Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor ); if ( value != null ) { if ( fieldDescriptor.isRepeated() ) { List values = (List) value; List decodedValues = new ArrayList<>(); for ( Object item : values ) { decodedValues.add( this.decodeFieldValue( fieldDescriptor, item, item.getClass(), options ) ); } result.put( fieldName, decodedValues ); } else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType(); result.put( fieldName, this.decodeMap( clazz, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ) ); } else { result.put( fieldName, this.decodeFieldValue( fieldDescriptor, value, value.getClass(), options ) ); } } } catch ( 
Exception e ) { e.printStackTrace(); } } return result; } @Override public T decodeBean( Class targetClass, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ) { if ( descriptor == null || dynamicMessage == null ) { return null; } else if ( BeanProtobufDecoder.isNullMessage( dynamicMessage, descriptor ) ) { return null; } try { if ( targetClass == null ) { return null; } Object bean; if( targetClass.isInterface() && Map.class.isAssignableFrom( targetClass ) ) { bean = Units.newInstance( options.getDefaultMapType() ); } else { bean = targetClass.getDeclaredConstructor().newInstance(); } for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) { String fieldName = fieldDescriptor.getName(); if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) { continue; } Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor ); if ( value != null ) { try { String setterMethod = JavaBeans.MethodMajorKeySet + JavaBeans.methodKeyNameUpperCaseNormalize( fieldName ); Method setter = null; try{ setter = targetClass.getMethod( setterMethod, this.decodeType( fieldDescriptor ) ); } catch ( NoSuchMethodException | SecurityException e ) { Method[] methods = targetClass.getMethods(); for( Method method : methods ) { if( method.getParameterCount() == 1 && method.getName().equals( setterMethod ) ) { setter = method; break; } } if( setter == null ){ continue; } } if ( fieldDescriptor.isRepeated() ) { Class[] pars = setter.getParameterTypes(); if( pars.length > 0 ) { Class nestedType = pars[ 0 ]; String szGType = ProtobufUtils.evalSetterGenericLabel( setter ); setter.invoke( bean, this.decodeRepeated( value, fieldDescriptor, options, nestedType, szGType ) ); } } else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType(); Class[] pars = setter.getParameterTypes(); if( pars.length > 0 ) { Object nestedBean; Class nestedType = pars[ 0 ]; String szGType = ProtobufUtils.evalSetterGenericLabel( setter ); if( nestedType.equals( Map.class ) ) { nestedBean = this.decodeMap( nestedType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ); } else { if ( descriptor.equals( nestedDescriptor ) ) { DynamicMessage dyVal =(DynamicMessage) value; if ( BeanProtobufDecoder.isNullMessage( dyVal, nestedDescriptor ) ) { nestedBean = null; } else { nestedBean = this.decode( nestedType, szGType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ); } } else { nestedBean = this.decode( nestedType, szGType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ); } } setter.invoke( bean, nestedBean ); } } else { Class[] pars = setter.getParameterTypes(); if( pars.length > 0 ) { Class nestedType = pars[ 0 ]; String szGType = ProtobufUtils.evalSetterGenericLabel( setter ); setter.invoke( bean, this.decodeFieldValue( fieldDescriptor, value, nestedType, szGType, options ) ); } } } catch ( IllegalAccessException | InvocationTargetException | IllegalArgumentException ignore ) { //ignore.printStackTrace(); } } } return targetClass.cast( bean ); } catch ( Exception e ) { e.printStackTrace(); return null; } } protected Object decodeFieldValue( Descriptors.FieldDescriptor fieldDescriptor, Object value, Class valueType, String genericLabel, Options options ) { switch ( fieldDescriptor.getType() ) { case BOOL: { return value; } case INT32: case SINT32: case SFIXED32: { return value; } case INT64: case SINT64: case SFIXED64: { 
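            // The protobuf runtime already surfaces 64-bit integer fields as Long, so they pass through unchanged.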
return value; } case FLOAT: { return value; } case DOUBLE: { return value; } case STRING: { return value.toString(); } case BYTES: { return value instanceof com.google.protobuf.ByteString ? ((com.google.protobuf.ByteString) value).toByteArray() : value; } case MESSAGE: { Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType(); return this.decode( valueType, genericLabel, nestedDescriptor, (DynamicMessage) value, null, options ); } default: { return value; } } } protected Object decodeFieldValue( Descriptors.FieldDescriptor fieldDescriptor, Object value, Class valueType, Options options ) { return this.decodeFieldValue( fieldDescriptor, value, valueType, null, options ); } protected Class decodeType( Descriptors.FieldDescriptor fieldDescriptor ) { switch ( fieldDescriptor.getType() ) { case BOOL: { return Boolean.class; } case INT32: case SINT32: case SFIXED32: { return Integer.class; } case INT64: case SINT64: case SFIXED64: { return Long.class; } case FLOAT: { return Float.class; } case DOUBLE: { return Double.class; } case STRING: { return String.class; } case BYTES: { return byte[].class; } case MESSAGE: { return null; } default: { throw new IllegalArgumentException( "Unsupported field type: " + fieldDescriptor.getType() ); } } } protected void setCollectionRepeated( Collection values, Collection decodedValues, String genericTypeLabel, Descriptors.FieldDescriptor fieldDescriptor, Options options ) { if ( genericTypeLabel == null ) { throw new IllegalArgumentException( "Unable to decode `genericTypeLabel` with null." ); } Class componentType = ProtobufUtils.loadSingleGenericType( this.getClass(), genericTypeLabel ); if ( componentType == null ) { throw new IllegalArgumentException( "Unable to decode `genericTypeLabel` " + genericTypeLabel + "." 
); } for ( Object item : values ) { decodedValues.add( this.decodeFieldValue( fieldDescriptor, item, componentType, options ) ); } } protected Object decodeRepeated( Object value, Descriptors.FieldDescriptor fieldDescriptor, Options options, Class type, String genericTypeLabel ) { if ( type.isArray() ) { List values = (List) value; Class componentType = type.getComponentType(); Object[] ret = (Object[]) Array.newInstance( type.getComponentType(), values.size() ); int i = 0; for ( Object item : values ) { ret[ i ] = this.decodeFieldValue( fieldDescriptor, item, componentType, options ); ++i; } return ret; } else if ( Set.class.isAssignableFrom( type ) ) { List values = (List) value; Set decodedValues = new HashSet<>(); this.setCollectionRepeated( values, decodedValues, genericTypeLabel, fieldDescriptor, options ); return decodedValues; } else if ( Collection.class.isAssignableFrom( type ) ) { List values = (List) value; List decodedValues = new ArrayList<>(); this.setCollectionRepeated( values, decodedValues, genericTypeLabel, fieldDescriptor, options ); return decodedValues; } return null; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericBeanProtobufEncoder.java ================================================ package com.pinecone.ulf.util.protobuf; import java.lang.reflect.Array; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.pinecone.framework.system.stereotype.JavaBeans; import com.pinecone.framework.util.ReflectionUtils; import com.pinecone.framework.util.StringUtils; public class GenericBeanProtobufEncoder implements BeanProtobufEncoder { @Override public Descriptors.Descriptor transform( Object dynamicObject, Set exceptedKeys, Options options ) { if ( dynamicObject == null ) { return null; } return this.transform( dynamicObject.getClass(), dynamicObject, exceptedKeys, options ); } protected DescriptorProtos.FieldDescriptorProto.Builder transformEntry( String key, Object value, Class valType, String componentGLabel,int fieldNumber, List dependencies, Set exceptedKeys, Options options, String thisKey ) { if ( valType == null ) { valType = value.getClass(); } DescriptorProtos.FieldDescriptorProto.Type fieldType = valType == null ? DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING // Default for null values : this.reinterpret( valType ); DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder; Class elemType = valType; if( value != null ) { elemType = value.getClass(); } Class dependenceComponentType = null; if( Collection.class.isAssignableFrom( elemType ) ) { if ( value != null ) { Collection co = (Collection) value; if( co.isEmpty() ) { fieldType = DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING; } else { fieldType = this.reinterpret( co.iterator().next().getClass() ); } } else { if ( componentGLabel == null ) { throw new IllegalArgumentException( "None valued argument can`t be transformed." 
); } dependenceComponentType = ProtobufUtils.loadSingleGenericType( this.getClass(), componentGLabel ); if ( dependenceComponentType != null ) { fieldType = this.reinterpret( dependenceComponentType ); } else { throw new IllegalArgumentException( "None valued argument (" + componentGLabel + ") can`t be transformed." ); } } fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( key ) .setNumber( fieldNumber ) .setType( fieldType ) .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED ); } else if( elemType.isArray() ) { dependenceComponentType = elemType.getComponentType(); fieldType = this.reinterpret( dependenceComponentType ); fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( key ) .setNumber( fieldNumber ) .setType( this.reinterpret( dependenceComponentType ) ) .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED ); } else { fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( key ) .setNumber( fieldNumber ) .setType( fieldType ); } if ( fieldType == DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) { Descriptors.Descriptor nestedDescriptor; if ( dependenceComponentType != null ) { nestedDescriptor = this.transform0( dependenceComponentType, thisKey, value, exceptedKeys, options ); } else { nestedDescriptor = this.transform0( valType, thisKey, value, exceptedKeys, options ); } if ( nestedDescriptor != null ) { fieldBuilder.setTypeName( nestedDescriptor.getFullName() ); dependencies.add( nestedDescriptor.getFile() ); } } return fieldBuilder; } protected Descriptors.Descriptor transform0( Map dynamicObject, String thisKey, Set exceptedKeys, Options options ) { if ( dynamicObject == null ) { return null; } try { String szEntityName = thisKey; DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder(); List dependencies = new ArrayList<>(); int fieldNumber = 1; for ( Object em : dynamicObject.entrySet() ) { Map.Entry entry = (Map.Entry) em; String key = entry.getKey().toString(); if ( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = this.transformEntry( key, entry.getValue(), null, null, fieldNumber, dependencies, exceptedKeys, options, szEntityName + "_" + key ); descriptorBuilder.addField( fieldBuilder ); ++fieldNumber; } descriptorBuilder.setName( szEntityName ); Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options ); return fileDescriptor.findMessageTypeByName( szEntityName ); } catch ( Descriptors.DescriptorValidationException e ) { e.printStackTrace(); return null; } } protected Descriptors.FileDescriptor evalMessageType ( List dependencies, DescriptorProtos.DescriptorProto.Builder descriptorBuilder, String szEntityName, Options options ) throws Descriptors.DescriptorValidationException { Descriptors.FileDescriptor[] dependencyArray = dependencies.toArray( new Descriptors.FileDescriptor[0] ); Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom( DescriptorProtos.FileDescriptorProto.newBuilder() .setName( szEntityName + options.getDescriptorFileExtend() ) .addMessageType( descriptorBuilder.build() ) .build(), dependencyArray); return fileDescriptor; } @Override public Descriptors.Descriptor transform( Map dynamicObject, Set exceptedKeys, Options options ) { return this.transform0( dynamicObject, "Map_root", exceptedKeys, options ); } @Override public 
DescriptorProtos.FieldDescriptorProto.Builder transform( Collection dynamicObject, Class elementType, String key, int fieldNumber, Options options ) { DescriptorProtos.FieldDescriptorProto.Type fieldType = this.reinterpret( elementType ); return DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( key ) .setNumber( fieldNumber ) .setType( fieldType ) .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED ); } @Override public Descriptors.Descriptor transformBean( Class clazz, Object dynamicObject, Set exceptedKeys, Options options ) { return this.transformBean0( clazz, "", dynamicObject, exceptedKeys, options ); } protected Descriptors.Descriptor transformBean0( Class clazz, String thisKey, Object dynamicObject, Set exceptedKeys, Options options ) { if ( clazz == null ) { return null; } try { String szEntityName = options.formatFileDescType( clazz ); DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder(); descriptorBuilder.setName( szEntityName ); List dependencies = new ArrayList<>(); boolean includeSuperClass = clazz.getClassLoader() != null; Method[] methods = includeSuperClass ? clazz.getMethods() : clazz.getDeclaredMethods(); Map methodOrderMap = new TreeMap<>(); // Unified methods order accessing all services. for ( int i = 0; i < methods.length; ++i ) { try { Method method = methods[i]; if ( Modifier.isPublic( method.getModifiers() ) ) { String key = JavaBeans.getGetterMethodKeyName( method ); if( !StringUtils.isEmpty( key ) ) { if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) { key = JavaBeans.methodKeyNameLowerCaseNormalize( key ); if( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } methodOrderMap.put( key, method ); } } } } catch ( Exception ignore ) { ignore.printStackTrace(); // Do nothing. } } int fieldNumber = 1; for ( Map.Entry kv: methodOrderMap.entrySet() ) { try { String key = kv.getKey(); Method method = kv.getValue(); Class elemRetType = method.getReturnType(); DescriptorProtos.FieldDescriptorProto.Type fieldType = this.reinterpret( elemRetType ); DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder; Class dependenceComponentType = null; if( Collection.class.isAssignableFrom( elemRetType ) ) { Type gt = method.getGenericReturnType(); String[] genericTypeNames = ReflectionUtils.extractGenericClassNames( gt.getTypeName() ); if( genericTypeNames != null && genericTypeNames.length > 0 ) { String genericTypeName = genericTypeNames[ 0 ]; if( !genericTypeName.equals( "?" 
) && !genericTypeName.equals( Object.class.getSimpleName() ) ) { try { dependenceComponentType = this.getClass().getClassLoader().loadClass( genericTypeName ); fieldType = this.reinterpret( dependenceComponentType ); } catch ( ClassNotFoundException e ) { continue; } } } fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( key ) .setNumber( fieldNumber ) .setType( fieldType ) .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED ); } else if( elemRetType.isArray() && !byte[].class.isAssignableFrom( elemRetType ) ) { Class componentType = elemRetType.getComponentType(); fieldType = this.reinterpret( componentType ); dependenceComponentType = componentType; fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( key ) .setNumber( fieldNumber ) .setType( this.reinterpret( componentType ) ) .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED ); } else { fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( key ) .setNumber( fieldNumber ) .setType( fieldType ); } fieldNumber++; if ( fieldType == DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) { Class nestedClass = method.getReturnType(); Object dyChild = null; if ( dynamicObject != null ) { try { method.setAccessible( true ); dyChild = method.invoke( dynamicObject ); } catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) { dyChild = null; } } if ( !clazz.equals( nestedClass ) ) { Descriptors.Descriptor nestedDescriptor; if ( dependenceComponentType != null ) { // Array / List can`t uses dynamic object. nestedDescriptor = this.transform0( dependenceComponentType, szEntityName + "_" + key, null, exceptedKeys, options ); } else { nestedDescriptor = this.transform0( nestedClass, szEntityName + "_" + key, dyChild, exceptedKeys, options ); } if( nestedDescriptor == null ) { continue; } fieldBuilder.setTypeName( nestedDescriptor.getFullName() ); dependencies.add( nestedDescriptor.getFile() ); } else { fieldBuilder.setTypeName( szEntityName ); } } descriptorBuilder.addField( fieldBuilder ); } catch ( Exception e ) { throw new ProtobufEncodeException( e ); } } Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options ); return fileDescriptor.findMessageTypeByName( szEntityName ); } catch ( Descriptors.DescriptorValidationException e ) { e.printStackTrace(); return null; } } @Override public Descriptors.Descriptor transform( Class clazz, Object dynamicObject, Set exceptedKeys, Options options ) { if( dynamicObject instanceof Map ) { return this.transform( (Map) dynamicObject, exceptedKeys, options ); } return this.transformBean( clazz, dynamicObject, exceptedKeys, options ); } protected Descriptors.Descriptor transform0( Class clazz, String thisKey, Object dynamicObject, Set exceptedKeys, Options options ) { if( dynamicObject instanceof Map ) { return this.transform0( (Map) dynamicObject, thisKey, exceptedKeys, options ); } return this.transformBean0( clazz, thisKey, dynamicObject, exceptedKeys, options ); } @Override public DescriptorProtos.FieldDescriptorProto.Type reinterpret( Class type ) { if ( type == int.class || type == Integer.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32; } else if ( type == long.class || type == Long.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT64; } else if ( type == float.class || type == Float.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_FLOAT; } else 
if ( type == double.class || type == Double.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_DOUBLE; } else if ( type == String.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING; } else if ( type == boolean.class || type == Boolean.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_BOOL; } else if ( type == byte[].class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_BYTES; } else if ( type == short.class || type == Short.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32; } else if ( type == byte.class || type == Byte.class ) { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32; } else { return DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE; } } @Override public DynamicMessage encode( Descriptors.Descriptor descriptor, Object dynamicObject, Set exceptedKeys, Options options ) { if( PrimitiveWrapper.isSupportedPrimitive( dynamicObject.getClass() ) ) { dynamicObject = PrimitiveWrapper.wrap( dynamicObject ); } else if( RepeatedWrapper.isSupportedRepeated( dynamicObject.getClass() ) ) { dynamicObject = RepeatedWrapper.wrap( dynamicObject ); } else if( dynamicObject instanceof Map ) { return this.encode( descriptor, (Map) dynamicObject, exceptedKeys, options ); } return this.encodeBean( descriptor, dynamicObject, exceptedKeys, options ); } @Override public DynamicMessage encodeBean( Descriptors.Descriptor descriptor, Object dynamicObject, Set exceptedKeys, Options options ) { if ( descriptor == null || dynamicObject == null ) { return null; } DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor ); for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) { try { String fieldName = fieldDescriptor.getName(); // Skip excluded keys if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) { continue; } try { String szGetterMethod = JavaBeans.MethodMajorKeyGet + JavaBeans.methodKeyNameUpperCaseNormalize( fieldName ); Method getter ; try { getter = dynamicObject.getClass().getMethod( szGetterMethod ); } catch ( NoSuchMethodException e ) { getter = null; } if ( getter == null && fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.BOOL ) { szGetterMethod = JavaBeans.MethodMajorKeyIs + JavaBeans.methodKeyNameUpperCaseNormalize( fieldName ); getter = dynamicObject.getClass().getMethod( szGetterMethod ); } if ( getter != null ) { Object value = getter.invoke( dynamicObject ); if ( value != null ) { if ( fieldDescriptor.isRepeated() ) { if ( value instanceof Collection ) { Collection collection = (Collection) value; if ( !collection.isEmpty() ) { Class componentType = collection.iterator().next().getClass(); if ( componentType.isPrimitive() ) { for ( Object item : (Collection) value ) { messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue( item, fieldDescriptor.getType() ) ); } } else { if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor componentDesc = fieldDescriptor.getMessageType(); for ( Object item : collection ) { DynamicMessage dynamicMessage = this.encode( componentDesc, item, exceptedKeys, options ); messageBuilder.addRepeatedField( fieldDescriptor, dynamicMessage ); } } else { for ( Object item : collection ) { messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue( item, fieldDescriptor.getType() ) ); } } } } } else if ( value.getClass().isArray() ) { Class componentType = value.getClass().getComponentType(); if ( 
componentType.isPrimitive() ) { int length = Array.getLength( value ); for ( int i = 0; i < length; ++i ) { Object element = Array.get( value, i ); messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue(element, fieldDescriptor.getType()) ); } } else { if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor componentDesc = fieldDescriptor.getMessageType(); for ( Object item : (Object[]) value ) { DynamicMessage dynamicMessage = this.encode( componentDesc, item, exceptedKeys, options ); messageBuilder.addRepeatedField( fieldDescriptor, dynamicMessage ); } } else { for ( Object item : (Object[]) value ) { messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue( item, fieldDescriptor.getType() ) ); } } } } else { throw new IllegalArgumentException( "Expected a Collection for repeated field: " + fieldName ); } } else { this.encodeElement( fieldDescriptor, messageBuilder, value, exceptedKeys, options ); } } } } catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException ignore ) { //ignore.printStackTrace(); } } catch ( Exception e ) { // Log and continue processing other fields e.printStackTrace(); } } return messageBuilder.build(); } protected void encodeElement( Descriptors.FieldDescriptor fieldDescriptor, DynamicMessage.Builder messageBuilder, Object value, Set exceptedKeys, Options options ) { if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType(); messageBuilder.setField( fieldDescriptor, this.encode( nestedDescriptor, value, exceptedKeys, options ) ); } else { messageBuilder.setField( fieldDescriptor, this.reinterpretFieldValue( value, fieldDescriptor.getType() ) ); } } @Override public DynamicMessage encode( Descriptors.Descriptor descriptor, Map dynamicObject, Set exceptedKeys, Options options ) { if ( descriptor == null || dynamicObject == null ) { return null; } try { DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor ); for ( Object em : dynamicObject.entrySet() ) { Map.Entry entry = (Map.Entry) em; this.encodeEntry( descriptor, entry.getKey().toString(), entry.getValue(), messageBuilder, exceptedKeys, options ); } return messageBuilder.build(); } catch ( Exception e ) { e.printStackTrace(); return null; } } protected Object encodeRepeatedValue ( Descriptors.FieldDescriptor fieldDescriptor, Object val, Set exceptedKeys, Options options ) { if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor componentDesc = fieldDescriptor.getMessageType(); return this.encode( componentDesc, val, exceptedKeys, options ); } else { return this.reinterpretFieldValue( val, fieldDescriptor.getType() ); } } public void encodeEntry( Descriptors.Descriptor descriptor, String key, Object value, DynamicMessage.Builder messageBuilder, Set exceptedKeys, Options options ) { if ( exceptedKeys != null && exceptedKeys.contains( key ) ) { return; } Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName( key ); if ( fieldDescriptor == null ) { return; } if ( value == null ) { if ( fieldDescriptor.isRepeated() ) { messageBuilder.setField( fieldDescriptor, List.of() ); } else { messageBuilder.clearField( fieldDescriptor ); } } else if ( fieldDescriptor.isRepeated() ) { List values = new ArrayList<>(); if ( value instanceof Collection ) { for ( Object item : (Collection) value ) { values.add( 
this.encodeRepeatedValue( fieldDescriptor, item, exceptedKeys, options ) ); } } else if ( value.getClass().isArray() ) { for ( int i = 0; i < Array.getLength( value ); i++ ) { values.add( this.encodeRepeatedValue( fieldDescriptor, Array.get( value, i ), exceptedKeys, options ) ); } } messageBuilder.setField( fieldDescriptor, values ); } else { this.encodeElement( fieldDescriptor, messageBuilder, value, exceptedKeys, options ); } } protected Object reinterpretFieldValue( Object value, Descriptors.FieldDescriptor.Type fieldType ) { switch ( fieldType ) { case SINT32: case SFIXED32: case INT32: { return ((Number) value).intValue(); } case INT64: case SINT64: case SFIXED64: { return ((Number) value).longValue(); } case FLOAT: { return ((Number) value).floatValue(); } case DOUBLE: { return ((Number) value).doubleValue(); } case STRING: { return value.toString(); } case BOOL: { return (Boolean) value; } case BYTES: { return ByteString.copyFrom( (byte[]) value ); } default: { return value; } } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericFieldProtobufDecoder.java ================================================ package com.pinecone.ulf.util.protobuf; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.pinecone.framework.lang.field.FieldEntity; import com.pinecone.framework.unit.KeyValue; public class GenericFieldProtobufDecoder extends GenericBeanProtobufDecoder implements FieldProtobufDecoder { @Override public Map.Entry[] decodeEntries( Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ) { if ( descriptor == null || dynamicMessage == null ) { return null; } List fieldDescriptors = descriptor.getFields(); @SuppressWarnings( "unchecked" ) Map.Entry[] result = new Map.Entry[ fieldDescriptors.size() ]; int i = 0; for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) { try { String fieldName = fieldDescriptor.getName(); // Skip excluded keys if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) { continue; } Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor ); if ( value != null ) { if ( fieldDescriptor.isRepeated() ) { List values = (List) value; List decodedValues = new ArrayList<>(); for ( Object item : values ) { decodedValues.add( this.decodeFieldValue( fieldDescriptor, item, item.getClass(), options ) ); } result[ i ] = new KeyValue<>( fieldName, decodedValues ); } else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType(); result[ i ] = new KeyValue<>( fieldName, this.decodeMap( LinkedHashMap.class, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ) ); } else { result[ i ] = new KeyValue<>( fieldName, this.decodeFieldValue( fieldDescriptor, value, value.getClass(), options ) ); } } ++i; } catch ( Exception e ) { e.printStackTrace(); } } return result; } @Override public void decodeEntries( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options ) { this.decodeEntries0( entities, descriptor, dynamicMessage, exceptedKeys, options, false ); } @Override public Object[] decodeValues( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage 
dynamicMessage, Set exceptedKeys, Options options ) { return this.decodeEntries0( entities, descriptor, dynamicMessage, exceptedKeys, options, true ); } protected Object[] decodeEntries0( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set exceptedKeys, Options options, boolean bEvalValue ) { if ( descriptor == null || dynamicMessage == null || entities == null ) { return null; } try { int i = 0; Object[] vals = null; if( bEvalValue ) { vals = new Object[ entities.length ]; } for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) { String fieldName = fieldDescriptor.getName(); if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) { continue; } Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor ); if ( value != null ) { FieldEntity entity = entities[ i ]; if ( fieldDescriptor.isRepeated() ) { Object decodedValues = this.decodeRepeated( value, fieldDescriptor, options, entity.getType(), entity.getGenericTypeLabel() ); entity.setValue( decodedValues ); } else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) { Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType(); Object nestedBean; Class nestedType = entity.getType(); if( Map.class.isAssignableFrom( nestedType ) ) { if( nestedType.isInterface() && Map.class.isAssignableFrom( nestedType ) ) { nestedType = options.getDefaultMapType(); } nestedBean = this.decodeMap( nestedType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ); } else { nestedBean = this.decode( nestedType, entity.getGenericTypeLabel(), nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ); } entity.setValue( nestedBean ); } else { entity.setValue( this.decodeFieldValue( fieldDescriptor, value, entity.getType(), options ) ); } if( bEvalValue ) { vals[ i ] = entity.getValue(); } } ++i; } return vals; } catch ( Exception e ) { e.printStackTrace(); return null; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericFieldProtobufEncoder.java ================================================ package com.pinecone.ulf.util.protobuf; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.pinecone.framework.lang.field.FieldEntity; public class GenericFieldProtobufEncoder extends GenericBeanProtobufEncoder implements FieldProtobufEncoder { @Override public Descriptors.Descriptor transform( Map.Entry[] fields, String szEntityName, Set exceptedKeys, Options options ) { if ( fields == null ) { return null; } try { DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder(); List dependencies = new ArrayList<>(); int fieldNumber = 1; for ( Map.Entry entry : fields ) { String key = entry.getKey().toString(); if ( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } descriptorBuilder.addField( this.transformEntry( key, entry.getValue(), null, null, fieldNumber, dependencies, exceptedKeys, options, szEntityName + "_" + key ) ); ++fieldNumber; } descriptorBuilder.setName( szEntityName ); Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options ); return fileDescriptor.findMessageTypeByName( szEntityName ); } catch ( 
Descriptors.DescriptorValidationException e ) { e.printStackTrace(); return null; } } @Override public Descriptors.Descriptor transform( FieldEntity[] fields, String szEntityName, Set exceptedKeys, Options options ) { if ( fields == null ) { return null; } try { DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder(); List dependencies = new ArrayList<>(); int fieldNumber = 1; for ( FieldEntity entry : fields ) { String key = entry.getName(); if ( exceptedKeys != null && exceptedKeys.contains( key ) ) { continue; } descriptorBuilder.addField( this.transformEntry( key, entry.getValue(), entry.getType(), entry.getGenericTypeLabel(), fieldNumber, dependencies, exceptedKeys, options, szEntityName + "_" + key ) ); ++fieldNumber; } descriptorBuilder.setName( szEntityName ); Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options ); return fileDescriptor.findMessageTypeByName( szEntityName ); } catch ( Descriptors.DescriptorValidationException e ) { e.printStackTrace(); return null; } } @Override public DynamicMessage encode( Descriptors.Descriptor descriptor, Map.Entry[] fields, Set exceptedKeys, Options options ) { if ( descriptor == null || fields == null ) { return null; } try { DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor ); for ( Map.Entry entry : fields ) { this.encodeEntry( descriptor, entry.getKey().toString(), entry.getValue(), messageBuilder, exceptedKeys, options ); } return messageBuilder.build(); } catch ( Exception e ) { e.printStackTrace(); return null; } } @Override public DynamicMessage encode( Descriptors.Descriptor descriptor, FieldEntity[] fields, Set exceptedKeys, Options options ) { if ( descriptor == null || fields == null ) { return null; } try { DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor ); for ( FieldEntity entry : fields ) { this.encodeEntry( descriptor, entry.getName(), entry.getValue(), messageBuilder, exceptedKeys, options ); } return messageBuilder.build(); } catch ( Exception e ) { e.printStackTrace(); return null; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/Options.java ================================================ package com.pinecone.ulf.util.protobuf; import java.util.LinkedHashMap; import com.pinecone.framework.system.prototype.Pinenut; public class Options implements Pinenut { public static final FileDescriptorFormater DefaultFileDescriptorFormater = new FileDescriptorFormater() { @Override public String format( Class type ) { String neo = type.getName().replace( '.', '_' ); if( neo.startsWith( "[" ) ) { neo = neo.replace( "[", "" ); neo += WolfProtobufConstants.ArrayTransformedName; } return neo.replaceAll( "[^a-zA-Z0-9_]", "_" ); } }; public static final FileDescriptorFormater DefaultFileDescriptorSimpleFormater = new FileDescriptorFormater() { @Override public String format( Class type ) { return type.getSimpleName(); } }; public static final DescriptorNameNormalizer UnderlineDescriptorNameNormalizer = new DescriptorNameNormalizer() { @Override public String normalize( String bad ) { if ( bad == null ) { return null; } return bad.replaceAll( "[^a-zA-Z0-9_]", "_" ); } }; public static final Class DefaultMapType = LinkedHashMap.class; public static final String DescriptorFileExtend = "$File"; public static final Options DefaultOptions = new Options(); public static final Options 
DefaultSimpleOptions = new Options() { @Override public String formatFileDescType( Class type ) { return this.formatFileDescType( type, Options.DefaultFileDescriptorSimpleFormater ); } }; protected FileDescriptorFormater mFileDescriptorFormater; protected String mszDescriptorFileExtend; protected DescriptorNameNormalizer mDescriptorNameNormalizer; protected Class mDefaultMapType; public Options( FileDescriptorFormater formater, String szDescriptorFileExtend, Class defaultMapType ) { this.mFileDescriptorFormater = formater; this.mszDescriptorFileExtend = szDescriptorFileExtend; this.mDefaultMapType = defaultMapType; this.mDescriptorNameNormalizer = Options.UnderlineDescriptorNameNormalizer; } public Options() { this( Options.DefaultFileDescriptorFormater, Options.DescriptorFileExtend, Options.DefaultMapType ); } public String formatFileDescType( Class type, FileDescriptorFormater formater ) { return formater.format( type ); } public String formatFileDescType( Class type ) { return this.formatFileDescType( type, this.mFileDescriptorFormater ); } public String getDescriptorFileExtend() { return this.mszDescriptorFileExtend; } @SuppressWarnings( "unchecked" ) public Class getDefaultMapType() { return (Class) this.mDefaultMapType; } public void setDescriptorNameNormalizer( DescriptorNameNormalizer descriptorNameNormalizer ) { this.mDescriptorNameNormalizer = descriptorNameNormalizer; } public Options applyDescriptorNameNormalizer( DescriptorNameNormalizer descriptorNameNormalizer ) { this.mDescriptorNameNormalizer = descriptorNameNormalizer; return this; } public String normalizeDescriptorName( String szName ) { return this.mDescriptorNameNormalizer.normalize( szName ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/PrimitiveWrapper.java ================================================ package com.pinecone.ulf.util.protobuf; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.pinecone.framework.system.prototype.Pinenut; public class PrimitiveWrapper implements Pinenut { public final static String FieldName = "value"; private final T value; public PrimitiveWrapper( T value ) { if ( value == null || PrimitiveWrapper.isSupportedPrimitive(value) ) { this.value = value; } else { throw new IllegalArgumentException( "Unsupported primitive type: " + value.getClass() ); } } public T getValue() { return this.value; } public boolean isPrimitive() { return this.value == null || PrimitiveWrapper.isSupportedPrimitive(this.value); } public static boolean isSupportedPrimitive( Object obj ) { return PrimitiveWrapper.isSupportedPrimitive( obj.getClass() ); } public static boolean isSupportedPrimitive( Class obj ) { return obj.equals( String.class ) || !BeanProtobufEncoder.DefaultEncoder.reinterpret( obj ).equals( DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) ; } public static PrimitiveWrapper wrap( T val ) { return new PrimitiveWrapper<>( val ); } public DescriptorProtos.FieldDescriptorProto.Type reinterpret() { return BeanProtobufEncoder.DefaultEncoder.reinterpret( this.value.getClass() ); } public Descriptors.Descriptor transform() { return PrimitiveWrapper.transform( this.value.getClass() ); } public static Descriptors.Descriptor transform( Class elemClass ) { try{ DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder(); String szEntityName = PrimitiveWrapper.class.getSimpleName() + "_" + elemClass.getSimpleName(); 
descriptorBuilder.setName( szEntityName ); DescriptorProtos.FieldDescriptorProto.Type fieldType = BeanProtobufEncoder.DefaultEncoder.reinterpret( elemClass ); DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( PrimitiveWrapper.FieldName ) .setNumber( 1 ) .setType( fieldType ); descriptorBuilder.addField( fieldBuilder ); Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom( DescriptorProtos.FileDescriptorProto.newBuilder() .setName( szEntityName + "$FILE" ) .addMessageType( descriptorBuilder.build() ) .build(), new Descriptors.FileDescriptor[0]); return fileDescriptor.findMessageTypeByName( szEntityName ); } catch ( Descriptors.DescriptorValidationException e ) { return null; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/ProtobufEncodeException.java ================================================ package com.pinecone.ulf.util.protobuf; import com.pinecone.framework.system.PineRuntimeException; public class ProtobufEncodeException extends PineRuntimeException { public ProtobufEncodeException () { super(); } public ProtobufEncodeException ( String message ) { super(message); } public ProtobufEncodeException ( String message, Throwable cause ) { super(message, cause); } public ProtobufEncodeException ( Throwable cause ) { super(cause); } protected ProtobufEncodeException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) { super( message, cause, enableSuppression, writableStackTrace ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/ProtobufUtils.java ================================================ package com.pinecone.ulf.util.protobuf; import java.lang.reflect.Method; import java.lang.reflect.Type; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.pinecone.framework.util.ReflectionUtils; public final class ProtobufUtils { public static Object evalValue(DynamicMessage dynamicMessage, Descriptors.FieldDescriptor fieldDescriptor ) { if ( !fieldDescriptor.isRepeated() ) { boolean bHasField = dynamicMessage.hasField( fieldDescriptor ); if ( !bHasField ) { return null; } } return dynamicMessage.getField( fieldDescriptor ); } static Class loadSingleGenericType( Class parent, String componentGenericLabel ) { try { String[] genericTypeNames = ReflectionUtils.extractGenericClassNames( componentGenericLabel ); if( genericTypeNames != null && genericTypeNames.length > 0 ) { String genericTypeName = genericTypeNames[ 0 ]; if( !genericTypeName.equals( "?" 
) && !genericTypeName.equals( Object.class.getSimpleName() ) ) { return parent.getClassLoader().loadClass( genericTypeName ); } } } catch ( ClassNotFoundException e ) { return null; } return null; } public static String evalSetterGenericLabel( Method setter ) { Type[] gType = setter.getGenericParameterTypes(); String szGType ; if ( gType.length > 0 ) { szGType = gType[ 0 ].getTypeName(); } else { szGType = null; } return szGType; } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/RepeatedWrapper.java ================================================ package com.pinecone.ulf.util.protobuf; import java.util.Collection; import java.util.Set; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.Units; import com.pinecone.framework.util.ClassUtils; public class RepeatedWrapper implements Pinenut { public final static String FieldName = "values"; private T values; private final Class componentType; public RepeatedWrapper( T value, Class componentType ) { if ( value == null || RepeatedWrapper.isSupportedRepeated(value) ) { this.values = value; this.componentType = componentType; } else { throw new IllegalArgumentException( "Unsupported repeated type: " + value.getClass() ); } } public RepeatedWrapper( T value ) { this( value, value.getClass().getComponentType() ); } public T getValues() { return this.values; } public void setValues( T values ) { this.values = values; } public Class getComponentType() { return this.componentType; } public boolean isRepeated() { return this.values == null || RepeatedWrapper.isSupportedRepeated(this.values); } public static boolean isSupportedRepeated( Object obj ) { return RepeatedWrapper.isSupportedRepeated( obj.getClass() ); } public static boolean isSupportedRepeated( Class obj ) { return obj.isArray() || Collection.class.isAssignableFrom( obj ) || !BeanProtobufEncoder.DefaultEncoder.reinterpret( obj ).equals( DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) ; } public static RepeatedWrapper wrap( T val ) { return new RepeatedWrapper<>( val ); } public static RepeatedWrapper wrap( T val, Class componentType ) { return new RepeatedWrapper<>( val, componentType ); } public Descriptors.Descriptor transform( BeanProtobufEncoder encoder ) { return RepeatedWrapper.transform( this.values.getClass(), this.componentType, encoder ); } public static Descriptors.Descriptor transform( Class elemClass, Class componentType, BeanProtobufEncoder encoder ) { try{ DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder(); String szEntityName = RepeatedWrapper.class.getSimpleName() + "_" + elemClass.getSimpleName(); szEntityName = szEntityName.replace( ClassUtils.ARRAY_SUFFIX, WolfProtobufConstants.ArrayTransformedName ); descriptorBuilder.setName( szEntityName ); DescriptorProtos.FieldDescriptorProto.Type fieldType = BeanProtobufEncoder.DefaultEncoder.reinterpret( componentType ); DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder() .setName( RepeatedWrapper.FieldName ) .setNumber( 1 ) .setType( fieldType ) .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED ); Descriptors.FileDescriptor[] objectDep = new Descriptors.FileDescriptor[0]; if ( fieldType == DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) { Descriptors.Descriptor nestedDescriptor = 
encoder.transform( componentType, null,null, Units.emptySet() ); if ( nestedDescriptor != null ) { fieldBuilder.setTypeName( nestedDescriptor.getFullName() ); objectDep = new Descriptors.FileDescriptor[] { nestedDescriptor.getFile() }; } } descriptorBuilder.addField( fieldBuilder ); Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom( DescriptorProtos.FileDescriptorProto.newBuilder() .setName( szEntityName + "$REPEATED_FILE" ) .addMessageType( descriptorBuilder.build() ) .build(), objectDep); return fileDescriptor.findMessageTypeByName( szEntityName ); } catch ( Descriptors.DescriptorValidationException e ) { return null; } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/WolfProtobufConstants.java ================================================ package com.pinecone.ulf.util.protobuf; public final class WolfProtobufConstants { public static final String ArrayTransformedName = "_ARRAY"; } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/template/UTRFreeMarkerProvider.java ================================================ package com.pinecone.ulf.util.template; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.Map; import freemarker.template.Configuration; import freemarker.template.Template; import freemarker.template.TemplateException; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.prototype.Objectom; import com.pinecone.framework.util.template.UniformTemplateRenderer; public class UTRFreeMarkerProvider implements UniformTemplateRenderer { private Configuration configuration; public UTRFreeMarkerProvider( Configuration configuration ) { this.configuration = configuration; } public UTRFreeMarkerProvider() { this( new Configuration(Configuration.VERSION_2_3_31) ); } protected Map makeContext( Objectom vars ) { Object proto = vars.prototype().proto(); if (proto instanceof Map) { return (Map) proto; } else { return vars.toMap(); } } @Override public String render(String tpl, Objectom vars) { try ( StringWriter writer = new StringWriter() ) { this.render( tpl, vars, writer ); return writer.toString(); } catch ( IOException e ) { throw new ProxyProvokeHandleException( "Error while rendering template", e ); } } @Override public void render( String tpl, Objectom vars, Writer writer ) { try { Template template = new Template( "anonymous", tpl, this.configuration ); Map context = this.makeContext(vars); template.process( context, writer ); } catch ( TemplateException | IOException e ) { throw new ProxyProvokeHandleException( "Error while rendering template", e ); } } } ================================================ FILE: Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/template/UTRThymeleafProvider.java ================================================ package com.pinecone.ulf.util.template; import java.io.Writer; import java.util.Map; import org.thymeleaf.TemplateEngine; import org.thymeleaf.context.Context; import org.thymeleaf.templateresolver.ITemplateResolver; import org.thymeleaf.templateresolver.StringTemplateResolver; import com.pinecone.framework.system.prototype.Objectom; import com.pinecone.framework.util.template.UniformTemplateRenderer; public class UTRThymeleafProvider implements UniformTemplateRenderer { protected TemplateEngine templateEngine; public UTRThymeleafProvider( TemplateEngine engine ) { 
this.templateEngine = engine; } public UTRThymeleafProvider( TemplateEngine engine, ITemplateResolver resolver ) { this( engine ); this.templateEngine.setTemplateResolver( resolver ); } public UTRThymeleafProvider() { this( new TemplateEngine(), new StringTemplateResolver() ); } protected Context makeContext( Objectom vars ) { Context context = new Context(); Object proto = vars.prototype().proto(); if( proto instanceof Map ) { context.setVariables( (Map)proto ); } else { context.setVariables( vars.toMap() ); } return context; } @Override public String render( String tpl, Objectom vars ) { Context context = this.makeContext( vars ); return this.templateEngine.process( tpl, context ); } @Override public void render( String tpl, Objectom vars, Writer writer ) { Context context = this.makeContext( vars ); this.templateEngine.process( tpl, context, writer ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/test/java/com/TestBson.java ================================================ package com; import com.pinecone.Pinecone; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.ulf.util.bson.UlfJSONCompiler; import com.pinecone.ulf.util.bson.UlfJSONDecompiler; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; public class TestBson { public static void testCompiler() throws Exception { UlfJSONCompiler jsonCompiler = new UlfJSONCompiler(); JSONObject object = new JSONMaptron( "{ key:'ssss jesus christ, hahahaha', int64:64, float64:3.1415926, bool: false, 'null': null, next: { arr: ['ha', 'xi', { k: true, a: [], obj:{} }] } }" ); try( OutputStream os = new FileOutputStream( "E:/test.bson" ) ){ jsonCompiler.compile( object, os ); } } public static void testDecompiler() throws Exception { InputStream is = new FileInputStream( "E:/test.bson" ); UlfJSONDecompiler decompiler = new UlfJSONDecompiler( is ); Object jo = decompiler.nextValue(); Debug.trace( jo ); } public static void main( String[] args ) throws Exception { //String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init( (Object...cfg )->{ //TestBson.testCompiler(); TestBson.testDecompiler(); return 0; }, (Object[]) args ); } } ================================================ FILE: Pinecones/Ulfhedinn/src/test/java/com/UTRTests.java ================================================ package com; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.template.UniformTemplateRenderer; import com.pinecone.ulf.util.template.UTRFreeMarkerProvider; import com.pinecone.ulf.util.template.UTRThymeleafProvider; import org.junit.jupiter.api.Test; import org.thymeleaf.TemplateEngine; import org.thymeleaf.context.Context; import org.thymeleaf.templateresolver.StringTemplateResolver; import java.util.HashMap; import java.util.Map; import freemarker.template.Configuration; class DummyBean { private String key1; public String getKey1() { return this.key1; } public void setKey1( String key1 ) { this.key1 = key1; } } public class UTRTests { @Test void testThymeleaf() { UniformTemplateRenderer renderer = new UTRThymeleafProvider(); // Map variables = new HashMap<>(); // variables.put("key1", "Test"); DummyBean variables = new DummyBean(); variables.setKey1( "BeanTest" ); String jsonTemplate = "{ \"name\": \"[(${key1})]\" }"; Debug.trace( renderer.render( 
jsonTemplate, variables ) ); } @Test void testAlmondUTR() { UniformTemplateRenderer renderer = UniformTemplateRenderer.DefaultRenderer; Map variables = new HashMap<>(); variables.put("key1", "Test"); Debug.trace( renderer.render( "{ \"name\": \"${key1}\" }", variables ) ); } @Test void testFreemarker() { Configuration cfg = new Configuration( Configuration.VERSION_2_3_31 ); cfg.setDefaultEncoding("UTF-8"); UTRFreeMarkerProvider provider = new UTRFreeMarkerProvider( cfg ); Map variables = new JSONMaptron( "{ name: test, age: 30, more: { key: 1.364 } }" ); String templateContent = "{ \"name\": \"${name}\", \"age\": ${age}, \"more.key\": ${more.key} }"; String result = provider.render(templateContent, variables); Debug.echo( result ); } } ================================================ FILE: Pinecones/pom.xml ================================================ sauron com.sauron 1.2.7 4.0.0 com.pinecones pinecones pom 2.5.1 pinecone ulfhedinn slime jelly summer springram ulf-lib-construction ulf-lib-oltp-rdb org.springframework.boot spring-boot-starter-web ================================================ FILE: Pinecones/ulf-lib-construction/pom.xml ================================================ pinecones com.pinecones 2.5.1 4.0.0 com.pinecone.ulf ulf-lib-construction 1.2.1 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 provided org.springframework spring-core 5.3.29 org.springframework spring-beans 5.3.27 ================================================ FILE: Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/aop/UlfurEnableAspectProxy.java ================================================ package com.pinecone.ulf.beans.aop; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.EnableAspectJAutoProxy; @Configuration @EnableAspectJAutoProxy public class UlfurEnableAspectProxy { } ================================================ FILE: Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/StructureAnnotationConfiguration.java ================================================ package com.pinecone.ulf.beans.construction; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration public class StructureAnnotationConfiguration { @Bean public StructureAnnotationProcessor structureAnnotationProcessor( ConfigurableListableBeanFactory beanFactory ) { return new StructureAnnotationProcessor( beanFactory ); } } ================================================ FILE: Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/StructureAnnotationProcessor.java ================================================ package com.pinecone.ulf.beans.construction; import org.springframework.beans.BeansException; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.config.InstantiationAwareBeanPostProcessor; import java.lang.reflect.Field; import java.lang.reflect.Method; import com.pinecone.framework.system.construction.ReuseCycle; import com.pinecone.framework.system.construction.Structure; import com.pinecone.framework.util.ReflectionUtils; public class StructureAnnotationProcessor implements InstantiationAwareBeanPostProcessor { private final ConfigurableListableBeanFactory beanFactory; public StructureAnnotationProcessor(ConfigurableListableBeanFactory beanFactory) { this.beanFactory = 
beanFactory; } @Override public boolean postProcessAfterInstantiation(Object bean, String beanName) throws BeansException { Class clazz = bean.getClass(); ReflectionUtils.doWithFields(clazz, field -> { if ( field.isAnnotationPresent(Structure.class) ) { handleStructureField(field, bean); } }); ReflectionUtils.doWithMethods(clazz, method -> { if ( method.isAnnotationPresent(Structure.class) ) { handleStructureMethod(method, bean); } }); return true; } private void handleStructureField(Field field, Object bean) throws IllegalAccessException { Structure structure = field.getAnnotation(Structure.class); Object dependency = resolveDependency(structure, field.getType()); field.setAccessible(true); field.set(bean, dependency); } private void handleStructureMethod(Method method, Object bean) { Structure structure = method.getAnnotation(Structure.class); Object dependency = resolveDependency(structure, method.getParameterTypes()[0]); ReflectionUtils.invokeMethod(method, bean, dependency); } private Object resolveDependency(Structure structure, Class type) { String beanName = structure.name(); Object dependency; if ( !beanName.isEmpty() ) { dependency = this.beanFactory.getBean(beanName); } else if ( structure.cycle() == ReuseCycle.Singleton || structure.cycle() == ReuseCycle.PreSingleton ) { dependency = this.beanFactory.getBean(type); } else if ( structure.cycle() == ReuseCycle.Disposable || structure.cycle() == ReuseCycle.Recyclable ) { dependency = this.beanFactory.createBean(type); } else { throw new UnsupportedOperationException( "Unsupported reuse cycle: " + structure.cycle() ); } return dependency; } } ================================================ FILE: Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/UlfInstanceManufacturer.java ================================================ package com.pinecone.ulf.beans.construction; import org.springframework.context.ConfigurableApplicationContext; import com.pinecone.framework.system.construction.InstanceManufacturer; public interface UlfInstanceManufacturer extends InstanceManufacturer { ConfigurableApplicationContext getApplicationContext(); } ================================================ FILE: Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/UlfurInstanceManufacturer.java ================================================ package com.pinecone.ulf.beans.construction; import java.util.ArrayList; import java.util.List; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.support.GenericBeanDefinition; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Scope; import com.pinecone.framework.system.construction.InstanceManufacturer; import com.pinecone.framework.util.Assert; public class UlfurInstanceManufacturer implements UlfInstanceManufacturer { private final ConfigurableApplicationContext context; public UlfurInstanceManufacturer() { this( new AnnotationConfigApplicationContext() ); } public UlfurInstanceManufacturer( ConfigurableApplicationContext context ) { this.context = context; } public UlfurInstanceManufacturer( Class... 
componentClasses ) { this(); for( Class cc : componentClasses ) { this.onlyRegister( cc ); } } @Override public InstanceManufacturer registerInstancing( Class type, Object instance ) { ConfigurableListableBeanFactory beanFactory = this.context.getBeanFactory(); beanFactory.registerSingleton( type.getName(), instance ); /* fix: register the supplied instance; previously this.allotInstance( type ) was registered and the instance argument was silently ignored */ this.context.refresh(); return this; } public void onlyRegister( Class type ) { DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) this.context.getBeanFactory(); GenericBeanDefinition beanDefinition = new GenericBeanDefinition(); beanDefinition.setBeanClass( type ); Scope scope = type.getAnnotation( Scope.class ); if ( scope != null ) { beanDefinition.setScope( scope.value() ); } else { beanDefinition.setScope( BeanDefinition.SCOPE_SINGLETON ); } beanFactory.registerBeanDefinition( type.getName(), beanDefinition ); } @Override public InstanceManufacturer register( Class type ) { this.onlyRegister( type ); this.refresh(); return this; } @Override public InstanceManufacturer registers( List> types ) { for ( Class type : types ) { this.onlyRegister( type ); } this.refresh(); return this; } @Override public boolean hasRegistered( Class type ) { return this.context.containsBeanDefinition(type.getName()) || this.context.containsBean(type.getName()); } @Override public List> fetchRegistered() { List > registeredClasses = new ArrayList<>(); String[] beanNames = this.context.getBeanFactory().getBeanDefinitionNames(); for ( String beanName : beanNames ) { BeanDefinition beanDefinition = this.context.getBeanFactory().getBeanDefinition(beanName); try { Class beanClass = Class.forName(beanDefinition.getBeanClassName()); registeredClasses.add(beanClass); } catch ( ClassNotFoundException e ) { Assert.provokeIrrationally( e ); } } return registeredClasses; } @Override public String[] fetchRegisteredNames() { return this.context.getBeanFactory().getBeanDefinitionNames(); } @Override public void free( Object instance ) { String[] beanNames = this.context.getBeanNamesForType( instance.getClass() ); DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) this.context.getBeanFactory(); for ( String beanName : beanNames ) { beanFactory.destroySingleton( beanName ); } } @Override public void free( Class type, Object instance ) { String beanName = type.getName(); DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) this.context.getBeanFactory(); if ( beanFactory.containsSingleton(beanName) ) { beanFactory.destroySingleton(beanName); } } @Override public Object autowire( Object that ) { this.context.getAutowireCapableBeanFactory().autowireBean( that ); return that; } @Override public Object allotInstance( String type ) { return this.context.getBean( type ); } @Override public T allotInstance( Class type ) { return this.context.getBean( type ); } @Override public void close() { this.context.close(); } @Override public void refresh() { this.context.refresh(); } @Override public ConfigurableApplicationContext getApplicationContext() { return this.context; } } ================================================ FILE: Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/CanesService.java ================================================ package com.wolf.construction; import java.util.List; import javax.annotation.Resource; import org.springframework.stereotype.Component; import com.pinecone.framework.system.construction.Structure; import com.pinecone.framework.util.Debug; import
com.pinecone.ulf.beans.aop.UlfurEnableAspectProxy; import com.pinecone.ulf.beans.construction.StructureAnnotationConfiguration; import com.pinecone.ulf.beans.construction.UlfInstanceManufacturer; import com.pinecone.ulf.beans.construction.UlfurInstanceManufacturer; @Component public class CanesService { @Resource //@Structure private FoxService foxService; @Resource private HuskyService huskyService; public void test() { Debug.trace( "Husky and fox are Canidae." ); this.foxService.digging(); this.huskyService.run(); this.foxService.attack( "Kevin" ); this.huskyService.tryFoxBlade(); } public static void main( String[] args ) throws Exception { UlfInstanceManufacturer manufacturer = new UlfurInstanceManufacturer( StructureAnnotationConfiguration.class, UlfurEnableAspectProxy.class ); manufacturer.registers( List.of( FoxService.class, HuskyService.class, CanesService.class, FoxBlade.class, CanisAspect.class ) ); CanesService canes = manufacturer.allotInstance( CanesService.class ); canes.test(); } } ================================================ FILE: Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/CanisAspect.java ================================================ package com.wolf.construction; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Before; import org.springframework.context.annotation.EnableAspectJAutoProxy; import org.springframework.stereotype.Component; import com.pinecone.framework.util.Debug; @Aspect @Component public class CanisAspect { @Before("execution(* com.wolf.construction..*(..))") public void beforeMethod() { Debug.whitef( "We are canes." ); } } ================================================ FILE: Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/FoxBlade.java ================================================ package com.wolf.construction; import javax.annotation.Resource; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import com.pinecone.framework.util.Debug; @Component @Scope(BeanDefinition.SCOPE_PROTOTYPE) //@Scope(BeanDefinition.SCOPE_SINGLETON) public class FoxBlade { @Resource private FoxService foxService; public void attack() { Debug.bluef( this.foxService.getName() + " the fox-paladin who is using fox-blade(" + this.hashCode() + ") to attack." ); } public void trying() { Debug.redf( "This fox-blade(" + this.hashCode() + ") is for fox only." ); } } ================================================ FILE: Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/FoxService.java ================================================ package com.wolf.construction; import javax.annotation.Resource; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import com.pinecone.framework.util.Debug; @Component @Scope("singleton") public class FoxService { @Resource private FoxBlade foxBlade; public void digging() { Debug.trace( "Fox is digging!" ); } public String getName() { return "Donovan"; } public void attack( String target ) { Debug.redf( "Preparing attack." ); this.foxBlade.attack(); Debug.greenf( "And " + target + " is dead." 
); } } ================================================ FILE: Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/HuskyService.java ================================================ package com.wolf.construction; import javax.annotation.Resource; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import com.pinecone.framework.util.Debug; @Component @Scope("singleton") public class HuskyService { @Resource private FoxBlade foxBlade; public void run() { Debug.trace( "Husky is running!" ); } public void tryFoxBlade() { this.foxBlade.trying(); } } ================================================ FILE: Pinecones/ulf-lib-oltp-rdb/pom.xml ================================================ pinecones com.pinecones 2.5.1 4.0.0 com.pinecone.ulf ulf-lib-oltp-rdb 1.2.1 jar 11 11 UTF-8 com.pinecone pinecone 2.5.1 compile mysql mysql-connector-java 8.0.26 org.xerial sqlite-jdbc 3.46.1.0 ================================================ FILE: Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/mysql/MySQLExecutor.java ================================================ package com.pinecone.ulf.rdb.mysql; import com.pinecone.framework.util.rdb.ArchRDBExecutor; import com.pinecone.framework.util.rdb.RDBHost; public class MySQLExecutor extends ArchRDBExecutor { public MySQLExecutor( RDBHost rdbHost ) { super( rdbHost ); } } ================================================ FILE: Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/mysql/MySQLHost.java ================================================ package com.pinecone.ulf.rdb.mysql; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.rdb.RDBHost; import java.sql.*; public class MySQLHost implements RDBHost { protected String mszLocation; protected String mszUsername; protected String mszPassword; protected String mszCharset; protected String mszDriver; protected Connection mGlobalConnection; public MySQLHost( String dbLocation, String dbUsername, String dbPassword ) throws SQLException { this( dbLocation, dbUsername, dbPassword, "UTF-8" ); } public MySQLHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset ) throws SQLException { this( dbLocation, dbUsername, dbPassword, dbCharset, "com.mysql.jdbc.Driver" ); } public MySQLHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset, String driver ) throws SQLException { this.mszLocation = dbLocation ; this.mszUsername = dbUsername ; this.mszPassword = dbPassword ; this.mszCharset = dbCharset ; this.mszDriver = driver ; this.connect(); } @Override public boolean isClosed() { if( this.mGlobalConnection == null ) { return true; } try { return this.mGlobalConnection.isClosed(); } catch ( SQLException e ) { Debug.cerr( e ); return false; } } @Override public void connect() throws SQLException { try{ Class.forName( this.mszDriver ); } catch ( ClassNotFoundException e ){ throw new SQLException( "JDBC Driver is not found.", "CLASS_NOT_FOUND", e ); } String url = this.mszLocation; if ( !this.mszLocation.startsWith( "jdbc:" ) ) { url = "jdbc:mysql://" + this.mszLocation + "?characterEncoding="+ this.mszCharset +"&useSSL=false"; } this.mGlobalConnection = DriverManager.getConnection( url, this.mszUsername, this.mszPassword ); } @Override public void close() throws SQLException { if( this.mGlobalConnection != null ) { this.mGlobalConnection.close(); } } @Override public Connection getConnection() { return this.mGlobalConnection; } @Override public Statement 
createStatement() throws SQLException { if( this.isClosed() ){ this.connect(); } return this.mGlobalConnection.createStatement(); } } ================================================ FILE: Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/sqlite/SQLiteExecutor.java ================================================ package com.pinecone.ulf.rdb.sqlite; import com.pinecone.framework.util.rdb.ArchRDBExecutor; import com.pinecone.framework.util.rdb.RDBHost; public class SQLiteExecutor extends ArchRDBExecutor { public SQLiteExecutor( RDBHost rdbHost ) { super( rdbHost ); } } ================================================ FILE: Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/sqlite/SQLiteHost.java ================================================ package com.pinecone.ulf.rdb.sqlite; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.rdb.RDBHost; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; public class SQLiteHost implements RDBHost { protected String mszLocation; protected String mszUsername; protected String mszPassword; protected String mszCharset; protected String mszDriver; protected Connection mGlobalConnection; public SQLiteHost( String dbLocation, String dbUsername, String dbPassword ) throws SQLException { this( dbLocation, dbUsername, dbPassword, "UTF-8" ); } public SQLiteHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset ) throws SQLException { this( dbLocation, dbUsername, dbPassword, dbCharset, "org.sqlite.JDBC" ); } public SQLiteHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset, String driver ) throws SQLException { this.mszLocation = dbLocation ; this.mszUsername = dbUsername ; this.mszPassword = dbPassword ; this.mszCharset = dbCharset ; this.mszDriver = driver ; this.connect(); } public SQLiteHost( String dbLocation ) throws SQLException { this.mszLocation = dbLocation; this.mszDriver = "org.sqlite.JDBC"; this.connect(); } @Override public boolean isClosed() { if( this.mGlobalConnection == null ) { return true; } try { return this.mGlobalConnection.isClosed(); } catch ( SQLException e ) { Debug.cerr( e ); return false; } } @Override public void connect() throws SQLException { try{ Class.forName( this.mszDriver ); } catch ( ClassNotFoundException e ){ throw new SQLException( "JDBC Driver is not found.", "CLASS_NOT_FOUND", e ); } String url = this.mszLocation.startsWith( "jdbc:" ) ? this.mszLocation : "jdbc:sqlite:" + this.mszLocation; /* fix: honor pre-formed JDBC URLs; previously both branches built the same prefixed URL and a stray double semicolon remained */ this.mGlobalConnection = DriverManager.getConnection( url ); Statement statement = this.mGlobalConnection.createStatement(); statement.execute( "PRAGMA journal_mode=WAL;" ); statement.close(); } @Override public void close() throws SQLException { Debug.trace( "Closing SQLite connection." ); if( this.mGlobalConnection != null ) { this.mGlobalConnection.close(); } } @Override public Connection getConnection() { return this.mGlobalConnection; } @Override public Statement createStatement() throws SQLException { if( this.isClosed() ){ this.connect(); } return this.mGlobalConnection.createStatement(); } }
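A minimal usage sketch for `SQLiteHost` above, relying only on the constructor and methods shown in this file; the `demo.db` path and the `kv` table are hypothetical:

```java
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import com.pinecone.ulf.rdb.sqlite.SQLiteHost;

public class SQLiteHostSketch {
    public static void main( String[] args ) throws SQLException {
        SQLiteHost host = new SQLiteHost( "demo.db" );   // connects immediately; connect() enables WAL mode
        try {
            Statement statement = host.createStatement();
            statement.execute( "CREATE TABLE IF NOT EXISTS kv ( k TEXT PRIMARY KEY, v TEXT )" );
            statement.executeUpdate( "INSERT OR REPLACE INTO kv VALUES ( 'name', 'Hydra' )" );
            try ( ResultSet rs = statement.executeQuery( "SELECT v FROM kv WHERE k = 'name'" ) ) {
                if ( rs.next() ) {
                    System.out.println( rs.getString( 1 ) );
                }
            }
            statement.close();
        } finally {
            host.close();
        }
    }
}
```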
================================================ FILE: Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/sqlite/SQLiteMethod.java ================================================ package com.pinecone.ulf.rdb.sqlite; import com.pinecone.framework.system.prototype.Pinenut; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.HashMap; import java.util.Map; public class SQLiteMethod implements Pinenut { private SQLiteHost sqliteHost; private Statement statement; public SQLiteMethod( SQLiteHost sqliteHost ) throws SQLException { this.sqliteHost = sqliteHost; this.statement = sqliteHost.createStatement(); } public Map< String, Object > executeQuery( String sql ) throws SQLException { HashMap< String, Object > map = new HashMap<>(); try ( ResultSet resultSet = this.statement.executeQuery( sql ) ) { if ( resultSet.next() ) { /* fix: advance to the first row before reading; getString was previously called on a before-first cursor, and the ResultSet was never closed */ ResultSetMetaData metaData = resultSet.getMetaData(); int columnCount = metaData.getColumnCount(); for ( int i = 1; i <= columnCount; i++ ) { map.put( metaData.getColumnName( i ), resultSet.getString( i ) ); } } } return map; } public int executeUpdate( String sql ) throws SQLException { return this.statement.executeUpdate( sql ); } } ================================================ FILE: README.md ================================================ # Bean Nuts Hazelnut Hydra
    The Nine-Headed Dragon: a distributed operating system

    A true super-individual: a one-person company, a one-person conglomerate, a one-person middle platform. Industrial architecture for large-scale AI, data, and task scheduling; large-scale control; central intelligence system; large-scale distributed crawling; big-data processing; data warehousing; cloud computing; middle platform.

    Simplified Chinese Documentation · Update Log · GitHub stars

    Simplified Chinese | English[TODO] | Nuts Projects | Dragon King | Boss Ken | Boss Genius | Boss Welsir

    Documentation (continuously updated): https://docs.nutsky.com/docs/hazelnut_sauron_zh_cn | Real-world cluster build walkthrough: https://zhuanlan.zhihu.com/p/634851956

## 📖 Abstract

Would you like to own the "God Eyes"? Do you crave power? Do you wish to wield all information at your fingertips? **Now, data is all you need!** The Hazelnut and Hydra ecosystem is a powerful data-analysis "Elder Brain" designed specifically for "TJ" individuals: 'all information, all I need'. **Hey, commander!** We build a unique personal PB-level data warehouse, knowledge base, and search engine just for you, your exclusive "God Eyes"! Your own C4ISR, your own 'global' strike system, Central Intelligence System, Central Staff System, and firepower industrial plants. This repository is the underlying architecture of all of the above.

## 📖 Summary / Introduction

**Do you want the "Eye of God"? Do you crave power? Do you wish to hold all information in your grasp? In this era, data is the world!** The Hydra ecosystem is a large-scale data-analysis "Elder Brain" built for "TJ" people, with everything under your control. Hydra builds you a personal PB-level data warehouse, knowledge base, graph store, and task orchestration, plus a super-individual engine serving Agent factories: your exclusive "Eye of God", yours to command! So what on earth is this thing? It is the underlying strategic architecture of your own C4ISR, your own "global" strike system, central intelligence system, central staff system, and firepower industrial plants.

### Too many words? Sounds too high-end? A few scenarios to quickly grasp the Hydra philosophy.

- **Large-scale knowledge base**: build your private knowledge base, link in any knowledge graphs that interest you (finance, news, academia, games, music, movies, video, fiction, food, etc.), generate a giant knowledge base and graph, and hand it to GPT-class large models to produce your own `God Report`.
- **Data warehouse**: massive data at your disposal. Build your own data "full-map wallhack", even board a time machine and travel freely through the data world. You are the god; the changes of history are at your fingertips.
- **Data mart**: build your personal GPT. As computing power and large-model technology become commoditized, won't you want your own GPT in the future? Just keep collecting your own datasets, and later build your exclusive GPT, Diffusion, and so on.
- **Large-scale collection**: a unified parallel architecture for building large-scale strategic collection systems, with several examples to get you started: 1) full-site crawl of Wikipedia; 2) full-site crawl of Urban Dictionary; 3) IMDb crawl; 4) the Chronicle sub-project, collecting worldwide daily news to build an internet memory bank and intelligence system; 5) large-scale collection of financial data (modeling capital flows); 6) collection of search-engine infrastructure data such as reverse IP lookup, ISP tracing, DNS/rDNS, domains, NIC, etc. (To avoid disputes, no controversial code or data is provided.)
- **Data platform**: strategy- and tactics-oriented data analysis; builds on and integrates other open-source data products, targeting professional big-data analysis systems such as intelligent ETL, data warehousing, data retrieval, and intelligence.
- **Middle-platform architecture**: systematically implements upper-layer applications; abstraction- and unification-oriented; supports large-scale parallel and big-data architecture, with meta-architectural separation of information, control, scheduling, auditing, permissions, and more.

### 🏆 A AAA epic production

Full-domain coverage, under your command, able to win battles, exemplary conduct.

### What is Hydra, and what can it do?

- Hydra is a distributed infrastructure system originally created by DragonKing and his team, aimed at systematically building large upper-layer applications. Hydra's design is control-oriented first: it targets large-scale control and, on that basis, a general-purpose task and service operating system. Like other operating-system design philosophies, it advocates letting the kernel do the work and plan holistically.
- Its design originates from innovating on and unifying middle-platform architecture, attempting to build a more consistent kernel. The current design is iterated bottom-up from a mini middle platform and a cloud system (the Bean Nuts cloud, 豆子坚果云).
- The Hydra substrate is shown in the figure below, divided top-down into three layers: application (concrete), intermediate (typical), and base (abstract), corresponding to the application layer, intermediate application middle platform, and middle-platform layer in the figure.
- An operating system must at least control and manage subsystems for tasks, services, resources, storage, messages, signals, permissions, and so on. Hydra does too; however, for reasons of time and cost-effectiveness, the resource-management kernel is delegated to concrete third-party systems (such as Yarn).

#### Global architecture bird's-eye view

![HydraArchitecture](assets/imgs/architect/architect_frame_global_2.png)

01. Uniform, highly abstract orchestration of tasks, transactions, services, etc.: one set of interfaces, usable hierarchically, locally, or across a cluster.
02. Abstract, unified distributed resource trees: scene tree, service tree, task tree, deployment tree, configuration tree, storage tree, etc.
03. Multi-level, nestable orchestration, supporting configuration-domain management, dynamic decoupling of complex configurations, and inheritable, overridable multi-domain configuration management.
04. Transactionable abstract process and thread models, letting remote services be managed uniformly like local processes through one set of interfaces over RPC or communication components.
05. Transaction-graph orchestration methodology: like TensorFlow, a more abstract and simple design pattern for services and tasks. Transaction and task orchestration supports sequential and parallel modes, plus a performance mode, guaranteeing paradigms such as absolute execution, rollback, performance execution, and parallelism.
06. A unified interpreter-pattern methodology and procedural design: transaction and task orchestration expressed as logic, supporting loop control, conditional control, dispatch control, atomization, etc.
07. An abstract, unified task-manager hierarchy with a unified lifecycle design: one "task manager" for many task types, as simple as a local system.
08. An abstract, unified system architecture: centralizable, federable, chainable; everything is possible.
09. An abstract, unified external file system: unified file-system management based on Common VFS, freeing you from complex underlying storage.
10. An abstract, unified kernel file system: cascading logical volumes (simple, spanned, striped) with free capacity planning; a distributed object file system supporting many file-system operations.
11. An abstract, unified data-processing system: generalized-container thinking, abstracted DAO, DTO, and Data Manipulation architecture; everything can be a Map, List, Set, Table, and so on.
12. Abstract deployment modes and abstract cloud deployment: whatever the target (any system, local process, virtual machine, container), Hydra unifies them with an "applet-ized" process model as simple as Spring Boot.
13. Designed around divide-and-conquer and MapReduce, oriented toward big-data processing systems.
14. Duplex multiplexed RPC built on Netty and NIO, supporting bidirectional control (the server can passively control the client), send and receive on both ends, and JSON, BSON, and Protobuf (fully automatic dynamic compilation in Java); see the Protobuf sketch after this list.
15. Traditional instantiation, IOC, and C/C++-style object lifecycles: multiple lifecycle modes for more interesting system design.
16. A design methodology of levels, groups, nesting, and cascading, ensuring more flexible large-system design and a clear, regular, visualizable, tidy system structure.
17. Don't worry about the abstraction, and don't worry about "hot air": wherever possible we demonstrate system features through real cases and working code, and commits are welcome; a small crawler search engine serves as the worked example.
### Subsystems, frameworks, and example systems

#### Bean Nuts Hazelnut Sauron Radium (Sauron Radium, the distributed crawler engine)

- This part implements the distributed crawler engine and the framework for crawler big-data processing, cleaning, and persistence. It is designed for distributed, large-scale, systematic crawling, supporting task orchestration and parallel pipeline crawlers, periodic and scheduled large-scale crawls, and parallel offline data processing.

#### Bean Nuts Hazelnut Sauron Shadow (Sauron Shadow, with a crawler / small search engine as the worked example)

- This part is the search-engine application instance (data-collection and data-processing side) ultimately built on the Pinecone, Ulfhedinn, Slime, Hydra, Radium, and other sub-frameworks.
- Several examples get you started quickly: 1) full-site crawl of Wikipedia; 2) full-site crawl of Urban Dictionary; 3) IMDb crawl; 4) the Chronicle sub-project, daily worldwide news collection to build your internet memory bank; etc.

#### Bean Nuts Hazelnut Sauron Eyes - The God View (Sauron Eyes, a data knowledge-graphing and retrieval system [user-side terminal application])

- For a demo of the data-retrieval engine, see SauronEyes (https://god.nutsky.com | http://www.godview.net)

## ⚔ Table of Contents

* [1. Description](#一描述)
  * [1.1 Framework composition](#11框架组成)
    * [1.1.1 Pinecone base runtime support library](#111基础运行支持库)
      * [1.1.1.1 Extended containers](#1111扩展容器)
      * [1.1.1.2 Utility libraries](#1112工具库)
    * [1.1.2 Slime big-data system support framework](#112大数据系统支持框架)
    * [1.1.3 Ulfhedinn base runtime support library (third-party-dependency edition)](#113大数据系统支持库)
    * [1.1.4 Hydra distributed and task system framework](#114分布式、任务系统框架)
    * [1.1.5 Radium distributed crawler system framework](#114分布式、任务系统框架)
  * [1.2 Functional module composition](#12功能模块组成)
    * [1.2.1 Network communication library](#121网络通信库)
      * [1.2.1.1 Stream-processing module](#1211流处理模块)
* [2. Building and usage](#二编译、使用)
* [3. Directory structure](#三目录结构说明)
  * [3.1 TODO](#31TODO)
* [4. License](#四使用许可)
* [5. References](#五参考文献)
* [6. Acknowledgements](#六致谢)
* [7. Asides](#七题外话)

## 一、📝 Description

### 1.1、Framework composition

#### Global central architecture bird's-eye view (abstract global architecture)

![HydraArchitecture](assets/imgs/architect/architect_frame_global.png)

#### 1.1.1、Pinecone base runtime support library

##### 1.1.1.1、Extended containers

1. LinkedTreeMap
2. ScopeMap (multi-domain lookup tree / Map): implements and supports the underlying inheritance data structures of dynamic languages (JS, PHP, Python, etc.), with two sub-models (single and multiple inheritance), enabling multi-domain lookup.
3. Dictium, Dictionary (dictionary interface model): implements and supports dynamic-language-style (PHP, Python, etc.) Array and dictionary lookup, further abstracting Map and indexable objects.
4. Multi*Map (various MultiValueMap paradigms): implementations of many multi-value Maps, such as MultiCollectionMap, MultiSetMap, etc.
5. Top (a general solution to the TopN problem): heap, ordered-tree, and multi-value ordered-tree implementations; see the sketch after this list.
6. distinct (differencer): set-difference analyzers implementing the traditional Set method, divide-and-conquer, Bloom filters, etc.
7. affinity (affinity utilities): implements and supports inheritance, overriding, and so on over affinity-based abstract dictionaries.
8. tabulate (traverser): list-style internal recursion over abstract dictionaries, tabulating them and analyzing affinity relations.
9. ShardList (non-replicated shared list), contributed by @Geniusay.
10. TrieMap (prefix-tree Map): non-recursive iterators, a file-system-like full-featured prefix tree, and Symbolic Reparse reference mount points.
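A quick, generic illustration of the heap method behind the `Top` container (item 5 above), JDK-only and with a hypothetical `topN` helper; it is not the Pinecone `Top` API itself:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.PriorityQueue;

public final class TopNSketch {
    // Keep a bounded min-heap of size n: anything smaller than the heap root can never be in the top n.
    public static List<Integer> topN( Iterable<Integer> data, int n ) {
        PriorityQueue<Integer> heap = new PriorityQueue<>( n );
        for ( Integer v : data ) {
            if ( heap.size() < n ) {
                heap.offer( v );
            } else if ( v > heap.peek() ) {
                heap.poll();        // evict the current smallest of the candidates
                heap.offer( v );
            }
        }
        List<Integer> result = new ArrayList<>( heap );
        result.sort( Collections.reverseOrder() );
        return result;
    }

    public static void main( String[] args ) {
        System.out.println( topN( List.of( 5, 1, 9, 3, 7, 8, 2 ), 3 ) ); // [9, 8, 7]
    }
}
```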
##### 1.1.1.2、Utility libraries

1. JSON, BSON, and JPlus (JSON++) libraries (a freely re-developable, design-oriented JSON stack): JSONEncoder, JSONDecoder, JSONCompiler, JSONDecompiler, injectors, and more can all be overridden.
2. Name namespace library.
3. lang (Java package and extension library): supports many class-scanning and package-scanning methods, traversal and collection methods, loading, multi-domain scanning, etc.
4. GUID (contributed by @kenssa4eedfd): unified distributed IDs; a reworked Baidu UID; GUID64, GUID72; supports random obfuscation.
5. TODO

#### 1.1.2、Hydra

##### 1.1.2.1、System architecture and skeleton design

1. HierarchySystem: the hierarchy system (a generalized Master-Slave architecture).
2. FederalSystem: the federal system (vote-oriented design). [BETA, 20250101]
3. BlockSystem: the block system (edge- and chain-oriented design); turn every one of your devices into a node on your own chain. [BETA, 20250101]
4. CascadeComponent: cascading component design, supporting affinity-aware reclamation control, "The Omega Device", and cascading reclamation of primary-key references.
5. Distributed containers (distributed multi-domain trees, etc.).
6. Distributed microkernel\
   Borrows from WinNT and Unix; supports KernelObject, mount points, and unified kernel-object management.\
   Unix-style path-addressable kernel handles, e.g. distributed mounts /proc/, /dev/, etc.

##### 1.1.2.2、Unified scheduling and orchestration system

![TaskTree](assets/imgs/task/task_tree.png) ![TaskLineage](assets/imgs/task/task_lineage.png)

1. Orchestration (the transaction and task orchestration subsystem): a unified interpreter-pattern methodology and procedural design; transaction and task orchestration expressed as logic, supporting loop control, conditional control, dispatch control, atomization, etc., plus transactional-integrity design.
2. Auto (simple command pattern; a programmable automaton system): implements Automaton, a simple produce-consume command queue; PeriodicAutomaton, a programmable Timer; and Marshalling, a pipeline instruction orchestrator. (More timers and algorithms are continuously being added.)
3. Vector DAG: we propose a general large-scale vector DAG graph model to support high-performance scheduling, orchestration, and control, fit for real workloads of one hundred million nodes and beyond. Through topology splitting, vectorized subgraph partitioning, and several concurrent graph algorithms, it delivers large-scale scheduling control. The algorithms cover core graph-processing capabilities such as critical-path computation, node reachability, pruning, minimal spanning-subgraph merging, and shortest paths; a generic critical-path sketch follows below. https://docs.nutsky.com/docs/hazelnut_sauron_zh_cn/uniform_massive_graph_dispatch ![VDAGArchitecture](assets/imgs/vdag_architecture.png)
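To make the Vector DAG item concrete, here is a generic, JDK-only critical-path sketch (Kahn's topological sort plus longest-path relaxation). It illustrates only the underlying algorithm and is not the Hydra Vector DAG API:

```java
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

public final class CriticalPathSketch {
    /** Longest-path lengths from any source in a DAG; edges[i] = { from, to, weight }. */
    public static long[] criticalPath( int nodes, int[][] edges ) {
        List<List<int[]>> adj = new ArrayList<>();
        int[] indegree = new int[ nodes ];
        for ( int i = 0; i < nodes; ++i ) adj.add( new ArrayList<>() );
        for ( int[] e : edges ) { adj.get( e[0] ).add( new int[]{ e[1], e[2] } ); ++indegree[ e[1] ]; }

        Deque<Integer> queue = new ArrayDeque<>();
        for ( int i = 0; i < nodes; ++i ) if ( indegree[ i ] == 0 ) queue.add( i );

        long[] dist = new long[ nodes ];   // dist[v] = weight of the heaviest path ending at v
        int visited = 0;
        while ( !queue.isEmpty() ) {
            int u = queue.poll(); ++visited;
            for ( int[] edge : adj.get( u ) ) {
                int v = edge[0], w = edge[1];
                dist[ v ] = Math.max( dist[ v ], dist[ u ] + w );   // relax in topological order
                if ( --indegree[ v ] == 0 ) queue.add( v );
            }
        }
        if ( visited != nodes ) throw new IllegalStateException( "Graph has a cycle; not a DAG." );
        return dist;
    }

    public static void main( String[] args ) {
        // 0 -> 1 -> 3 and 0 -> 2 -> 3; the heaviest path into node 3 has weight 7.
        int[][] edges = { {0,1,2}, {1,3,5}, {0,2,1}, {2,3,4} };
        System.out.println( Arrays.toString( criticalPath( 4, edges ) ) ); // [0, 2, 1, 7]
    }
}
```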
##### 1.1.2.3、The applet system

Servgram, the applet system. Yes, very WeChat, except these are server-side applets! It further abstracts and generalizes the process concept to any service medium (local, virtual machine, container, ...), any service, any task. Everything unified and harmonious: one scheduler, one interface, one set of operations, lifecycles all neatly lined up (satisfying your need for control), with redundancy for stability.\
Combined with task and transaction orchestration: many tasks, one system covers them all. (TODO: further implementation of remote processes; unified distributed-lock interface.)

##### 1.1.2.4、Unified message distribution system

##### 1.1.2.5、WolfMC RPC

1. An original message-control middleware built on Netty, supporting RPC mode.
2. Supports JSON, BSON, and Protobuf; more RPC protocols and data structures are continuously being added. [TODO: sharding, swim lanes]
3. Duplex communication; both ends can send and receive (the server can passively control the client; dual-channel pool design).
4. Fully automatic dynamic Protobuf compilation, supporting direct interface proxies (similar to a MyBatis Mapper factory).
5. Asynchronous callbacks, AJAX-style.
6. Synchronous callbacks.
7. Together with MessageExpress, Spring-Controller-style message control.
8. AOP and IOC support with automatic dependency injection; Controller-like paradigms and message-annotation interception.

##### 1.1.2.6、Unified service registration, discovery, and management system

1. Service tree\
   A service tree with multi-level classification; multi-level namespaces can be configured, e.g. `Name1.Name2.应用1.服务1`.\
   Supports meta-information inheritance, multiple references, node reclamation, and complex service-management taxonomies.

##### 1.1.2.7、Distributed microkernel

1. Configuration tree, distributed registry\
   A "pirated" Apollo supporting distributed configuration management: one configuration center, just like the Windows registry.
   1. Unified DOM / prefix-tree abstraction, custom nodes (plugin design), file-system-style design.
   2. Configuration inheritance.
   3. Hard-link references (mark-based reference counting with cycle detection / inode-table design).
   4. Selectors (path selectors, XPath).
   5. Big data (database-backed benchmarks).
   6. Path-cache design.
   7. Improved non-recursive DFS path-addressing algorithm.
   8. Compatible with the Windows registry style.
   9. Move and copy support (recursive cascading; copy and move folders and configuration items).
   10. Supports JSON, XML, and other raw-text or dynamic data formats, with mutual conversion between JSON/XML and the registry.
   11. Dynamic configuration rendering (EL expressions, logic and loop support).
   12. Database operations decoupled from the underlying layer; any data source: database, memory, Redis, etc.
2. Task tree\
   Task and process classification, grouping, and orchestration.\
   A secondary mount and classification over the first-level mount point `/proc/${proc_guid}/task`.
3. Deployment tree, deployment manager\
   Multiple deployment modes (container, virtual machine, PaaS, ...), with classification, grouping, and orchestration, similar to the Windows Device Manager.\
   Abstract deployment devices resemble the physical devices of a traditional operating system: write a driver to manage a deployment subsystem.
4. Scene tree\
   Feature classification, grouping, and orchestration.
5. Unified user system
   1. Kernel-level unified user, credential, role, and permission management.
   2. Unified single-sign-on, middle-platform design.
   3. Three-level domain / group / user design.

##### 1.1.2.8、Distributed storage system

1. Volume system
   1. Physical volumes, multiple data-source designs
   2. Simple volumes
   3. Spanned volumes
   4. Striped volumes: state-machine-based, lock-free parallel storage; a high-performance striped-volume design optimized with differential multi-way sliding-window caching, DFA, and FIFO multi-threaded caching algorithms; see the stripe-mapping sketch after this section. At the application level the project provides a management console for physical and logical volumes. ![image](assets/imgs/logic_volume_manage.png) ![image](assets/imgs/logic_volume_create.png) ![image](assets/imgs/physical_volume_manage.png)
2. Distributed file system\
   Hydra is a large multi-level cascading architecture, and UOFS adopts the same overall scheme with a globally cascading design, supporting cascading at every level: storage nodes, index nodes, volume nodes. ![FileSystemArchitecture](assets/imgs/fs_architecture.svg) At the application level the project provides not only the core features of a file browser, but also file preview, multi-cluster upload, external mounts, file-integrity verification, and more. ![image](assets/imgs/file_manage.png) ![image](assets/imgs/file_preview.png) ![image](assets/imgs/external_mounting.png) ![image](assets/imgs/property_view.png)
3. UOFS-based CDN (file distribution network)\
   Built on UOFS together with Kafka, RocketMQ, and the service-management center, providing a CDN service with guaranteed data consistency, plus file version management and site management. ![image](assets/imgs/site_manage.png) ![image](assets/imgs/file_distribution_and_synchronization.png) ![image](assets/imgs/file_version_manage.png)
4. Version management
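The striped-volume item above relies on the textbook RAID-0-style mapping from a logical offset to a (disk, physical offset) pair. The generic sketch below shows that arithmetic (hypothetical `locate` helper; not the Hydra volume subsystem's code):

```java
public final class StripeMappingSketch {
    /** Map a logical byte offset onto { disk index, offset within that disk } for a striped volume. */
    public static long[] locate( long logicalOffset, int diskCount, long stripeSize ) {
        long stripeIndex  = logicalOffset / stripeSize;   // which stripe unit, counting across all disks
        long withinStripe = logicalOffset % stripeSize;   // position inside that stripe unit
        long disk         = stripeIndex % diskCount;      // stripe units are dealt to disks round-robin
        long row          = stripeIndex / diskCount;      // the stripe "row" on each disk
        return new long[]{ disk, row * stripeSize + withinStripe };
    }

    public static void main( String[] args ) {
        // 3 disks, 64 KiB stripe units: logical offset 200_000 lands on disk 0, row 1.
        long[] loc = locate( 200_000L, 3, 64 * 1024L );
        System.out.println( "disk=" + loc[0] + ", physicalOffset=" + loc[1] );
    }
}
```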
##### 1.1.2.9 Unified Resource Management and Allocation Interface System [TODO]

##### 1.1.2.10 Graphical Management Interface [TODO]

##### 1.1.2.11 TODO

#### 1.1.3 Slime, the Big-Data Support Library
##### 1.1.3.1 Unified block abstraction, management and allocation system (generic blocks, abstract pages (contiguous, discrete, custom), frames, partitions, clusters, etc.)
##### 1.1.3.2 Mapper / Querier abstract mapping and query components: one interface, multiple implementations (local, database, cache, data warehouse, etc.)
1. Optimized and cached RDBMapper / IndexableMapper, using multiple caching strategies, exposed through a generic container-style API.
##### 1.1.3.3 Unified caching and query-optimization library, with LRU, hot/cold optimization, page caching, page LRU, multi-level caching and other strategies.
##### 1.1.3.4 Source, an abstract data-source library, extensible to RDB-ibatis, NoSQL, caches, files, etc.
##### 1.1.3.5 Reducer library [TODO]: more Reduce strategy implementations and interfaces.

#### 1.1.4 Radium: a Distributed Framework for Crawler / Search-Engine Data Retrieval, Task Orchestration, Processing and Persistence
##### 1.1.4.1 One-stop crawler data-processing paradigm
Based on the Map-Reduce idea and aimed at TB-PB-scale data processing, with unified task numbering, mapping and processing.
The paradigm consists of Reaver (the plunderer: data retrieval), Stalker (the lurker: batch crawler index sniffing) and Embezzler (the launderer: batch crawler data processing).
##### 1.1.4.2 Unified multi-task scheduling, configuration and orchestration system
Supports multiple task-granularity controls such as transactional and best-effort.
Supports grouped, nested, multi-level task scheduling; subtasks inherit parent-task relations and lineage.
Interface designs for task rollback, circuit breaking, etc.
TODO

## 二、🧬 Building and Usage

### Building
- The project is managed with Maven and runs on JDK 11 or later.
- Build the jar packages: they are plug-and-play and can be deployed anywhere.
- Alternatively, just open the project in IntelliJ IDEA.

### Minimal system usage
- No environment variables or similar settings need to be configured.
- The system configuration files live under "./system/setup/.." by default:
```json5
"Orchestration" : {
    "Name": "ServgramOrchestrator",
    "Type": "Parallel",    // Enum: { Sequential, Parallel, Loop }
    // Servgram-Classes scanning package-scopes
    "ServgramScopes": [
        "com.sauron.heist.heistron"
    ],
    "Transactions": [
        { "Name": "Heist", "Type": "Sequential", "Primary": true }
    ]
}
```
- The `Heist` (crawler) task is started by default.
- Check the `Heist` applet configuration, located at "./system/setup/heist.json5" by default:
```json5
"Orchestration" : {
    "Name": "HeistronOrchestrator",
    "Type": "Parallel",    // Enum: { Sequential, Parallel, Loop }
    "DirectlyLoad" : {
        "Prefix": [],
        "Suffix": [ "Heist" ]
    },
    "ServgramScopes": [
        "com.sauron.shadow.heists",
        "com.sauron.shadow.chronicle"
    ],
    // Edit here to run the example 'Void', the minimal-system demo
    "Transactions": [
        { "Name": "Void", "Type": "Sequential" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions } */ },
    ]
}
```
- Check the `Void` mini-applet configuration, located at "./system/setup/heists/Void.json5" by default; as a rule, mind the letter case:
```json5
"Orchestration" : {
    "Name": "VoidOrchestrator",
    "Type": "Parallel",    // Enum: { Sequential, Parallel, Loop }
    "Transactions": [
        { "Name": "Jesus", "Type": "Sequential" },
        { "Name": "Satan", "Type": "Sequential" },
        { "Name": "Rick" , "Type": "Sequential" }
    ]
}
```
- On a normal start, the local pipeline sequentially schedules the three top-level tasks "Jesus", "Satan" and "Rick" together with their subtasks.
![Minimal system running successfully](assets/imgs/demo_result.png)

## 三、🔨 Directory Structure
- TODO

## 四、🔬 License
- MIT (keep this license notice, then distribute and modify freely; contributions are welcome)

## 五、📚 References
(The references cover all involved subprojects: the Nuts-family C/C++ and Java runtime support libraries, this project's framework, and this project itself, including their source code, designs and patents. They are listed so readers can trace the origin and underlying methodology of the techniques and designs involved; the author expresses the highest respect and gratitude to the authors of the referenced projects, as well as to projects not listed directly.)
01. C/C++ STL (container and runtime-support-library design; algorithms, design patterns and data structures)
02. Java JDK (container and runtime-support-library design; algorithms, design patterns and data structures)
03. Go SDK (container and runtime-support-library design; algorithms, design patterns and data structures)
04. PHP 5.6 Source (interpreter and related support-library design)
05. MySQL Source (several design ideas and parts of their implementation)
06. Linux Kernel (several design ideas and parts of their implementation)
07. Win95 Kernel (Reveal Edition), Win32 APIs, runtime framework
08. WinNT window-event model, callback-function injection, etc.
09. C/C++ Boost
10. C/C++ ACL -- One advanced C/C++ library for Unix/Windows.
11. Java Spring Framework family (how IoC/AOP/etc. work)
12. Hadoop MapReduce (how it works)
13. Python TensorFlow (Graph, how it orchestrates)
14. JavaScript DOM design, CSS selectors, etc.
15. Several other small frameworks, utility libraries and languages (such as Apache Commons, org.json, fastcgi, fastjson, libevent), to which this document likewise expresses the highest respect and gratitude.

# 📈 Project Activity
![Alt](https://repobeats.axiom.co/api/embed/0ae23655bb105addf8d90a999df36f690d615af7.svg "Repobeats analytics image")

================================================
FILE: RedQueen/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>sauron</artifactId>
        <groupId>com.sauron</groupId>
        <version>1.2.7</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.acorn.redqueen</groupId>
    <artifactId>redqueen</artifactId>
    <packaging>pom</packaging>
    <version>2.5.1</version>

    <modules>
        <module>redqueen-architecture</module>
        <module>redqueen-computation-suit</module>
        <module>redqueen-system</module>
        <module>redqueen-framework-service</module>
    </modules>
</project>

================================================
FILE: RedQueen/redqueen-architecture/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>redqueen</artifactId>
        <groupId>com.acorn.redqueen</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.acorn.redqueen.kernel</groupId>
    <artifactId>redqueen-architecture</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-service</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>

================================================
FILE: RedQueen/redqueen-architecture/src/main/java/com/acorn/redqueen/compute/ComputationNode.java
================================================
package com.acorn.redqueen.compute;

public interface ComputationNode {
}

================================================
FILE: RedQueen/redqueen-architecture/src/main/java/com/acorn/redqueen/system/Dummy.java
================================================
package com.acorn.redqueen.system;

public class Dummy {
}

================================================
FILE: RedQueen/redqueen-computation-suit/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>redqueen</artifactId>
        <groupId>com.acorn.redqueen</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.acorn.redqueen.kernel</groupId>
    <artifactId>redqueen-computation-suit</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>

================================================
FILE: RedQueen/redqueen-framework-service/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>redqueen</artifactId>
        <groupId>com.acorn.redqueen</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.acorn.redqueen.kernel</groupId>
    <artifactId>redqueen-framework-service</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.acorn.redqueen.kernel</groupId><artifactId>redqueen-architecture</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-service-control</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>

================================================
FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/ApplicationManager.java
================================================
package com.acorn.redqueen.service;

import com.pinecone.framework.system.regime.arch.Manager;
import com.pinecone.hydra.system.ko.CascadeKernelObjectInstrument;

public interface ApplicationManager extends CascadeKernelObjectInstrument, Manager {
}

================================================
FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/ArchRedApplication.java
================================================
package com.acorn.redqueen.service;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.id.Identification;
import com.pinecone.hydra.service.kom.entity.ApplicationElement;

public abstract class ArchRedApplication implements RedApplication {
    protected ApplicationElement mApplicationElement;

    @Override
    public
ApplicationElement getApplicationElement() { return this.mApplicationElement; } @Override public long getEnumId() { return this.mApplicationElement.getEnumId(); } @Override public GUID getGuid() { return this.mApplicationElement.getGuid(); } @Override public Identification getId() { return this.mApplicationElement.getId(); } @Override public String getName() { return this.mApplicationElement.getName(); } @Override public String getScenario() { return this.mApplicationElement.getScenario(); } @Override public String getPrimaryImplLang() { return this.mApplicationElement.getPrimaryImplLang(); } @Override public String getExtraInformation() { return this.mApplicationElement.getExtraInformation(); } @Override public String getLevel() { return this.mApplicationElement.getLevel(); } @Override public String getDescription() { return this.mApplicationElement.getDescription(); } } ================================================ FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/RedApplication.java ================================================ package com.acorn.redqueen.service; import com.pinecone.hydra.service.Application; import com.pinecone.hydra.service.kom.entity.ApplicationElement; public interface RedApplication extends Application { ApplicationElement getApplicationElement(); } ================================================ FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/RedQueenServiceControllerException.java ================================================ package com.acorn.redqueen.service; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.service.registry.ServiceControlException; public class RedQueenServiceControllerException extends ServiceControlException implements Pinenut { public RedQueenServiceControllerException() { super(); } public RedQueenServiceControllerException( String message ) { super(message); } public RedQueenServiceControllerException( String message, Throwable cause ) { super(message, cause); } public RedQueenServiceControllerException( Throwable cause ) { super(cause); } } ================================================ FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/RedServiceApplication.java ================================================ package com.acorn.redqueen.service; public class RedServiceApplication extends ArchRedApplication implements ServiceApplication { } ================================================ FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/ServiceApplication.java ================================================ package com.acorn.redqueen.service; public interface ServiceApplication extends RedApplication { } ================================================ FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/conduct/CollectiveServiceRegiment.java ================================================ package com.acorn.redqueen.service.conduct; import com.pinecone.framework.system.regime.Regiment; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.registry.ServiceControlException; import com.pinecone.hydra.service.registry.server.ServiceManager; import com.pinecone.hydra.system.component.Slf4jTraceable; public interface CollectiveServiceRegiment extends Regiment, Slf4jTraceable { ServiceManager serviceManager(); ServiceInstrument serviceInstrument(); void startServiceManage() throws 
ServiceControlException; }

================================================
FILE: RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/conduct/RedCollectiveServiceRegiment.java
================================================
package com.acorn.redqueen.service.conduct;

import com.pinecone.hydra.service.kom.ServiceInstrument;
import com.pinecone.hydra.service.registry.ServiceControlException;
import com.pinecone.hydra.service.registry.server.ServiceManager;
import com.pinecone.hydra.system.Hydrogen;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RedCollectiveServiceRegiment implements CollectiveServiceRegiment {
    protected ServiceManager    mServiceManager;
    protected ServiceInstrument mServiceInstrument;
    protected Hydrogen          mSystem;
    protected Logger            mLogger;

    public RedCollectiveServiceRegiment( Hydrogen system, ServiceInstrument serviceInstrument, ServiceManager serviceManager ) {
        this.mSystem            = system;
        this.mServiceInstrument = serviceInstrument;
        this.mLogger            = LoggerFactory.getLogger( "RedCollectiveServiceRegiment" );
        this.mServiceManager    = serviceManager;
    }

    @Override
    public Logger getLogger() {
        return this.mLogger;
    }

    @Override
    public void startServiceManage() throws ServiceControlException {
        this.mServiceManager.startService();
        this.mLogger.info( "RPC init success" );
    }

    @Override
    public ServiceManager serviceManager() {
        return this.mServiceManager;
    }

    @Override
    public ServiceInstrument serviceInstrument() {
        return this.mServiceInstrument;
    }
}

================================================
FILE: RedQueen/redqueen-system/pom.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>redqueen</artifactId>
        <groupId>com.acorn.redqueen</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.acorn.redqueen.kernel</groupId>
    <artifactId>redqueen-system</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>

================================================
FILE: RedQueen/redqueen-system/src/main/java/com/acorn/redqueen/RedQueen.java
================================================
package com.acorn.redqueen;

import com.acorn.redqueen.system.ServiceCentralControl;
import com.pinecone.framework.util.config.PatriarchalConfig;
import com.pinecone.framework.util.io.Tracer;
import com.pinecone.hydra.system.ArchModularizedSubsystem;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.component.LogStatuses;

public class RedQueen extends ArchModularizedSubsystem implements ServiceCentralControl {
    public RedQueen( Hydrogen primarySystem, String name, PatriarchalConfig config ) {
        super( primarySystem, name, config );
    }

    @Override
    protected void traceWelcomeInfo() {
        Tracer console = this.mPrimarySystem.console();
        console.getOut().print( "---------------------------------------------------------------\n" );
        console.getOut().print( "\u001B[31mBean Nuts Acorn Red Queen\u001B[0m\n" );
        console.getOut().print( "\u001B[31mMassive Parallel Computing Orchestration System \u001B[0m\n" );
        console.getOut().print( "\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\u001B[0m\n" );
        console.getOut().print( "---------------------------------------------------------------\n" );
    }

    protected void init() {
        this.getLogger().info( " >>> System Booting..."
); this.infoLifecycle( " Domain Subsystem Initialization", LogStatuses.StatusStart ); this.traceWelcomeInfo(); this.prepare_system_skeleton(); this.infoLifecycle( " Welcome to the Red Queen super computing!", LogStatuses.StatusReady ); this.infoLifecycle( " Domain Subsystem Initialization", LogStatuses.StatusReady ); } protected void prepare_system_skeleton() { } @Override public void vitalize() { this.init(); } @Override public void terminate() { } } ================================================ FILE: RedQueen/redqueen-system/src/main/java/com/acorn/redqueen/system/ServiceCentralControl.java ================================================ package com.acorn.redqueen.system; import com.pinecone.framework.system.SynergicSystem; import com.pinecone.hydra.system.centrum.CentralControlSubsystem; public interface ServiceCentralControl extends SynergicSystem, CentralControlSubsystem { } ================================================ FILE: Saurons/Saurye/pom.xml ================================================ saurons com.saurons 1.2.7 4.0.0 com.sauron.saurye saurye 2.1.0 ================================================ FILE: Saurons/Shadow/pom.xml ================================================ saurons com.saurons 1.2.7 4.0.0 com.sauron.shadow shadow 1.2.7 jar org.springframework.boot spring-boot-maven-plugin package repackage true com.sauron.shadow.ShadowBoot 11 11 UTF-8 com.pinecone pinecone 2.5.1 com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 com.pinecone.tritium hydra-system-tritium 2.1.0 com.pinecone.slime slime 2.1.0 com.pinecone.ulf ulfhedinn 1.2.1 com.pinecone.ulf ulf-lib-oltp-rdb 1.2.1 com.sauron.core sauron-core 1.2.7 compile com.sauron.heist heist-system-schedule 2.1.0 compile ================================================ FILE: Saurons/Shadow/src/main/java/META-INF/MANIFEST.MF ================================================ Manifest-Version: 1.0 Main-Class: com.sauron.shadow.ShadowBoot ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/Shadow.java ================================================ package com.sauron.shadow; import com.pinecone.framework.system.CascadeSystem; import com.sauron.Sauron; public class Shadow extends Sauron { public Shadow( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Shadow( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override protected void traceSubsystemWelcomeInfo() { super.traceSubsystemWelcomeInfo(); this.pout().print( "------------------------Shadow Subsystem-----------------------\n" ); this.pout().print( "\u001B[31m\uD83D\uDE08 Sauron`s Shadow Subsystem \uD83D\uDE08 \u001B[0m\n" ); this.pout().print( "\u001B[32mShadow is hungry, desiring for blood.\u001B[0m\n" ); this.pout().print( "---------------------------------------------------------------\n" ); } @Override public void vitalize () throws Exception { super.vitalize(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/ShadowBoot.java ================================================ package com.sauron.shadow; import com.pinecone.Pinecone; public class ShadowBoot { public static Shadow shadow = null; public static void main( String[] args ) throws Exception { ShadowBoot.shadow = new Shadow( args, Pinecone.sys() ); ShadowBoot.shadow.init( (Object...cfg )->{ ShadowBoot.shadow.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: 
Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/AffinitySuggestation.java ================================================ package com.sauron.shadow.chronicle; import com.pinecone.hydra.auto.ArchSuggestation; import com.pinecone.hydra.auto.ContinueException; import com.sauron.heist.heistron.orchestration.Instructations; public class AffinitySuggestation extends ArchSuggestation { protected Clerk mClerk; public AffinitySuggestation( Clerk clerk ) { this.mClerk = clerk; } @Override public void execute() { try{ Instructations.infoConformed( AffinitySuggestation.this.mClerk, "toRavage" ); AffinitySuggestation.this.mClerk.isTimeToFeast(); Instructations.infoCompleted( AffinitySuggestation.this.mClerk, "toRavage" ); } catch ( Exception e ) { AffinitySuggestation.this.mClerk.tracer().warn( String.format("[Fatality] (%s : %s) ", e.getClass().getSimpleName(), e.getMessage()) ); throw new ContinueException( e ); } } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ArchClerk.java ================================================ package com.sauron.shadow.chronicle; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.JSONGet; import com.pinecone.hydra.auto.Instructation; import com.sauron.heist.heistron.HTTPCrew; import com.sauron.heist.heistron.HTTPHeist; public abstract class ArchClerk extends HTTPCrew implements Clerk { protected Instructation mAffinityPrimeDirective; protected JSONObject mConfig; @JSONGet( "__proto__.NewsDataTable" ) protected String mszNewsDataTable; public ArchClerk( HTTPHeist heist, int id, JSONObject joConfig ) { super( heist, id ); this.mConfig = joConfig; this.mAffinityPrimeDirective = new AffinitySuggestation( this ); } public ArchClerk( HTTPHeist heist, int id, JSONObject joConfig, Class childType ) { this( heist, id, joConfig ); this.autoInject( ArchClerk.class ); this.autoInject( childType ); } @Override public ChronicleHeist parentHeist() { return (ChronicleHeist)super.parentHeist(); } @Override public JSONObject getConfig() { return this.mConfig; } @Override public Instructation getPrimeDirective() { return this.mAffinityPrimeDirective; } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Chronicle.java ================================================ package com.sauron.shadow.chronicle; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.sauron.heist.heistron.Heistum; import com.sauron.shadow.chronicle.dao.BasicChronicleManipulator; import org.apache.ibatis.session.SqlSession; public interface Chronicle extends Heistum { IbatisClient getPrimaryDataIbatisClient(); SqlSession getPrimarySharedSqlSession(); BasicChronicleManipulator getBasicChronicleManipulator(); } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ChronicleHeist.java ================================================ package com.sauron.shadow.chronicle; import com.pinecone.framework.system.NonNull; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.ProxyProvokeHandleException; import com.pinecone.framework.system.construction.Structure; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.JSONGet; import com.pinecone.slime.jelly.source.ibatis.SoloSessionMapperPool; import 
com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.sauron.heist.heistron.CascadeHeist; import com.sauron.heist.heistron.Crew; import com.sauron.heist.heistron.HTTPHeist; import com.sauron.heist.heistron.Heistgram; import com.sauron.heist.heistron.chronic.PeriodicHeistRehearsal; import com.sauron.shadow.chronicle.dao.BasicChronicleManipulator; import org.apache.ibatis.session.SqlSession; import java.util.Map; //@Heistlet( "Chronicle" ) public class ChronicleHeist extends HTTPHeist implements Chronicle { protected PeriodicHeistRehearsal mPeriodicHeistKernel; @JSONGet( "PrimaryRDB" ) protected String mszPrimaryRDBName ; protected IbatisClient mPrimaryDataIbatisClient ; protected SqlSession mPrimarySharedSqlSession; @Structure protected BasicChronicleManipulator mBasicChronicleManipulator; public ChronicleHeist( Heistgram heistron ){ super( heistron ); this.initSelf(); } public ChronicleHeist( Heistgram heistron, JSONConfig joConfig ){ super( heistron, joConfig ); this.initSelf(); } public ChronicleHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) { super( heistron, parent, szChildName ); this.initSelf(); } protected void initSelf() { if( this.isSlave() ) { this.mPeriodicHeistKernel = new ChroniclePeriodicHeistKernel( this ); this.parentSystem().getPrimaryConfigScope().autoInject( ChronicleHeist.class, this.getConfig(), this ); this.mPrimaryDataIbatisClient = (IbatisClient) this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszPrimaryRDBName ); this.mPrimarySharedSqlSession = this.mPrimaryDataIbatisClient.openSession( true ); this.prepareChildrenConfig(); this.prepareScopeDAOManipulator(); this.parentSystem().getPrimaryConfigScope().autoConstruct( ChronicleHeist.class, this.getConfig(), this ); } } protected void prepareChildrenConfig() { JSONObject jo = this.mPeriodicHeistKernel.getRaiderConfigs(); for( Object o : jo.entrySet() ) { Map.Entry kv = (Map.Entry) o; JSONObject jc = (JSONObject)kv.getValue(); jc.put( "__proto__", this.getConfig() ); // Jesus, no needs to override... Just using prototype chain. 
} } protected void prepareScopeDAOManipulator() { this.mPrimaryDataIbatisClient.addDataAccessObjectScope( this.getClass().getPackageName() ); this.parentSystem().getDispenserCenter().getInstanceDispenser().register( BasicChronicleManipulator.class, new SoloSessionMapperPool( this.mPrimarySharedSqlSession, BasicChronicleManipulator.class ) ); //this.mBasicChronicleManipulator = this.mPrimarySharedSqlSession.getMapper( BasicChronicleManipulator.class ); } @Override public IbatisClient getPrimaryDataIbatisClient() { return this.mPrimaryDataIbatisClient; } @Override public SqlSession getPrimarySharedSqlSession() { return this.mPrimarySharedSqlSession; } @Override public BasicChronicleManipulator getBasicChronicleManipulator(){ return this.mBasicChronicleManipulator; // return new BasicChronicleManipulator() { // @Override // public void insertOneNews(String szTableName, String szObjectName, String szDateTime, String szNewsIndex) { // // } // }; } @Override public Crew newCrew( int nCrewId ) { return new ChronicleReaver( this, nCrewId ); } @Override public void toRavage(){ ChronicleHeist.this.infoLifecycle( "Chronicle Heist Vitalization","Vitalized" ); // ZhihuClerk clerk = new ZhihuClerk( // this, 1999, this.mPeriodicHeistKernel.getRaiderConfigs().optJSONObject( "Zhihu" ) // ); // clerk.toRavage(); // clerk.toRavage(); // clerk.toRavage(); try{ this.mPeriodicHeistKernel.joinVitalize(); } catch ( InterruptedException e ) { throw new ProxyProvokeHandleException( e ); } ChronicleHeist.this.infoLifecycle( "Chronicle Heist Termination","Terminated" ); } @Override public void toStalk(){ } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ChroniclePeriodicHeistKernel.java ================================================ package com.sauron.shadow.chronicle; import com.sauron.heist.heistron.Heistum; import com.sauron.heist.heistron.chronic.ArchPeriodicHeistRehearsal; public class ChroniclePeriodicHeistKernel extends ArchPeriodicHeistRehearsal { public ChroniclePeriodicHeistKernel( Heistum heistum, boolean bDaemon ) { super( heistum, bDaemon ); } public ChroniclePeriodicHeistKernel( Heistum heistum ) { this( heistum, false ); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ChronicleReaver.java ================================================ package com.sauron.shadow.chronicle; import com.sauron.heist.heistron.HTTPCrew; import com.sauron.heist.heistron.HTTPHeist; import com.sauron.heist.heistron.LootRecoveredException; import com.sauron.heist.heistron.Reaver; import com.sauron.heist.heistron.LootAbortException; import java.io.IOException; public class ChronicleReaver extends HTTPCrew implements Reaver { public ChronicleReaver( HTTPHeist heist, int id ){ super( heist, id ); } @Override protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException { //Page retryPage = this.queryHTTPPageSafe(new Request("https://www.artstation.com/sitemap.xml")); //Debug.trace( retryPage.getRawText() ); //this.terminate(); } @Override public void toRavage() { this.startBatchTask(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Clerk.java ================================================ package com.sauron.shadow.chronicle; import com.pinecone.framework.system.homotype.StereotypicInjector; import com.pinecone.framework.util.json.JSONObject; import 
com.sauron.heist.heistron.chronic.Raider; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; /** * Bean Nuts Hazelnut Sauron Tritium - Sauron`s Shadow For Java, Clerk [史官, 书记] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Cooperate with the chronicle system for periodic crawler to retrieve data. * 配合编年史系统,面向周期性爬虫数据取回 * ***************************************************************************************** * Dragon King, the undefined */ public interface Clerk extends Raider { JSONObject getConfig(); default StereotypicInjector autoInject( Class stereotype ) { return this.parentSystem().getPrimaryConfigScope().autoInject( stereotype, this.getConfig(), this ); } default String nowDateTime() { return LocalDateTime.now().format( DateTimeFormatter.ofPattern( "yyyy-MM-dd HH:mm:ss" ) ); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/BaiduClerk.java ================================================ package com.sauron.shadow.chronicle.Newstron; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.JSONGet; import com.sauron.heist.heistron.HTTPHeist; import com.sauron.shadow.chronicle.ArchClerk; import com.sauron.shadow.chronicle.Clerk; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.nodes.Node; import us.codecraft.webmagic.Page; public class BaiduClerk extends ArchClerk implements Clerk { @JSONGet( "TopHref" ) protected String mszTopHref; public BaiduClerk( HTTPHeist heist, int id, JSONObject joConfig ){ super( heist, id, joConfig, BaiduClerk.class ); } @Override public void toRavage() { JSONObject jBuiduToIndex = new JSONMaptron(); try{ this.parseBaiduTopIndex( jBuiduToIndex ); this.parentHeist().getBasicChronicleManipulator().insertOneNews( this.mszNewsDataTable, "BaiduTop", this.nowDateTime(), StringUtils.addSlashes( jBuiduToIndex.toJSONString() ) ); //Debug.trace( jBuiduToIndex ); } catch ( IllegalStateException e ) { this.tracer().warn( String.format("[Fatality] (%s : %s) ", e.getClass().getSimpleName(), e.getMessage()) ); } } protected void parseBaiduTopIndex( JSONObject jIndex ) throws IllegalStateException { try { Page httpPage = this.getHTTPPage( this.mszTopHref ); Document document = httpPage.getHtml().getDocument(); Element rootElement = document.getElementById( "sanRoot" ); // API sanRoot 20221127 if ( rootElement != null ) { Node firstChild = rootElement.childNode( 0 ); if ( firstChild.nodeName().equals("#comment") ) { String szInner = firstChild.toString(); int nJsonAt = szInner.indexOf("s-data:"); if ( nJsonAt != -1 ) { nJsonAt += 7; jIndex.jsonDecode( szInner.substring( nJsonAt ).trim() ); return; } } } } catch ( Exception e ) { throw new IllegalStateException( "IllegalStateException: CompromisedParseBaiduTop", e ); } throw new IllegalStateException( "IllegalStateException: CompromisedParseBaiduTop" ); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/CNNClerk.java ================================================ package com.sauron.shadow.chronicle.Newstron; import com.pinecone.framework.util.Randomium; import com.pinecone.framework.util.StringUtils; import 
com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.JSONGet; import com.sauron.heist.heistron.HTTPHeist; import com.sauron.shadow.chronicle.ArchClerk; import com.sauron.shadow.chronicle.Clerk; import us.codecraft.webmagic.Page; public class CNNClerk extends ArchClerk implements Clerk { @JSONGet( "NewsHref" ) protected String mszNewsHref; @JSONGet( "TopN" ) protected int mnTopN; @JSONGet( "request_id" ) protected String mszRequestId; public CNNClerk( HTTPHeist heist, int id, JSONObject joConfig ){ super( heist, id, joConfig, CNNClerk.class ); } @Override public void toRavage() { JSONObject jIndex = new JSONMaptron(); this.parseCNNIndex( jIndex ); this.parentHeist().getBasicChronicleManipulator().insertOneNews( this.mszNewsDataTable, "CNNNewsTop" + this.mnTopN, this.nowDateTime(), StringUtils.addSlashes( jIndex.toJSONString() ) ); //Debug.trace( jIndex ); } protected void parseCNNIndex0( JSONObject jIndex ) throws IllegalStateException { String szHref = String.format( this.mszNewsHref, this.mnTopN, ( new Randomium() ).nextString( 8 ) ); Page httpPage = this.getHTTPPage( szHref ); jIndex.clear(); jIndex.jsonDecode( httpPage.getRawText() ); jIndex.eliminateExcepts( "result" ); } protected void parseCNNIndex( JSONObject jIndex ) throws IllegalStateException { try { this.parseCNNIndex0( jIndex ); } catch ( Exception e ) { try { this.parseCNNIndex0( jIndex ); } catch ( Exception e1 ) { throw new IllegalStateException( "IllegalStateException: CompromisedParseCNN", e ); } } } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/GoogleClerk.java ================================================ package com.sauron.shadow.chronicle.Newstron; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.JSONGet; import com.sauron.heist.heistron.HTTPHeist; import com.sauron.shadow.chronicle.ArchClerk; import com.sauron.shadow.chronicle.Clerk; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; import us.codecraft.webmagic.Page; import java.text.Normalizer; public class GoogleClerk extends ArchClerk implements Clerk { @JSONGet( "NewsHref" ) protected String mszNewsHref; @JSONGet( "TopN" ) protected int mnTopN; public GoogleClerk( HTTPHeist heist, int id, JSONObject joConfig ){ super( heist, id, joConfig, GoogleClerk.class ); } @Override public void toRavage() { JSONObject jGoogleIndex = new JSONMaptron(); this.fetchGoogleNewsIndexTopN( jGoogleIndex ); this.parentHeist().getBasicChronicleManipulator().insertOneNews( this.mszNewsDataTable, "GoogleNewsTop" + this.mnTopN + "Pages", this.nowDateTime(), StringUtils.addSlashes( jGoogleIndex.toJSONString() ) ); //Debug.trace( jGoogleIndex ); } public static String googleStringNormalize( String sz ) { if ( sz == null || sz.isEmpty() ) { return sz; } int nStemFrontAt = 0; int nStemEndAt = sz.length() - 1; while ( nStemFrontAt < sz.length() && Character.isWhitespace(sz.charAt(nStemFrontAt)) ) { nStemFrontAt++; } while ( nStemEndAt > nStemFrontAt && Character.isWhitespace(sz.charAt(nStemEndAt)) ) { nStemEndAt--; } if ( nStemFrontAt > nStemEndAt ) { return ""; } String trimmedString = sz.substring( nStemFrontAt, nStemEndAt + 1 ); return Normalizer.normalize( trimmedString, Normalizer.Form.NFC ); } protected void 
fetchGoogleNewsIndexTopN( JSONObject jIndex ) { for ( int i = 0; i < this.mnTopN; ++i ) { this.parseGoogleNewsIndexSinglePage( jIndex, i ); } } protected void parseGoogleNewsIndexSinglePage( JSONObject jIndex, int nPageId ) { String szHrefById = this.mszNewsHref + ( nPageId * 10 ); try { Page httpPage = this.getHTTPPage( szHrefById ); Document document = httpPage.getHtml().getDocument(); Element lpList = document.getElementById( "search" ); if ( lpList != null ) { Elements children = lpList.children(); if( children.size() == 1 ) { children = children.get(0).children(); if( children.size() == 1 || children.size() == 2 ) { if( children.size() == 2 && children.get(0).tagName().toLowerCase().equals( "h1" ) ) { children = children.get(1).children(); } else { children = children.get(0).children(); } if( children.size() == 1 ) { children = children.get(0).children(); if( children.size() == 1 ) { children = children.get(0).children(); } } } } int nNews = nPageId * 10; for ( Element lpChild : children ) { Elements aNodes = lpChild.getElementsByTag( "a" ); if( aNodes.size() == 1 ) { Element aNode = aNodes.get(0); Elements nexts = aNode.children(); if( !nexts.isEmpty() ){ nexts = nexts.get(0).children(); if( nexts.size() == 2 ) { Element contentDiv = nexts.get( 1 ); // The final content. JSONObject jNews = new JSONMaptron(); jNews.put( "id", nNews ); jNews.put( "href", aNode.attr("href") ); Elements divElements = contentDiv.children(); int nDiv = 0; for ( Element divElement : divElements ) { if ( divElement.tagName().equals("div") ) { if ( nDiv == 0 ) { jNews.put( "source", GoogleClerk.googleStringNormalize( divElement.text() ) ); } else if ( nDiv == 1 ) { jNews.put( "title", GoogleClerk.googleStringNormalize( divElement.text() ) ); } else if ( nDiv == 2 ) { jNews.put( "abstract", GoogleClerk.googleStringNormalize( divElement.text() ) ); } else if ( nDiv == 4 ) { jNews.put( "timeSpan", GoogleClerk.googleStringNormalize( divElement.text() ) ); } ++nDiv; } } jIndex.append( "data", jNews ); ++nNews; } } } } } } catch ( Exception e ) { this.tracer().warn(String.format( "[Fatality] (%s : %s : %d) ", e.getClass().getSimpleName(), e.getMessage(), nPageId) ); } } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/ZhihuClerk.java ================================================ package com.sauron.shadow.chronicle.Newstron; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.JSONGet; import com.pinecone.hydra.auto.Instructation; import com.sauron.heist.heistron.HTTPHeist; import com.sauron.shadow.chronicle.ArchClerk; import com.sauron.shadow.chronicle.Clerk; public class ZhihuClerk extends ArchClerk implements Clerk { @JSONGet( "HotlineApi" ) protected String mszHotlineApi; @JSONGet( "Global.IndexFrom" ) protected int mnIndexFrom; @JSONGet( "Global.IndexTo" ) protected int mnIndexTo; @JSONGet( "Global.IndexStep" ) protected int mnIndexStep; public ZhihuClerk( HTTPHeist heist, int id, JSONObject joConfig ){ super( heist, id, joConfig, ZhihuClerk.class ); } @Override public void toRavage() { JSONObject jZhihuIndex = new JSONMaptron(); this.fetchZhihuByRange( this.mnIndexFrom, this.mnIndexTo, this.mnIndexStep, jZhihuIndex ); this.parentHeist().getBasicChronicleManipulator().insertOneNews( this.mszNewsDataTable, "ZhihuTop" + this.mnIndexStep, 
this.nowDateTime(), StringUtils.addSlashes( jZhihuIndex.toJSONString() )
        );
        //Debug.trace( jZhihuIndex );
    }

    protected void fetchZhihuByRange( int nFrom, int nTo, int nStep, JSONObject jIndex ) {
        String szApi = this.mszHotlineApi; // Zhihu v4 api
        int nItems  = nTo - nFrom;
        int nRound  = nItems / nStep;
        int nMoving = 0;
        if ( nRound * nStep < nItems ) {
            ++nRound;
        }
        if ( nRound == 1 ) {
            szApi = this.mszHotlineApi + "&limit=" + nStep + "&offset=" + nFrom + "&period=hour";
            try {
                String szHtml = this.getHTTPPage( szApi, false ).getRawText();
                jIndex.jsonDecode( szHtml );
            }
            catch ( Exception e ) {
                this.tracer().warn( String.format("[Fatality] (%s : %s) ", e.getClass().getSimpleName(), e.getMessage()) );
            }
        }
        else {
            for ( int i = 0; i < nRound; ++i ) {
                JSONObject jEach = new JSONMaptron();
                int nStepPace = nStep;
                if ( nMoving + nStep > nItems ) {
                    nStepPace = nItems - nMoving; // clamp the final round to the items actually remaining
                }
                szApi = this.mszHotlineApi + "&limit=" + nStepPace + "&offset=" + ( nFrom + nMoving ) + "&period=hour";
                try {
                    String szHtml = this.getHTTPPage( szApi, false ).getRawText();
                    jEach.jsonDecode( szHtml );
                    JSONArray data = jEach.optJSONArray( "data" );
                    for ( int j = 0; j < data.length(); ++j ) {
                        jIndex.append( "data", data.getJSONObject( j ) ); // merge this round's entries into the result
                    }
                    jIndex.put( "paging", jEach.getJSONObject( "paging" ) );
                    nMoving += nStep;
                }
                catch ( Exception e ) {
                    this.tracer().warn( String.format("[Fatality] (%s : %s) ", e.getClass().getSimpleName(), e.getMessage()) );
                }
            }
        }
    }

    @Override
    public Instructation getPrimeDirective() {
        return this.mAffinityPrimeDirective;
    }
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/SimpleAjaxBasedClerk.java
================================================
package com.sauron.shadow.chronicle;

import com.pinecone.framework.util.StringUtils;
import com.pinecone.framework.util.json.JSONArray;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.hydra.auto.Instructation;
import com.sauron.heist.heistron.HTTPHeist;
import com.sauron.heist.heistron.orchestration.Instructations;

public class SimpleAjaxBasedClerk extends ArchClerk implements Clerk {
    protected JSONArray mTasks;

    public SimpleAjaxBasedClerk( HTTPHeist heist, int id, JSONObject joConfig ){
        super( heist, id, joConfig, SimpleAjaxBasedClerk.class );
        this.mTasks = this.mConfig.optJSONArray( "Tasks" );
    }

    @Override
    public void toRavage() {
        Instructations.infoConformed( SimpleAjaxBasedClerk.this );
        for( Object o : this.mTasks ) {
            JSONObject jo = (JSONObject) o;
            String szObjectName = jo.optString( "ObjectName" );
            String szApi        = jo.optString( "Api" );
            try{
                String szNewsIndex = this.getHTTPPage( szApi ).getRawText();
                JSONObject tmp = new JSONMaptron( szNewsIndex );
                this.parentHeist().getBasicChronicleManipulator().insertOneNews(
                        this.mszNewsDataTable, szObjectName, this.nowDateTime(), StringUtils.addSlashes( tmp.toJSONString() )
                );
                //Debug.trace( this.getHTTPPage( szApi ).getRawText() );
            }
            catch ( Exception e ) {
                SimpleAjaxBasedClerk.this.tracer().warn( String.format("[Fatality] (%s::%s : %s) ", szObjectName, e.getClass().getSimpleName(), e.getMessage()) );
            }
        }
    }

    @Override
    public Instructation getPrimeDirective() {
        return this.mAffinityPrimeDirective;
    }
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/dao/BasicChronicleManipulator.java
================================================
package com.sauron.shadow.chronicle.dao;

import com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;
import
org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;

@IbatisDataAccessObject( scope = "MySQLKingData0" )
public interface BasicChronicleManipulator {
    @Insert( "INSERT INTO ${tableName} ( `object_name`, `date_time`, `news_index` ) VALUES ( '${object_name}', '${date_time}', '${news_index}' )" )
    void insertOneNews(
            @Param( "tableName" )   String szTableName,
            @Param( "object_name" ) String szObjectName,
            @Param( "date_time" )   String szDateTime,
            @Param( "news_index" )  String szNewsIndex
    );
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Apesk/ApeskHeist.java
================================================
package com.sauron.shadow.heists.Apesk;

import com.pinecone.ulf.rdb.mysql.MySQLExecutor;
import com.sauron.heist.heistron.Crew;
import com.sauron.heist.heistron.HTTPIndexHeist;
import com.pinecone.framework.util.config.JSONConfig;
import com.sauron.heist.heistron.Heistgram;

public class ApeskHeist extends HTTPIndexHeist {
    protected MySQLExecutor mysql;

    public ApeskHeist( Heistgram heistron ){
        super( heistron );
        this.init();
    }

    public ApeskHeist( Heistgram heistron, JSONConfig joConfig ){
        super( heistron, joConfig );
        this.init();
    }

    @Override
    protected void init() {
        super.init();
    }

    @Override
    public Crew newCrew( int nCrewId ) {
        return new ApeskReaver( this, nCrewId );
    }

    @Override
    public String queryHrefById ( long id ) {
        return this.heistURL + this.getConfig().optString( "SubHref" ) + id;
    }

    @Override
    public void toRavage(){
        super.toRavage();
    }

    @Override
    public void toStalk(){
        ( new ApeskStalker( this, 0 ) ).toStalk();
    }
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Apesk/ApeskReaver.java
================================================
package com.sauron.shadow.heists.Apesk;

import com.sauron.heist.heistron.HTTPIndexHeist;
import com.sauron.heist.heistron.MegaDOMIndexCrew;
import com.sauron.heist.heistron.Reaver;
import org.jsoup.nodes.Element;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;

public class ApeskReaver extends MegaDOMIndexCrew implements Reaver {
    public ApeskReaver( HTTPIndexHeist heist, int id ){
        super( heist, id );
    }

    protected Page compressSoloMBTIArchive( Page page, Request request ) {
        Element typeInfo = page.getHtml().getDocument().selectFirst(".results .type-info");
        Element segRow   = page.getHtml().getDocument().selectFirst(".results .row");
        String szRawPage = "";
        if( typeInfo != null ) {
            szRawPage += "" + typeInfo.html() + "\n";
        } else {
            this.logger.info("NoTypeInfo");
        }
        if( segRow != null ) {
            szRawPage += "" + segRow.html() + "\n";
        } else {
            this.logger.info("NoSegRow");
        }
        int id = request.getExtra("id");
        szRawPage += this.fetchCompressSoloMBTIArchiveExRawPage( id );
        return this.parentHeist().extendPage( szRawPage, page );
    }

    protected String fetchCompressSoloMBTIArchiveExRawPage( long id ) {
        String newUrl = this.heistURL + "/mbti/submit_email_date_cx_m.asp?code=223.73.241.5&user=" + id;
        Request request = new Request( newUrl );
        request.setCharset( "gb2312" );
        Page page = this.queryHTTPPageSafe( request );
        Element rawInfo = page.getHtml().getDocument().selectFirst("table[align='center'][border='0']");
        if( rawInfo != null ) {
            return "" + ( rawInfo.html() ) + "\n";
        } else {
            this.logger.info("NoRawInfo");
        }
        return "";
    }

    @Override
    protected Page afterPageFetched( Page page, Request request ){
        return this.compressSoloMBTIArchive( page, request );
    }

    @Override
    public String querySpoilStoragePath( long id ) {
        return this.querySpoilStorageDir( id ) + "page_" + id + ".html";
    }

    @Override
    public void toRavage() {
        this.startBatchTask();
    }
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Apesk/ApeskStalker.java
================================================
package com.sauron.shadow.heists.Apesk;

import com.pinecone.framework.util.json.JSONObject;
import com.sauron.heist.heistron.*;

public class ApeskStalker extends HTTPCrew implements Stalker {
    protected int mutualID;
    protected String mszQueryCookie = "";
    protected JSONObject mjoConfig;

    public ApeskStalker( HTTPIndexHeist heist, int id ){
        super( heist, id );
        this.mjoConfig = this.parentHeist().getConfig();
        this.mszQueryCookie = this.mjoConfig.optString( "QueryCookie" );
    }

    @Override
    protected void tryConsumeById( long index ) throws LootRecoveredException, LootAbortException, IllegalStateException {
        // try{
        //     Debug.trace( new String( this.getHTTPFile( "https://rednest.cn" ).getBytes(), "UTF8" ) );
        // }
        // catch ( exception e ) {
        //
        // }
    }

    @Override
    public void toStalk() {
    }
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/ArtStation/ArtStationHeist.java
================================================
package com.sauron.shadow.heists.ArtStation;

import com.pinecone.framework.util.config.JSONConfig;
import com.sauron.heist.heistron.*;
import com.pinecone.framework.util.json.JSONArray;
import com.pinecone.framework.util.json.JSONException;
import com.pinecone.ulf.rdb.mysql.MySQLExecutor;
import com.pinecone.ulf.rdb.mysql.MySQLHost;

import java.sql.SQLException;

public class ArtStationHeist extends HTTPIndexHeist {
    protected MySQLExecutor mysql;

    public ArtStationHeist( Heistotron heistron ){
        super( heistron );
        this.init();
    }

    public ArtStationHeist( Heistotron heistron, JSONConfig joConfig ){
        super( heistron, joConfig );
        this.init();
    }

    @Override
    protected void init() {
        super.init();
        try{
            this.mysql = new MySQLExecutor( new MySQLHost( "192.168.1.177:33062/nonaron", "root", "root" ) );
        }
        catch ( SQLException e ) {
            this.handleKillException( e );
        }
    }

    @Override
    public Crew newCrew( int nCrewId ) {
        return new ArtStationReaver( this, nCrewId );
    }

    @Override
    public String queryHrefById ( long id ) {
        try {
            JSONArray ja = this.mysql.fetch( "SELECT href FROM nona_pubchem_sitemap_idx WHERE mutual_id =" + id );
            return ja.getJSONObject( 0 ).getString( "href" );
        }
        catch ( SQLException | JSONException e ) {
            this.handleAliveException( e );
        }
        return "";
    }

    @Override
    public void toRavage(){
        super.toRavage();
    }

    @Override
    public void toStalk(){
        ( new ArtStationStalker( this, 0 ) ).toStalk();
    }
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/ArtStation/ArtStationReaver.java
================================================
package com.sauron.shadow.heists.ArtStation;

import com.sauron.heist.heistron.*;
import org.jsoup.nodes.Document;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;

public class ArtStationReaver extends MegaDOMIndexCrew implements Reaver {
    public ArtStationReaver( HTTPIndexHeist heist, int id ){
        super( heist, id );
    }

    @Override
    protected Page afterPageFetched( Page page, Request request ){
        Document
document = page.getHtml().getDocument(); String id = document.select( "meta[name='ncbi_pubchem_cid']" ).attr( "content" ); String seg = document.select( "meta[name='pubchem_uid_name']" ).attr( "content" ); String newUrl = this.heistURL + "/rest/pug_view/data/"+ seg +"/" + id + "/JSON/"; return this.queryHTTPPageSafe( ( new Request( newUrl ) ).putExtra( "id", id ) ); } @Override public String querySpoilStoragePath( long id ) { return this.querySpoilStorageDir( id ) + "page_" + id + ".json"; } @Override public void toRavage() { this.startBatchTask(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/ArtStation/ArtStationStalker.java ================================================ package com.sauron.shadow.heists.ArtStation; import com.pinecone.framework.util.json.JSONMaptron; import com.sauron.heist.heistron.*; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONObject; import org.jsoup.select.Elements; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; import java.io.File; import java.util.Map; public class ArtStationStalker extends HTTPCrew implements Stalker { protected int mutualID; protected String mszQueryCookie = ""; public ArtStationStalker( HTTPIndexHeist heist, int id ){ super( heist, id ); this.mszQueryCookie = this.parentHeist().getConfig().optString( "QueryCookie" ); } @Override protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException { // try{ // Debug.trace( new String( this.getHTTPFile( "https://rednest.cn" ).getBytes(), "UTF8" ) ); // } // catch ( exception e ) { // // } } protected void stalk_inlet_index() { JSONObject joSiteMaps = this.parentHeist().getConfig().optJSONObject( "SiteMaps" ); this.mutualID = 1; for( Object ok : joSiteMaps.entrySet() ) { Map.Entry k = (Map.Entry) ok; this.stalk_sub_site_map( k.getKey().toString(), (JSONObject) k.getValue() ); } //this.stalk_sub_site_map( "annotation", joSiteMaps.optJSONObject("annotation") ); } protected void stalk_sub_site_map( String szSeg, JSONObject jo ) { String szIndexPath = this.parentHeist().getIndexPath(); File fSegFileDir = new File( szIndexPath ); fSegFileDir.mkdir(); String szSegFile = szIndexPath + "/" + szSeg + ".xml"; Page cachePage; String href = jo.optString( "href" ); cachePage = this.queryHTTPPage( new Request(href).addHeader( "Cookie", this.mszQueryCookie ), szSegFile ); Elements elements = cachePage.getHtml().getDocument().select( "loc" ); JSONObject joIndexList = new JSONMaptron(); for ( int i = 0; i < elements.size(); i++ ) { String szItemHref = elements.get(i).text(); String[] debris = szItemHref.split( this.heistURL + "/" ); String szItemFN = debris[1]; String szSegment = "artists"; if( szItemFN.contains( "artists" ) ) { szSegment = "artists"; } else if( szItemFN.contains( "artworks" ) ) { szSegment = "artworks"; } else { continue; } joIndexList.affirmArray( szSegment ).put( szItemFN ); String szLocalPath = szIndexPath + szItemFN; cachePage = this.queryHTTPPage( new Request(szItemHref).addHeader( "Cookie", this.mszQueryCookie ), szLocalPath ); if( cachePage.getStatusCode() != 200 ) { this.logger.error( "", i, szItemHref, cachePage.getStatusCode() ); } else { this.logger.info( "", i, szItemHref, cachePage.getBytes().length ); } } Debug.trace( joIndexList.size() ); } protected void profileSiteMap() { this.stalk_inlet_index(); } @Override public void toStalk() { this.profileSiteMap(); } } 
================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DeviantArt/DeviantArtHeist.java ================================================ package com.sauron.shadow.heists.DeviantArt; import com.pinecone.framework.util.config.JSONConfig; import com.sauron.heist.heistron.*; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONException; import com.pinecone.ulf.rdb.mysql.MySQLExecutor; import com.pinecone.ulf.rdb.mysql.MySQLHost; import java.sql.SQLException; public class DeviantArtHeist extends HTTPIndexHeist { protected MySQLExecutor mysql; public DeviantArtHeist( Heistotron heistron ){ super( heistron ); this.init(); } public DeviantArtHeist(Heistotron heistron, JSONConfig joConfig ){ super( heistron, joConfig ); this.init(); } @Override protected void init() { super.init(); try{ this.mysql = new MySQLExecutor( new MySQLHost( "192.168.1.177:33062/nonaron", "root", "root" )); } catch ( SQLException e ) { this.handleKillException( e ); } } @Override public Crew newCrew( int nCrewId ) { return new DeviantArtReaver( this, nCrewId ); } @Override public String queryHrefById ( long id ) { try { JSONArray ja = this.mysql.fetch( "SELECT href FROM nona_pubchem_sitemap_idx WHERE mutual_id =" + id ); return ja.getJSONObject( 0 ).getString( "href" ); } catch ( SQLException | JSONException e ) { this.handleAliveException( e ); } return ""; } @Override public void toRavage(){ super.toRavage(); } @Override public void toStalk(){ ( new DeviantArtStalker( this, 0 ) ).toStalk(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DeviantArt/DeviantArtReaver.java ================================================ package com.sauron.shadow.heists.DeviantArt; import com.sauron.heist.heistron.*; import org.jsoup.nodes.Document; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; public class DeviantArtReaver extends MegaDOMIndexCrew implements Reaver { public DeviantArtReaver( HTTPIndexHeist heist, int id ){ super( heist, id ); } @Override protected Page afterPageFetched( Page page, Request request ){ Document document = page.getHtml().getDocument(); String id = document.select( "meta[name='ncbi_pubchem_cid']" ).attr( "content" ); String seg = document.select( "meta[name='pubchem_uid_name']" ).attr( "content" ); String newUrl = this.heistURL + "/rest/pug_view/data/"+ seg +"/" + id + "/JSON/"; return this.queryHTTPPageSafe( ( new Request( newUrl ) ).putExtra( "id", id ) ); } @Override public String querySpoilStoragePath( long id ) { return this.querySpoilStorageDir( id ) + "page_" + id + ".json"; } @Override public void toRavage() { this.startBatchTask(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DeviantArt/DeviantArtStalker.java ================================================ package com.sauron.shadow.heists.DeviantArt; import com.sauron.heist.heistron.*; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.io.FileUtils; import com.pinecone.framework.util.json.JSONObject; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; import java.io.IOException; public class DeviantArtStalker extends HTTPCrew implements Stalker { protected int mutualID; protected String mszQueryCookie = ""; protected JSONObject mjoConfig ; public DeviantArtStalker( HTTPIndexHeist heist, int 
id ){ super( heist, id ); this.mjoConfig = this.parentHeist().getConfig(); this.mszQueryCookie = this.mjoConfig.optString( "QueryCookie" ); } @Override protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException { // try{ // Debug.trace( new String( this.getHTTPFile( "https://rednest.cn" ).getBytes(), "UTF8" ) ); // } // catch ( exception e ) { // // } } protected void stalk_inlet_index() { String szIndexPath = this.parentHeist().getIndexPath(); String szGZIndexPath = szIndexPath + "/RawGZ/"; String szInletSMLocal = szIndexPath + this.mjoConfig.optString( "InletSitemap" ); try{ Page page = this.parentHeist().extendPage( FileUtils.readAll( szInletSMLocal ), new Request("") ); Elements elements = page.getHtml().getDocument().select( "loc" ); String szMajorHref = this.heistURL + "/sitemaps/"; for ( int i = 0; i < elements.size(); i++ ) { Element loc = elements.get(i); String szLocHref = loc.text(); String[] debris = szLocHref.split( szMajorHref ); String szGZFN = debris[1]; String szGZLocalPath = szGZIndexPath + szGZFN; this.queryHTTPPage( new Request( szLocHref ), szGZLocalPath ); Debug.trace( szGZLocalPath, i ); } } catch ( IOException e ) { this.handleKillException( e ); } } protected void profileSiteMap() { this.stalk_inlet_index(); } @Override public void toStalk() { this.profileSiteMap(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DownloadCNet/DownloadCNetHeist.java ================================================ package com.sauron.shadow.heists.DownloadCNet; import com.pinecone.framework.util.config.JSONConfig; import com.sauron.heist.heistron.Crew; import com.sauron.heist.heistron.HTTPIndexHeist; import com.sauron.heist.heistron.Heistotron; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONException; import com.pinecone.ulf.rdb.mysql.MySQLExecutor; import com.pinecone.ulf.rdb.mysql.MySQLHost; import java.sql.SQLException; public class DownloadCNetHeist extends HTTPIndexHeist { protected MySQLExecutor mysql; public DownloadCNetHeist(Heistotron heistron ){ super( heistron ); this.init(); } public DownloadCNetHeist(Heistotron heistron, JSONConfig joConfig ){ super( heistron, joConfig ); this.init(); } @Override protected void init() { super.init(); try{ this.mysql = new MySQLExecutor( new MySQLHost( "node1.nutgit.com:13393/nonaron", "root", "root" )); } catch ( SQLException e ) { this.handleKillException( e ); } } @Override public Crew newCrew( int nCrewId ) { return new DownloadCNetReaver( this, nCrewId ); } @Override public String queryHrefById ( long id ) { try { JSONArray ja = this.mysql.fetch( "SELECT href FROM nona_download_cnet_idx WHERE mutual_id =" + id ); return ja.getJSONObject( 0 ).getString( "href" ); } catch ( SQLException | JSONException e ) { this.handleAliveException( e ); } return ""; } @Override public void toRavage(){ super.toRavage(); } @Override public void toStalk(){ ( new DownloadCNetStalker( this, 0 ) ).toStalk(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DownloadCNet/DownloadCNetReaver.java ================================================ package com.sauron.shadow.heists.DownloadCNet; import com.sauron.heist.heistron.HTTPIndexHeist; import com.sauron.heist.heistron.MegaDOMIndexCrew; import com.sauron.heist.heistron.Reaver; import org.jsoup.nodes.Element; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; 
public class DownloadCNetReaver extends MegaDOMIndexCrew implements Reaver {
    public DownloadCNetReaver( HTTPIndexHeist heist, int id ){
        super( heist, id );
    }

    protected Page compressSoloArchive( Page page, Request request ) {
        Element infoJSON = page.getHtml().getDocument().selectFirst("script[data-hid='ld+json']");
        Element mainPage = page.getHtml().getDocument().selectFirst(".c-layoutDefault_page .c-scrollPercent");
        Element megaJSON = page.getHtml().getDocument().selectFirst("body script:nth-child(2)");
        String szRawPage = "";
        if( infoJSON != null ) {
            szRawPage += "\n";
        } else {
            this.logger.info("NoFirstJSON");
        }
        if( mainPage != null ) {
            szRawPage += "" + mainPage.html() + "\n";
        } else {
            this.logger.info("NoMainPage");
        }
        if( megaJSON != null ) {
            szRawPage += "\n";
        } else {
            this.logger.info("NoMegaJSON");
        }
        return this.parentHeist().extendPage( szRawPage, page );
    }

    @Override
    protected Page afterPageFetched( Page page, Request request ){
        return this.compressSoloArchive( page, request );
    }

    @Override
    public String querySpoilStoragePath( long id ) {
        return this.querySpoilStorageDir( id ) + "page_" + id + ".html";
    }

    @Override
    public void toRavage() {
        this.startBatchTask();
    }
}

================================================
FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DownloadCNet/DownloadCNetStalker.java
================================================
package com.sauron.shadow.heists.DownloadCNet;

import com.pinecone.framework.util.json.JSONMaptron;
import com.sauron.heist.heistron.*;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.io.FileUtils;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.framework.util.rdb.MappedSQLSplicer;
import org.jsoup.select.Elements;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

public class DownloadCNetStalker extends HTTPCrew implements Stalker {
    protected int mutualID;

    public DownloadCNetStalker( HTTPIndexHeist heist, int id ){
        super( heist, id );
    }

    @Override
    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException {}

    protected void stalk_inlet_index() {
        JSONObject joSiteMaps = this.parentHeist().getConfig().optJSONObject( "SiteMaps" );
        this.mutualID = 1;
        this.stalk_sub_site_map( "products", joSiteMaps.optJSONObject("products") );
    }

    protected void stalk_sub_site_map( String szSeg, JSONObject jo ) {
        String szSegFileDir = this.parentHeist().getIndexPath() + szSeg;
        File fSegFileDir = new File( szSegFileDir );
        boolean mkdir = fSegFileDir.mkdir();
        String szSegFile = szSegFileDir + "\\" + szSeg + "_main.xml";
        Page cachePage;
        File fSegFile = new File( szSegFile );
        try {
            String href = jo.optString( "href" );
            if( !fSegFile.exists() ) {
                cachePage = this.getHTTPPage( href );
                FileWriter fw = new FileWriter( fSegFile );
                fw.write( cachePage.getRawText() );
                fw.close();
            }
            else {
                String cache = FileUtils.readAll( szSegFile );
                cachePage = this.parentHeist().extendPage( cache, new Request( href ) );
            }
            Elements elements = cachePage.getHtml().getDocument().select( "loc" );
            File fSQLIndex = new File( szSegFileDir + "/" + szSeg + ".sql" );
            FileWriter fSQL = new FileWriter( fSQLIndex );
            MappedSQLSplicer sqlSplicer = new MappedSQLSplicer();
            int topicId = 1;
            for ( int i = 0; i < elements.size(); i++ ) {
                String szFN = String.format( "%s/%s_%d.xml", szSegFileDir, szSeg, i );
                cachePage = this.getHTTPPage( elements.get(i).text(), szFN );
                Elements subEles = cachePage.getHtml().getDocument().select( "loc" );
                StringBuilder sqlBuf = new StringBuilder();
                for ( int j = 0; j < subEles.size(); j++ ) {
                    String szHref = subEles.get(j).text();
                    if( szHref.length() > 333 ) {
                        continue;
                    }
                    JSONObject thisSQLMap = new JSONMaptron();
                    thisSQLMap.put( "heist", this.crewName() );
                    thisSQLMap.put( "href", szHref );
                    thisSQLMap.put( "mutual_id", this.mutualID );
                    thisSQLMap.put( "topic", szSeg );
                    thisSQLMap.put( "topic_id", topicId );
                    sqlBuf.append( sqlSplicer.spliceInsertSQL( "nona_download_cnet_idx", thisSQLMap.getMap(), false ) );
                    sqlBuf.append( ";\n" );
                    ++topicId;
                    ++this.mutualID;
                }
                fSQL.write( sqlBuf.toString() );
                Debug.trace( i );
            }
            fSQL.close();
        } catch
( IOException e ){ e.printStackTrace(); } } protected void profileSiteMap() { this.stalk_inlet_index(); } @Override public void toStalk() { this.profileSiteMap(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/PubChem/PubChemHeist.java ================================================ package com.sauron.shadow.heists.PubChem; import com.pinecone.framework.util.config.JSONConfig; import com.sauron.heist.heistron.*; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONException; import com.pinecone.ulf.rdb.mysql.MySQLExecutor; import com.pinecone.ulf.rdb.mysql.MySQLHost; import java.sql.SQLException; public class PubChemHeist extends HTTPIndexHeist { protected MySQLExecutor mysql; public PubChemHeist( Heistotron heistron ){ super( heistron ); this.init(); } public PubChemHeist(Heistotron heistron, JSONConfig joConfig ){ super( heistron, joConfig ); this.init(); } @Override protected void init() { super.init(); try{ this.mysql = new MySQLExecutor( new MySQLHost( "b-serverkingpin:33062/nonaron", "root", "root" )); } catch ( SQLException e ) { this.handleKillException( e ); } } @Override public Crew newCrew( int nCrewId ) { return new PubChemReaver( this, nCrewId ) ; } @Override public String queryHrefById( long id ) { try { JSONArray ja = this.mysql.fetch( "SELECT href FROM nona_pubchem_sitemap_idx WHERE mutual_id =" + id ); return ja.getJSONObject( 0 ).getString( "href" ); } catch ( SQLException | JSONException e ) { this.handleAliveException( e ); } return ""; } @Override public void toRavage(){ super.toRavage(); } @Override public void toStalk(){ ( new PubChemStalker( this, 0 ) ).toStalk(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/PubChem/PubChemReaver.java ================================================ package com.sauron.shadow.heists.PubChem; import com.sauron.heist.heistron.*; import org.jsoup.nodes.Document; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; public class PubChemReaver extends MegaDOMIndexCrew implements Reaver { public PubChemReaver ( HTTPIndexHeist heist, int id ){ super( heist, id ); } @Override protected Page afterPageFetched( Page page, Request request ){ Document document = page.getHtml().getDocument(); String id = document.select( "meta[name='ncbi_pubchem_cid']" ).attr( "content" ); String seg = document.select( "meta[name='pubchem_uid_name']" ).attr( "content" ); String newUrl = this.heistURL + "/rest/pug_view/data/"+ seg +"/" + id + "/JSON/"; return this.queryHTTPPageSafe( ( new Request( newUrl ) ).putExtra( "id", id ) ); } @Override public String querySpoilStoragePath( long id ) { return this.querySpoilStorageDir( id ) + "page_" + id + ".json"; } @Override public void toRavage() { this.startBatchTask(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/PubChem/PubChemStalker.java ================================================ package com.sauron.shadow.heists.PubChem; import com.pinecone.framework.util.json.JSONMaptron; import com.sauron.heist.heistron.*; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.StringUtils; import com.pinecone.framework.util.io.FileUtils; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.rdb.MappedSQLSplicer; import org.jsoup.select.Elements; import us.codecraft.webmagic.Page; import 
us.codecraft.webmagic.Request; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.Map; public class PubChemStalker extends HTTPCrew implements Stalker { protected int mutualID; public PubChemStalker ( HTTPIndexHeist heist, int id ){ super( heist, id ); } @Override protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException { // try{ // Debug.trace( new String( this.getHTTPFile( "https://rednest.cn" ).getBytes(), "UTF8" ) ); // } // catch ( exception e ) { // // } } protected void stalk_inlet_index() { JSONObject joSiteMaps = this.parentHeist().getConfig().optJSONObject( "SiteMaps" ); this.mutualID = 1; for( Object ok : joSiteMaps.entrySet() ) { Map.Entry k = (Map.Entry) ok; this.stalk_sub_site_map( k.getKey().toString(), (JSONObject) k.getValue() ); } //this.stalk_sub_site_map( "annotation", joSiteMaps.optJSONObject("annotation") ); } protected void stalk_sub_site_map( String szSeg, JSONObject jo ) { String szSegFileDir = this.parentHeist().getIndexPath() + szSeg; File fSegFileDir = new File( szSegFileDir ); fSegFileDir.mkdir(); String szSegFile = szSegFileDir + "/" + szSeg + "_main.xml"; Page cachePage; File fSegFile = new File( szSegFile ); try { String href = jo.optString( "href" ); if( !fSegFile.exists() ) { cachePage = this.getHTTPPage( href ); FileWriter fw = new FileWriter( fSegFile ); fw.write( cachePage.getRawText() ); fw.close(); } else { String cache = FileUtils.readAll( szSegFile ); cachePage = this.parentHeist().extendPage( cache, new Request( href ) ); } Elements elements = cachePage.getHtml().getDocument().select( "loc" ); File fSQLIndex = new File( szSegFileDir + "/" + szSeg + ".sql" ); FileWriter fSQL = new FileWriter( fSQLIndex ); MappedSQLSplicer sqlSplicer = new MappedSQLSplicer(); int topicId = 1; for ( int i = 0; i < elements.size(); i++ ) { String szFN = String.format( "%s/%s_%d.xml", szSegFileDir, szSeg, i ); cachePage = this.getHTTPPage( elements.get(i).text(), szFN ); Elements subEles = cachePage.getHtml().getDocument().select( "loc" ); StringBuilder sqlBuf = new StringBuilder(); for ( int j = 0; j < subEles.size(); j++ ) { JSONObject thisSQLMap = new JSONMaptron(); thisSQLMap.put( "heist", this.crewName() ); thisSQLMap.put( "href", StringUtils.addSlashes( subEles.get(j).text() ) ); thisSQLMap.put( "mutual_id", this.mutualID ); thisSQLMap.put( "topic", szSeg ); thisSQLMap.put( "topic_id", topicId ); sqlBuf.append( sqlSplicer.spliceInsertSQL( "nona_pubchem_sitemap_idx", thisSQLMap.getMap(), false ) ); sqlBuf.append( ";\n" ); ++topicId; ++this.mutualID; } fSQL.write( sqlBuf.toString() ); Debug.trace( i ); } fSQL.close(); } catch ( IOException e ){ e.printStackTrace(); } } protected void profileSiteMap() { this.stalk_inlet_index(); } @Override public void toStalk() { this.profileSiteMap(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Steam/SteamHeist.java ================================================ package com.sauron.shadow.heists.Steam; import com.pinecone.framework.util.config.JSONConfig; import com.sauron.heist.heistron.*; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONException; import com.pinecone.ulf.rdb.mysql.MySQLExecutor; import com.pinecone.ulf.rdb.mysql.MySQLHost; import java.sql.SQLException; public class SteamHeist extends HTTPIndexHeist { protected MySQLExecutor mysql; public SteamHeist( Heistotron heistron ){ super( heistron ); 
this.init(); } public SteamHeist(Heistotron heistron, JSONConfig joConfig ){ super( heistron, joConfig ); this.init(); } @Override protected void init() { super.init(); try{ this.mysql = new MySQLExecutor( new MySQLHost( "node1.nutgit.com:13393/nonaron", "root", "root" )); } catch ( SQLException e ) { this.handleKillException( e ); } } @Override public Crew newCrew( int nCrewId ) { return new SteamReaver( this, nCrewId ); } protected String queryInletHref( long id ) { return this.heistURL + "/search/?ndl=1&ignore_preferences=1&page=" + id; } @Override public String queryHrefById( long id ) { if( this.getInstanceName().equals( "FetchInletList" ) ) { return this.queryInletHref( id ); } else { try { JSONArray ja = this.mysql.fetch( "SELECT href FROM nona_steam_game_idx WHERE mutual_id =" + id ); return ja.getJSONObject( 0 ).getString( "href" ); } catch ( SQLException | JSONException e ) { this.handleAliveException( e ); } } return ""; } @Override public void toRavage(){ super.toRavage(); } @Override public void toStalk(){ //( new SteamStalker( this ) ).toStalk(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Steam/SteamReaver.java ================================================ package com.sauron.shadow.heists.Steam; import com.sauron.heist.heistron.*; import org.jsoup.select.Elements; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; public class SteamReaver extends MegaDOMIndexCrew implements Reaver { public SteamReaver(HTTPIndexHeist heist, int id ){ super( heist, id ); } @Override protected Page afterPageFetched( Page page, Request request ){ String[] cookieGroup = { "wants_mature_content=1; steamCountry=US%7C6200f47c9b62892472b38bf7bbfd9a20; browserid=3066434021590310219; sessionid=d3260d1c80b4ff080f3c3641; timezoneOffset=28800,0; _ga=GA1.2.650079156.1685022988; _gid=GA1.2.697747378.1685022988; ak_bmsc=2CA539E4F2635E8FB79CF2F744843296~000000000000000000000000000000~YAAQXY0duFuJ2y+IAQAABpQyUxPg33L/2piDSim4d/G5+YQl4fuLirFFGlXtPSRxbh3xoU0Ohnb8FyHO30d/nuLiVKiOV2X6drabWJZ1UjnodRMJLWqKYWPNZjaKf1ZLQGkHflTxg5qaAAz+dS389vPGWWM53jZvD8ZZbYsOucK3oWJoRL+I7nJhwS6k0+JZVEckl3Al3V7gvx4shiDTHmTZ/z8+dTnpFpf/fWRCWVFWRWExB/VWDNDFJInXrVTEIwcBQ4wSWRFkJfW4d/S5JQl2QSDHyqaHpgW1va2vAICYX/GKFR/lxrgXVm2LBLT6UqFv1BSx/UzJfsZZm2mCxktV2FKaASRYt3pUcfGVBXlNrZ2LCtj72mNrJ78FKD/50SWSNS72; steamLoginSecure=76561199447520905%7C%7CeyAidHlwIjogIkpXVCIsICJhbGciOiAiRWREU0EiIH0.eyAiaXNzIjogInI6MEQzNV8yMjk2RTFFM19BQ0JDMSIsICJzdWIiOiAiNzY1NjExOTk0NDc1MjA5MDUiLCAiYXVkIjogWyAid2ViIiBdLCAiZXhwIjogMTY4NTExMDc5MSwgIm5iZiI6IDE2NzYzODMwMDMsICJpYXQiOiAxNjg1MDIzMDAzLCAianRpIjogIjBEMzFfMjI5NkUxREZfQUQ3MDQiLCAib2F0IjogMTY4NTAyMzAwMiwgInJ0X2V4cCI6IDE3MDI4OTc1NjIsICJwZXIiOiAwLCAiaXBfc3ViamVjdCI6ICI3NC4xMjEuMTg4LjIyMSIsICJpcF9jb25maXJtZXIiOiAiNzQuMTIxLjE4OC4yMjEiIH0.hzq-8liTaMgNVPoLOzeFmmjRIiSgjMwhsFYlBrFEC37Q3QSQ6sC1xbSYY3tLlh9DL5VUDfF05bA59M03sx_8Bg; recentapps=%7B%22981160%22%3A1685023036%7D; birthtime=28828801; lastagecheckage=1-0-1964", }; Elements age = page.getHtml().getDocument().select("#app_agegate"); Elements login = page.getHtml().getDocument().select("#error_box"); if( age.size()==0&&login.size()==0 ) { return page; } else{ for( int i = 0; i < cookieGroup.length; ++i ){ Page retryPage = this.queryHTTPPageSafe( new Request(page.getRequest().getUrl()).addHeader("cookie",cookieGroup[i]) ); if( retryPage.getHtml().getDocument().select("#app_agegate").size() == 0 ){ return retryPage; } } return page; } } @Override public String querySpoilStoragePath( 
long id ) { return this.querySpoilStorageDir( id ) + "page_" + id + ".html"; } @Override public void toRavage() { this.startBatchTask(); } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Void/VoidHeist.java ================================================ package com.sauron.shadow.heists.Void; import com.pinecone.framework.system.NonNull; import com.pinecone.framework.system.Nullable; import com.sauron.heist.heistron.CascadeHeist; import com.sauron.heist.heistron.Crew; import com.sauron.heist.heistron.HTTPIndexHeist; import com.sauron.heist.heistron.Heistgram; import com.pinecone.framework.util.config.JSONConfig; //@Heistlet( "Void" ) public class VoidHeist extends HTTPIndexHeist { public VoidHeist( Heistgram heistron ){ super( heistron ); } public VoidHeist( Heistgram heistron, JSONConfig joConfig ){ super( heistron, joConfig ); } public VoidHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) { super( heistron, parent, szChildName ); } @Override public Crew newCrew( int nCrewId ) { VoidReaver reaver = new VoidReaver( this, nCrewId ); //this.heistPool.submit( reaver ); return reaver; } @Override public void toRavage(){ super.toRavage(); } @Override public void toStalk(){ } } ================================================ FILE: Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Void/VoidReaver.java ================================================ package com.sauron.shadow.heists.Void; import com.sauron.heist.heistron.*; import java.io.IOException; public class VoidReaver extends MegaDOMIndexCrew implements Reaver { public VoidReaver( HTTPIndexHeist heist, int id ){ super( heist, id ); } @Override protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException { //Page retryPage = this.queryHTTPPageSafe(new Request("https://www.artstation.com/sitemap.xml")); //Debug.trace( retryPage.getRawText() ); //this.terminate(); } @Override public void toRavage() { this.startBatchTask(); } } ================================================ FILE: Saurons/heist-framework-architecture/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent> <artifactId>saurons</artifactId> <groupId>com.saurons</groupId> <version>1.2.7</version> </parent>
    <modelVersion>4.0.0</modelVersion>
    <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>11</source> <target>11</target> </configuration> </plugin> </plugins> </build>
    <groupId>com.sauron.heist</groupId>
    <artifactId>heist-framework-architecture</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties> <maven.compiler.source>11</maven.compiler.source> <maven.compiler.target>11</maven.compiler.target> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-framework-runtime</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.tritium</groupId> <artifactId>hydra-system-tritium</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> </dependency>
        <dependency> <groupId>com.sauron.core</groupId> <artifactId>sauron-core</artifactId> <version>1.2.7</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>org.jsoup</groupId> <artifactId>jsoup</artifactId> <version>1.15.4</version> </dependency>
        <dependency> <groupId>us.codecraft</groupId> <artifactId>webmagic-core</artifactId> <version>0.8.0</version> </dependency>
        <dependency> <groupId>us.codecraft</groupId> <artifactId>webmagic-extension</artifactId> <version>0.8.0</version> </dependency>
    </dependencies>
</project>
================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/CascadeHeist.java ================================================ package com.sauron.heist.heistron; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.name.Namespace; import com.pinecone.framework.util.name.UniNamespace; import com.sauron.heist.heistron.orchestration.Hierarchy; public interface CascadeHeist extends Heistum { String HeistNSSeparator = "::"; CascadeHeist parent(); default boolean isRoot() { return this.parent() == null; } default CascadeHeist root() { CascadeHeist p = this; CascadeHeist c = p; while ( p != null ) { c = p; p = p.parent(); } return c; } Hierarchy getHierarchy(); default boolean isMaster() { return
this.getHierarchy() == Hierarchy.Master; } default boolean isSlave() { return this.getHierarchy() == Hierarchy.Slave; } Namespace getHeistNamespace(); default String getInstanceFullName() { return this.getHeistNamespace().getFullName(); } default String getInstanceName() { return this.getHeistNamespace().getName(); } static Namespace newNamespace( String szSegmentName, @Nullable CascadeHeist parent ) { Namespace p = null; if( parent != null ) { p = parent.getHeistNamespace(); } return new UniNamespace( szSegmentName, p, CascadeHeist.HeistNSSeparator ); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/ConfigNotFoundException.java ================================================ package com.sauron.heist.heistron; import com.pinecone.framework.system.PineRuntimeException; public class ConfigNotFoundException extends PineRuntimeException { public ConfigNotFoundException() { super(); } public ConfigNotFoundException( String message ) { super( message ); } public ConfigNotFoundException( String message, Throwable cause ) { super( message, cause ); } public ConfigNotFoundException( Throwable cause ) { super(cause); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Crew.java ================================================ package com.sauron.heist.heistron; import com.pinecone.tritium.system.MissionTerminateException; import com.pinecone.tritium.system.TritiumSystem; import com.pinecone.tritium.system.StorageSystem; import org.slf4j.Logger; public interface Crew extends Crewnium { String crewName(); Heistum parentHeist(); void validateSpoil( String sz ); void isTimeToFeast(); default void terminate(){ throw new MissionTerminateException(); } void startBatchTask(); @Override TritiumSystem parentSystem(); StorageSystem getStorageSystem(); Logger tracer(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Crewnium.java ================================================ package com.sauron.heist.heistron; import com.sauron.system.Saunut; import com.pinecone.framework.system.executum.Executum; public interface Crewnium extends Runnable, Executum, Saunut { } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Embezzler.java ================================================ package com.sauron.heist.heistron; /** * Bean Nuts Hazelnut Sauron Tritium For Java, Embezzler [Money Launderer] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.
* ***************************************************************************************** * Focus on batch crawler data processing * ***************************************************************************************** * Dragon King, the undefined */ public interface Embezzler extends Crew { void toEmbezzle(); @Override default void isTimeToFeast(){ this.toEmbezzle(); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistConfigConstants.java ================================================ package com.sauron.heist.heistron; public final class HeistConfigConstants { public static final String KeyConfigScope = "ConfigScope"; public static final String KeyLocalConfigs = "LocalConfigs"; public static final String KeyTemplatedConfig = "TemplatedConfig"; public static final String KeyHeistsTable = "Heists"; public static final String KeyComponents = "Components"; public static final String KeyHttpBrowser = "HttpBrowser"; public final class Heistum { public static final String KeyHeistURL = "HeistURL"; } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistException.java ================================================ package com.sauron.heist.heistron; import com.pinecone.framework.system.prototype.Pinenut; public class HeistException extends Exception implements Pinenut { public HeistException () { super(); } public HeistException ( String message ) { super(message); } public HeistException ( String message, Throwable cause ) { super(message, cause); } public HeistException ( Throwable cause ) { super(cause); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistExecutionException.java ================================================ package com.sauron.heist.heistron; public class HeistExecutionException extends HeistException { public HeistExecutionException () { super(); } public HeistExecutionException ( String message ) { super(message); } public HeistExecutionException ( String message, Throwable cause ) { super(message, cause); } public HeistExecutionException ( Throwable cause ) { super(cause); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistOrchestrateException.java ================================================ package com.sauron.heist.heistron; public class HeistOrchestrateException extends HeistException { public HeistOrchestrateException () { super(); } public HeistOrchestrateException ( String message ) { super(message); } public HeistOrchestrateException ( String message, Throwable cause ) { super(message, cause); } public HeistOrchestrateException ( Throwable cause ) { super(cause); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistScheme.java ================================================ package com.sauron.heist.heistron; import java.util.Map; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.MultiScopeMap; import com.pinecone.framework.util.config.JSONConfig; public interface HeistScheme extends Pinenut { JSONConfig getInstanceConfigByName( String name ); /** * getInstanceConfigByName * @param name ( Child instance name, which will
extend the parent scope, and get the instance config of this child. ) * ( A `null` name refers to the current scope, [this]. ) * @param bRecursive ( If true, override all objects and lists for any key the child config does not have. ) * @return Instance Config */ JSONConfig getInstanceConfigByName( @Nullable String name, boolean bRecursive ); void overrideSegment ( Map parentProto, Map instance ); HeistScheme reinterpret( JSONConfig that ); MultiScopeMap getHeistScope(); JSONConfig getProtoConfig(); Heistgram getHeistgram(); Heistum getParentHeist(); JSONConfig getTemplateHeistSchemeConfig(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistStatusTerminatedException.java ================================================ package com.sauron.heist.heistron; import com.pinecone.framework.system.prototype.Pinenut; public class HeistStatusTerminatedException extends IllegalStateException implements Pinenut { public HeistStatusTerminatedException () { super(); } public HeistStatusTerminatedException ( String message ) { super( message ); } public HeistStatusTerminatedException ( String message, Throwable cause ) { super( message, cause ); } public HeistStatusTerminatedException ( Throwable cause ) { super(cause); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heistgram.java ================================================ package com.sauron.heist.heistron; import com.pinecone.hydra.servgram.Servgramium; import com.pinecone.hydra.system.Hydrogen; import com.pinecone.hydra.task.TaskInstanceStatus; import com.sauron.heist.heistron.event.HeistLifecycleEventInterceptor; import com.sauron.heist.heistron.orchestration.HeistletOrchestrator; import com.pinecone.framework.system.executum.ExclusiveProcessum; import com.pinecone.framework.util.config.JSONConfig; import com.sauron.heist.heistron.orchestration.Hierarchy; public interface Heistgram extends ExclusiveProcessum, Servgramium { Hydrogen parentSystem(); JSONConfig getTemplateHeistSchemeConfig(); JSONConfig getLocalHeistsConfigList(); HeistletOrchestrator getHeistletOrchestrator(); JSONConfig queryHeistConfig ( String szHeistName ); JSONConfig getComponentsConfig(); String searchHeistName( Heistum that ) ; Heistgram addLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor ); Heistgram removeLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor ); void notifyLifecycleEvent(Heistum heist, TaskInstanceStatus instanceStatus, Hierarchy hierarchy ); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heistium.java ================================================ package com.sauron.heist.heistron; import com.sauron.heist.heistron.orchestration.Taskium; import com.sauron.heist.heistron.scheduler.TaskProducer; import java.util.concurrent.atomic.AtomicBoolean; public interface Heistium extends Taskium { Heistum getParentHeist(); TaskProducer getTaskProducer(); void joinStartMultiTasks(); void terminate(); AtomicBoolean queryTerminationSignal(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heists.java ================================================ package com.sauron.heist.heistron; public final class Heists { public static String getCriterionNomenclatureName( Heistum heistum ) { String szHeistName; if( heistum
instanceof CascadeHeist ) { szHeistName = ((CascadeHeist) heistum).getInstanceFullName(); } else { szHeistName = heistum.heistName(); } return szHeistName; } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heistum.java ================================================ package com.sauron.heist.heistron; import com.pinecone.hydra.servgram.AutoOrchestrator; import com.pinecone.hydra.servgram.Servgramlet; import com.sauron.heist.heistron.orchestration.ChildHeistInstanceModifier; import com.sauron.heist.heistron.orchestration.ChildHeistOrchestrator; import com.sauron.heist.heistron.orchestration.HeistletOrchestrator; import com.sauron.system.Saunut; import org.slf4j.Logger; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.slime.chunk.RangedPage; public interface Heistum extends Saunut, Servgramlet { String heistName(); @Override default String gramName() { return this.heistName(); } @Override default String getName() { return this.taskName(); } default String taskName() { return this.getHeistium().getName(); } ChildHeistInstanceModifier getChildHeistInstanceModifier(); void applyChildHeistInstanceModifier( ChildHeistInstanceModifier modifier ); ChildHeistOrchestrator getThisHeistletOrchestrator(); HeistletOrchestrator getGramHeistletOrchestrator(); @Override JSONConfig getConfig(); JSONConfig getProtoConfig(); HeistScheme getHeistScheme(); Heistgram getHeistgram(); RangedPage getMasterTaskPage(); Heistium getHeistium(); Crew newCrew( int nCrewId ) ; // For Heistium to start the crew. int getMaximumThread(); Logger tracer(); void terminate(); @Override default void execute() throws Exception { this.toHeist(); } void toRavage(); void toStalk(); void toEmbezzle(); void toHeist() throws HeistException; void handleAliveException( Exception e ); void handleKillException( Exception e ) throws IllegalStateException ; String ConfigChildrenKey = "Children"; String ConfigOrchestrationKey = AutoOrchestrator.ConfigOrchestrationKey; String StatusStart = "Start"; String StatusDone = "Done"; } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/LootAbortException.java ================================================ package com.sauron.heist.heistron; public class LootAbortException extends RuntimeException { public LootAbortException() { super(); } public LootAbortException( String message ) { super( message ); } public LootAbortException( String message, Throwable cause ) { super( message, cause ); } @Override public String toString() { return "[object LootAbortException]"; } public String prototypeName() { return "LootAbortException"; } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/LootRecoveredException.java ================================================ package com.sauron.heist.heistron; public class LootRecoveredException extends RuntimeException { public LootRecoveredException() { super(); } public LootRecoveredException( String message ) { super( message ); } public LootRecoveredException( String message, Throwable cause ) { super( message, cause ); } @Override public String toString() { return "[object LootRecoveredException]"; } public String prototypeName() { return "LootRecoveredException"; } } ================================================ FILE: 
Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Metier.java ================================================ package com.sauron.heist.heistron; public enum Metier { REAVER ("Reaver"), STALKER ("Stalker"), EMBEZZLER ("Embezzler"); private final String value; Metier( String value ){ this.value = value; } public String getName(){ return this.value; } public static String queryName( Metier type ) { return type.getName(); } public static Metier queryMetier( String sz ) { return Metier.valueOf( sz.toUpperCase() ); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Reaver.java ================================================ package com.sauron.heist.heistron; /** * Bean Nuts Hazelnut Sauron Tritium For Java, Reaver [Plunderer] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Focus on batch crawler downloading and retrieving. * ***************************************************************************************** * Dragon King, the undefined */ public interface Reaver extends Crew { default void toRavage() { this.startBatchTask(); } @Override default void isTimeToFeast(){ this.toRavage(); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Stalker.java ================================================ package com.sauron.heist.heistron; /** * Bean Nuts Hazelnut Sauron Tritium For Java, Stalker [Lurker] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Focus on batch crawler index sniffing. * ***************************************************************************************** * Dragon King, the undefined */ public interface Stalker extends Crew { void toStalk(); @Override default void isTimeToFeast(){ this.toStalk(); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/MultiRaiderLoader.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.util.lang.MultiClassScopeLoader; import com.pinecone.framework.util.name.Name; import com.pinecone.hydra.servgram.filters.AnnotationValueFilter; import com.pinecone.ulf.util.lang.MultiTraitClassLoader; import java.util.List; public interface MultiRaiderLoader extends MultiClassScopeLoader, MultiTraitClassLoader { @Override Class<?> load( Name simpleName ) throws ClassNotFoundException ; // Directly by its name. @Override Class<?> loadByName( Name simpleName ) throws ClassNotFoundException ; // Scanning a class's annotations, methods or others.
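// Usage sketch (illustrative only; `loader` and the class names are hypothetical): the lookup modes
// declared here differ only in how a Raider class is located, e.g.
//     Class<?> byFqcn  = loader.load( new ScopeName( "com.example.NightlyRaider" ) );   // direct lookup by name
//     Class<?> byTrait = loader.loadInClassTrait( new ScopeName( "NightlyRaider" ) );   // matched via scanned traits, e.g. a @Raiderlet value
// with setAnnotationValueFilter() presumably narrowing which annotation values the trait scan accepts.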
@Override Class<?> loadInClassTrait( Name simpleName ) throws ClassNotFoundException ; @Override MultiRaiderLoader updateScope(); void setAnnotationValueFilter( AnnotationValueFilter filter ); @Override List<Class<?>> loads( Name name ) ; @Override List<Class<?>> loadsByName( Name simpleName ); @Override List<Class<?>> loadsInClassTrait( Name simpleName ) ; } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/PeriodicHeist.java ================================================ package com.sauron.heist.heistron.chronic; import com.sauron.heist.heistron.Heistum; public interface PeriodicHeist extends Heistum { } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/PeriodicHeistKernel.java ================================================ package com.sauron.heist.heistron.chronic; import com.sauron.heist.heistron.Heistgram; import com.sauron.heist.heistron.Heistum; import com.sauron.system.Saunut; import java.util.List; public interface PeriodicHeistKernel extends Saunut { Heistgram getHeistgram(); Heistum getParentHeist(); void vitalize(); void joinVitalize() throws InterruptedException; List getPreloadPrefixes() ; List getPreloadSuffixes() ; } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/PeriodicHeistRehearsal.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.hydra.auto.PeriodicAutomatron; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; public interface PeriodicHeistRehearsal extends PeriodicHeistKernel { PeriodicAutomatron getAutomatron(); List getRawChronicPeriods(); AtomicInteger getIndexId(); @Override default void vitalize() { this.getAutomatron().start(); } @Override default void joinVitalize() throws InterruptedException { this.vitalize(); this.getAutomatron().join(); } JSONObject getRaiderMarshalingConf(); JSONObject getRaiderConfigs(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/Raider.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.hydra.auto.Instructation; import com.sauron.heist.heistron.Reaver; /** * Bean Nuts Hazelnut Sauron Tritium For Java, Raider [Assaulter] * Author: Harald.E / JH.W (DragonKing) * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved. * ***************************************************************************************** * Focus on periodic or irregular burst crawler downloading and retrieving.
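 * (An inference from the surrounding interfaces: a Raider is typically vitalized on a schedule, with PeriodicHeistRehearsal.vitalize() above starting a PeriodicAutomatron whose triggers commit the Instructation exposed by getPrimeDirective() below.)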
* ***************************************************************************************** * Dragon King, the undefined */ public interface Raider extends Reaver { Instructation getPrimeDirective(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/RaiderFactory.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.name.Name; import com.pinecone.framework.util.name.ScopeName; import com.pinecone.ulf.util.lang.MultiScopeFactory; import java.lang.reflect.InvocationTargetException; import java.util.List; public interface RaiderFactory extends MultiScopeFactory { @Override ClassLoader getClassLoader(); @Override ClassScope getClassScope(); @Override MultiRaiderLoader getTraitClassLoader(); @Override default Raider spawn( String name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException { return this.spawn( new ScopeName(name), stereotypes, args ); } @Override Raider spawn( Name name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException; @Override default Raider spawn( String name, Object... args ) throws InvocationTargetException { return this.spawn( new ScopeName(name), args ); } @Override Raider spawn( Name name, Object... args ) throws InvocationTargetException; @Override default List<Raider> popping( String name, Class<?>[] stereotypes, Object... args ) { return this.popping( new ScopeName(name), stereotypes, args ); } @Override List<Raider> popping( Name name, Class<?>[] stereotypes, Object... args ); @Override default List<Raider> popping( String name, Object... args ) { return this.popping( new ScopeName(name), args ); } @Override List<Raider> popping( Name name, Object...
args ); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/Raiderlet.java ================================================ package com.sauron.heist.heistron.chronic; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.annotation.ElementType; @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Raiderlet { String ValueKey = "value"; String value() default ""; } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/event/HeistLifecycleEventInterceptor.java ================================================ package com.sauron.heist.heistron.event; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.task.TaskInstanceStatus; import com.sauron.heist.heistron.Heistum; import com.sauron.heist.heistron.orchestration.Hierarchy; public interface HeistLifecycleEventInterceptor extends Pinenut { void afterLifecycleEventTriggered( String name, Heistum heist, TaskInstanceStatus instanceStatus, Hierarchy hierarchy ); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/mapreduce/SchemeQuerier.java ================================================ package com.sauron.heist.heistron.mapreduce; import com.pinecone.slime.map.AlterableQuerier; public interface SchemeQuerier extends AlterableQuerier { default boolean hasOwnProperty( Object k ) { return this.containsKey( k ); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/mapreduce/TaskScheme.java ================================================ package com.sauron.heist.heistron.mapreduce; import com.pinecone.framework.system.prototype.Pinenut; public interface TaskScheme extends Pinenut { } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/ChildHeistInstanceModifier.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.framework.system.prototype.Pinenut; import com.sauron.heist.heistron.CascadeHeist; public interface ChildHeistInstanceModifier extends Pinenut { void modify( CascadeHeist heistum ) ; } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/ChildHeistOrchestrator.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.hydra.servgram.ServgramOrchestrator; import com.sauron.heist.heistron.CascadeHeist; import com.sauron.heist.heistron.Heistgram; import com.sauron.heist.heistron.Heistium; import com.sauron.system.Saunut; public interface ChildHeistOrchestrator extends Saunut, ServgramOrchestrator { CascadeHeist getHeist(); Heistium getHeistium(); Heistgram getHeistgram(); int nextAutoIncrementTaskId(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Heistlet.java ================================================ package com.sauron.heist.heistron.orchestration; import java.lang.annotation.*; 
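// Usage sketch, inferred from the commented-out example on VoidHeist above (the heist name is illustrative):
//     @Heistlet( "Void" )
//     public class VoidHeist extends HTTPIndexHeist { ... }
// The ValueKey constant ("value") presumably lets orchestrator-side scanners read the declared name reflectively.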
@Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface Heistlet { String ValueKey = "value"; String value() default ""; } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/HeistletOrchestrator.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.hydra.servgram.ServgramOrchestrator; import com.sauron.heist.heistron.Heistgram; import com.sauron.system.Saunut; import java.util.List; public interface HeistletOrchestrator extends Saunut, ServgramOrchestrator { Heistgram getHeistgram(); List getPreloadPrefixes() ; List getPreloadSuffixes() ; } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Hierarchy.java ================================================ package com.sauron.heist.heistron.orchestration; public enum Hierarchy { Master ( "Master" ), Slave ( "Slave" ); private final String value; Hierarchy( String value ){ this.value = value; } public String getName(){ return this.value; } public static String queryName( Hierarchy hierarchy ) { return hierarchy.getName(); } public static Hierarchy queryHierarchy( String sz ) { return Hierarchy.valueOf( sz ); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Instructations.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.hydra.auto.Instructation; import com.sauron.heist.heistron.Heistum; import com.sauron.heist.heistron.chronic.Raider; public final class Instructations { public static void infoLifecycle(Heistum heistum, String szWhat, String szStateOrExtra ) { heistum.tracer().info( "[Lifecycle] [{}] <{}>", szWhat, szStateOrExtra ); } public static void infoConformed( Heistum heistum, Instructation instructation ) { Instructations.infoLifecycle( heistum, "Conformed", String.format( "System committed instruction (%s)", instructation.className().replace( "Instructation", "" ) ) ); } public static void infoConformed(Raider raider, String methodName ) { if( methodName == null ) { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); methodName = stackTraceElements[ 2 ].getMethodName(); } Instructations.infoLifecycle( raider.parentHeist(), "Conformed", String.format( "System committed instruction (%s::%s)", raider.className(), methodName ) ); } public static void infoConformed( Raider raider ) { Instructations.infoConformed( raider, null ); } public static void infoCompleted( Raider raider, String methodName ) { if( methodName == null ) { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); methodName = stackTraceElements[ 2 ].getMethodName(); } Instructations.infoLifecycle( raider.parentHeist(), "Termination", String.format( "Instruction completed (%s::%s)", raider.className(), methodName ) ); } public static void infoCompleted( Raider raider ) { Instructations.infoCompleted( raider, null ); } } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/TaskTransaction.java ================================================ package com.sauron.heist.heistron.orchestration; import com.sauron.system.Saunut; public interface TaskTransaction extends 
Saunut { } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Taskium.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.framework.system.executum.Processum; public interface Taskium extends Processum { default ChildHeistOrchestrator getHeistletOrchestrator() { return (ChildHeistOrchestrator) this.getTaskManager(); } long getTaskId(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/PageFrame64ConsumerAdapter.java ================================================ package com.sauron.heist.heistron.scheduler; import com.sauron.system.Saunut; public interface PageFrame64ConsumerAdapter extends Saunut { void consumeById( long index ); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskConsumer.java ================================================ package com.sauron.heist.heistron.scheduler; import com.sauron.system.Saunut; public interface TaskConsumer extends Saunut { void consume(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskFrame64Consumer.java ================================================ package com.sauron.heist.heistron.scheduler; public interface TaskFrame64Consumer extends TaskConsumer { TaskFrame64Producer getTaskPageProducer(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskFrame64Producer.java ================================================ package com.sauron.heist.heistron.scheduler; public interface TaskFrame64Producer extends TaskProducer { Long require(); void deactivate( Long that ); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskPage.java ================================================ package com.sauron.heist.heistron.scheduler; import com.pinecone.slime.chunk.RangedPage; public interface TaskPage extends RangedPage { } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskPageConsumer.java ================================================ package com.sauron.heist.heistron.scheduler; public interface TaskPageConsumer extends TaskConsumer { TaskPageProducer getTaskPageProducer(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskPageProducer.java ================================================ package com.sauron.heist.heistron.scheduler; import com.pinecone.slime.chunk.scheduler.ActivePageScheduler; public interface TaskPageProducer extends ActivePageScheduler, TaskProducer { TaskPage require(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskProducer.java ================================================ package com.sauron.heist.heistron.scheduler; import com.pinecone.framework.system.prototype.Pinenut; import java.util.concurrent.TimeUnit; public interface TaskProducer extends Pinenut { Object require(); boolean hasMoreProducts(); 
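// Consumption loop sketch (hedged; inferred from TaskFrame64Producer above and ArchCrew.consumeById below,
// with the Long cast assuming a frame-64 id producer):
//     while ( producer.hasMoreProducts() && !producer.hasTerminateSignal() ) {
//         Long id = (Long) producer.require();   // hand out the next task id
//         this.consumeById( id );                // per-task failure handling lives in ArchCrew.consumeById
//     }
//     producer.awaitProducerFinished();          // then join on completion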
boolean hasTerminateSignal(); long getProductsSum(); void awaitProducerFinished() throws InterruptedException; void awaitProducerFinished( long timeout, TimeUnit unit ) throws InterruptedException; boolean isFinished(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskSchedulerStrategy.java ================================================ package com.sauron.heist.heistron.scheduler; import com.pinecone.framework.system.prototype.Pinenut; import com.sauron.heist.heistron.Heistium; import com.pinecone.slime.chunk.RangedPage; import com.pinecone.slime.chunk.scheduler.PageDivider; import com.pinecone.slime.chunk.scheduler.PagePool; import com.pinecone.slime.chunk.scheduler.PageRecycleStrategy; public interface TaskSchedulerStrategy extends Pinenut { Heistium getParentHeistium(); RangedPage getMasterPage(); PagePool getHeistTaskPagePool(); PageDivider getPageDivider(); PageRecycleStrategy getPageRecycleStrategy(); TaskSchedulerStrategy setHeistTaskPagePool( PagePool pagePool ); TaskSchedulerStrategy setPageDivider( PageDivider divider ); TaskSchedulerStrategy setPageRecycleStrategy( PageRecycleStrategy strategy ); TaskProducer formulateProducer(); } ================================================ FILE: Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/http/HttpBrowserConf.java ================================================ package com.sauron.heist.http; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.homotype.DirectObjectInjector; import com.pinecone.framework.util.json.JSONArray; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.tritium.util.ConfigHelper; import com.sauron.heist.heistron.HeistConfigConstants; import com.sauron.heist.heistron.Heistum; public class HttpBrowserConf implements Pinenut { public enum ProxyStrategy { NoProxy, SystemOnly, ProxyGroup } protected Heistum mParentHeistum; public JSONObject protoConfig; public JSONArray headConfigGroup; public boolean agentConfusion; public JSONObject systemProxy; public JSONArray proxyGroup; public ProxyStrategy proxyStrategy = ProxyStrategy.NoProxy; public boolean enableRandomDelay = false; public int randomDelayMin = 0; public int randomDelayMax = 0; public int socketTimeout = 20000; public String charset = "UTF-8"; public boolean enableCookieJar = true; // New improved V2 // Whether to follow HTTP 3xx redirects public boolean followRedirects = true; // Whether to follow HTTPS → HTTPS / HTTP → HTTPS redirects public boolean followSslRedirects = true; // Whether to retry automatically on connection failure (TCP layer) public boolean retryOnConnectionFailure = true; public int connectTimeout = this.socketTimeout; // ms public int readTimeout = 20000; // ms public int writeTimeout = 20000; // ms // Usually the business layer imposes its own limit, so this is optional public boolean enableRequestLimit = false; // Global maximum number of concurrent requests public int maxRequests = 4096; // Maximum concurrent requests per host public int maxRequestsPerHost = 512; // Connection pool configuration public boolean enableConnectionPool = true; public int maxIdleConnections = 5; public int keepAliveSeconds = 300; public HttpBrowserConf( Heistum heistum ) { this.mParentHeistum = heistum; JSONObject parentConf = this.mParentHeistum.getConfig(); this.protoConfig = parentConf.optJSONObject( HeistConfigConstants.KeyHttpBrowser ); if( this.protoConfig == null ) { this.protoConfig = this.mParentHeistum.getHeistgram().getComponentsConfig().optJSONObject( HeistConfigConstants.KeyHttpBrowser ) ; } DirectObjectInjector.instance( ConfigHelper.fnToSmallHumpName, this.getClass()
).typeInject( this.protoConfig, this ); this.proxyStrategy = ProxyStrategy.valueOf( this.protoConfig.optString( "ProxyStrategy", ProxyStrategy.NoProxy.name() ) ); } } ================================================ FILE: Saurons/heist-http-client-okhttp-suit/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent> <artifactId>saurons</artifactId> <groupId>com.saurons</groupId> <version>1.2.7</version> </parent>
    <modelVersion>4.0.0</modelVersion>
    <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>11</source> <target>11</target> </configuration> </plugin> </plugins> </build>
    <groupId>com.sauron.heist</groupId>
    <artifactId>heist-http-client-okhttp-suit</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties> <maven.compiler.source>11</maven.compiler.source> <maven.compiler.target>11</maven.compiler.target> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-framework-runtime</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.tritium</groupId> <artifactId>hydra-system-tritium</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> </dependency>
        <dependency> <groupId>com.sauron.core</groupId> <artifactId>sauron-core</artifactId> <version>1.2.7</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>org.jsoup</groupId> <artifactId>jsoup</artifactId> <version>1.15.4</version> </dependency>
        <dependency> <groupId>us.codecraft</groupId> <artifactId>webmagic-core</artifactId> <version>0.8.0</version> </dependency>
        <dependency> <groupId>us.codecraft</groupId> <artifactId>webmagic-extension</artifactId> <version>0.8.0</version> </dependency>
        <dependency> <groupId>com.sauron.heist</groupId> <artifactId>heist-framework-architecture</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>com.squareup.okhttp3</groupId> <artifactId>okhttp</artifactId> <version>4.12.0</version> </dependency>
    </dependencies>
</project>
================================================ FILE: Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/HeistOkHttpClientFactory.java ================================================ package com.sauron.heist.okhttp; import java.net.InetSocketAddress; import java.net.Proxy; import java.net.ProxySelector; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import com.pinecone.framework.util.json.JSONObject; import com.sauron.heist.heistron.Heistum; import com.sauron.heist.http.HttpBrowserConf; import okhttp3.ConnectionPool; import okhttp3.Dispatcher; import okhttp3.OkHttpClient; public class HeistOkHttpClientFactory implements OkHttpFactory { protected Heistum heistum; protected HttpBrowserConf conf; public HeistOkHttpClientFactory( Heistum heistum ) { this.heistum = heistum; this.conf = new HttpBrowserConf( this.heistum ); } @Override public List<OkHttpClient> make() { return this.make( this.conf ); } @Override public List<OkHttpClient> make( HttpBrowserConf conf ) { List<OkClientConstructionScheme> schemes = this.makeScheme( conf ); List<OkHttpClient> clients = new ArrayList<>(); for ( OkClientConstructionScheme scheme : schemes ) { OkHttpClient client = scheme.getBuilder().build(); clients.add(client); } return clients; } @Override public List<OkClientConstructionScheme> makeScheme( HttpBrowserConf conf ) { List<OkClientConstructionScheme> schemes = new ArrayList<>(); switch ( conf.proxyStrategy ) { case SystemOnly: { OkHttpClient.Builder builder = this.createBaseBuilder(conf); builder.proxySelector(ProxySelector.getDefault()); OkClientConstructionScheme pair = new OkClientConstructionScheme( builder, conf, conf.systemProxy, null ); schemes.add(pair); break; } case ProxyGroup: { if ( conf.proxyGroup != null ) { for ( int i = 0; i < conf.proxyGroup.length(); ++i ) { JSONObject proxyConf = conf.proxyGroup.optJSONObject(i); if (proxyConf == null) { continue; } Proxy proxy = this.buildProxyFromConf(proxyConf); if (proxy == null) { continue; } OkHttpClient.Builder builder = this.createBaseBuilder(conf); builder.proxy(proxy); OkClientConstructionScheme pair = new OkClientConstructionScheme( builder, conf, proxyConf, proxy ); schemes.add(pair); } } break; } case NoProxy: default: { OkHttpClient.Builder builder = this.createBaseBuilder(conf); builder.proxy(Proxy.NO_PROXY); OkClientConstructionScheme pair = new OkClientConstructionScheme( builder, conf, null, Proxy.NO_PROXY ); schemes.add(pair); break; } } return schemes; } protected OkHttpClient.Builder createBaseBuilder( HttpBrowserConf conf ) { OkHttpClient.Builder builder = new OkHttpClient.Builder() .connectTimeout(conf.connectTimeout, TimeUnit.MILLISECONDS) .readTimeout(conf.readTimeout, TimeUnit.MILLISECONDS)
.writeTimeout(conf.writeTimeout, TimeUnit.MILLISECONDS) .followRedirects(conf.followRedirects) .followSslRedirects(conf.followSslRedirects) .retryOnConnectionFailure(conf.retryOnConnectionFailure); if ( conf.enableCookieJar ) { builder.cookieJar(new InMemoryCookieJar()); } // === Connection pool configuration (optional, enabled by default) === if ( conf.enableConnectionPool ) { ConnectionPool connectionPool = new ConnectionPool( conf.maxIdleConnections, conf.keepAliveSeconds, TimeUnit.SECONDS ); builder.connectionPool(connectionPool); } if ( conf.enableRequestLimit ) { Dispatcher dispatcher = new Dispatcher(); dispatcher.setMaxRequests(conf.maxRequests); dispatcher.setMaxRequestsPerHost(conf.maxRequestsPerHost); builder.dispatcher(dispatcher); } if ( conf.enableRandomDelay ) { builder.addInterceptor(chain -> { this.applyRandomDelay(conf); return chain.proceed(chain.request()); }); } return builder; } protected Proxy buildProxyFromConf( JSONObject proxyConf ) { String host = proxyConf.optString("host", null); int port = proxyConf.optInt("port", -1); if (host == null || port <= 0) { return null; } return new Proxy( Proxy.Type.HTTP, new InetSocketAddress(host, port) ); } protected void applyRandomDelay( HttpBrowserConf conf ) { int min = conf.randomDelayMin; int max = conf.randomDelayMax; if (max <= min || min < 0) { return; } int delay = ThreadLocalRandom.current().nextInt(min, max + 1); try { Thread.sleep( delay ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } ================================================ FILE: Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/InMemoryCookieJar.java ================================================ package com.sauron.heist.okhttp; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import okhttp3.Cookie; import okhttp3.CookieJar; import okhttp3.HttpUrl; public class InMemoryCookieJar implements CookieJar { private final Map<String, List<Cookie>> cookieStore = new ConcurrentHashMap<>(); @Override public void saveFromResponse(HttpUrl url, List<Cookie> cookies) { this.cookieStore.put(url.host(), cookies); } @Override public List<Cookie> loadForRequest(HttpUrl url) { return this.cookieStore.getOrDefault(url.host(), Collections.emptyList()); } } ================================================ FILE: Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/OkClientConstructionScheme.java ================================================ package com.sauron.heist.okhttp; import java.net.Proxy; import com.pinecone.framework.util.json.JSONObject; import com.sauron.heist.http.HttpBrowserConf; import okhttp3.OkHttpClient; public final class OkClientConstructionScheme { private OkHttpClient.Builder builder; private HttpBrowserConf httpBrowserConf; private JSONObject proxyConf; private Proxy proxy; public OkClientConstructionScheme( OkHttpClient.Builder builder, HttpBrowserConf httpBrowserConf, JSONObject proxyConf, Proxy proxy ) { this.builder = builder; this.httpBrowserConf = httpBrowserConf; this.proxyConf = proxyConf; this.proxy = proxy; } public OkHttpClient.Builder getBuilder() { return this.builder; } public void setBuilder(OkHttpClient.Builder builder) { this.builder = builder; } public HttpBrowserConf getHttpBrowserConf() { return this.httpBrowserConf; } public void setHttpBrowserConf(HttpBrowserConf httpBrowserConf) { this.httpBrowserConf = httpBrowserConf; } public JSONObject getProxyConf() { return this.proxyConf; } public void setProxyConf(JSONObject proxyConf) { this.proxyConf = proxyConf; }
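// Usage sketch (assumed wiring; `heistum` stands for any live Heistum): HeistOkHttpClientFactory.makeScheme()
// above yields one scheme per configured proxy, and each builder may still be customized before building:
//     HeistOkHttpClientFactory factory = new HeistOkHttpClientFactory( heistum );
//     for ( OkClientConstructionScheme scheme : factory.makeScheme( new HttpBrowserConf( heistum ) ) ) {
//         OkHttpClient client = scheme.getBuilder().build();   // the same step factory.make() performs internally
//     }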
public Proxy getProxy() { return this.proxy; } public void setProxy(Proxy proxy) { this.proxy = proxy; } } ================================================ FILE: Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/OkHttpFactory.java ================================================ package com.sauron.heist.okhttp; import java.util.List; import com.pinecone.framework.system.prototype.Pinenut; import com.sauron.heist.heistron.Heistum; import com.sauron.heist.http.HttpBrowserConf; import okhttp3.OkHttpClient; public interface OkHttpFactory extends Pinenut { List<OkHttpClient> make( HttpBrowserConf conf ); List<OkHttpClient> make(); List<OkClientConstructionScheme> makeScheme( HttpBrowserConf conf ); } ================================================ FILE: Saurons/heist-system-schedule/pom.xml ================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent> <artifactId>saurons</artifactId> <groupId>com.saurons</groupId> <version>1.2.7</version> </parent>
    <modelVersion>4.0.0</modelVersion>
    <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>11</source> <target>11</target> </configuration> </plugin> </plugins> </build>
    <groupId>com.sauron.heist</groupId>
    <artifactId>heist-system-schedule</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties> <maven.compiler.source>11</maven.compiler.source> <maven.compiler.target>11</maven.compiler.target> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties>
    <dependencies>
        <dependency> <groupId>com.pinecone</groupId> <artifactId>pinecone</artifactId> <version>2.5.1</version> </dependency>
        <dependency> <groupId>com.pinecone.hydra.kernel</groupId> <artifactId>hydra-framework-runtime</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.tritium</groupId> <artifactId>hydra-system-tritium</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.slime</groupId> <artifactId>slime</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.slime.jelly</groupId> <artifactId>jelly</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.pinecone.ulf</groupId> <artifactId>ulfhedinn</artifactId> <version>1.2.1</version> </dependency>
        <dependency> <groupId>com.pinecone.summer.springram</groupId> <artifactId>springram</artifactId> <version>2.1.0</version> </dependency>
        <dependency> <groupId>com.sauron.core</groupId> <artifactId>sauron-core</artifactId> <version>1.2.7</version> <scope>compile</scope> </dependency>
        <dependency> <groupId>org.javassist</groupId> <artifactId>javassist</artifactId> <version>3.29.0-GA</version> </dependency>
        <dependency> <groupId>io.netty</groupId> <artifactId>netty-all</artifactId> <version>4.1.80.Final</version> </dependency>
        <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-web</artifactId> </dependency>
        <dependency> <groupId>org.jsoup</groupId> <artifactId>jsoup</artifactId> <version>1.15.4</version> </dependency>
        <dependency> <groupId>us.codecraft</groupId> <artifactId>webmagic-core</artifactId> <version>0.8.0</version> </dependency>
        <dependency> <groupId>us.codecraft</groupId> <artifactId>webmagic-extension</artifactId> <version>0.8.0</version> </dependency>
        <dependency> <groupId>org.mybatis</groupId> <artifactId>mybatis</artifactId> <version>3.5.9</version> </dependency>
        <dependency> <groupId>org.mybatis</groupId> <artifactId>mybatis-spring</artifactId> <version>2.0.6</version> </dependency>
        <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-core</artifactId> <version>3.4.3.4</version> </dependency>
        <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-annotation</artifactId> <version>3.4.3.4</version> </dependency>
        <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>8.0.26</version> </dependency>
        <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>1.7.30</version> </dependency>
        <dependency> <groupId>net.spy</groupId> <artifactId>spymemcached</artifactId> <version>2.12.3</version> </dependency>
        <dependency> <groupId>org.apache.httpcomponents.client5</groupId> <artifactId>httpclient5</artifactId> <version>5.1</version> </dependency>
        <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-vfs2</artifactId> <version>2.9.0</version> </dependency>
        <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-vfs2-jackrabbit1</artifactId> <version>2.9.0</version> </dependency>
        <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-lang3</artifactId> <version>3.12.0</version> </dependency>
        <dependency> <groupId>com.sauron.heist</groupId> <artifactId>heist-framework-architecture</artifactId> <version>2.1.0</version> <scope>compile</scope> </dependency>
    </dependencies>
</project>
================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/ArchCrew.java ================================================ package com.sauron.heist.heistron; import com.pinecone.tritium.system.MissionTerminateException; import com.pinecone.tritium.system.TritiumSystem; import com.pinecone.tritium.system.StorageSystem; import com.pinecone.framework.system.functions.FunctionTraits; import com.pinecone.framework.util.json.JSONObject; import org.apache.commons.vfs2.FileSystemManager; import org.slf4j.Logger; import java.io.IOException; import java.util.Random; public abstract class ArchCrew extends LocalCrewnium implements Crew { protected Logger logger; protected String crewInstanceName; protected JSONObject joFailureConf; protected int fileRetrieveTime = 1 ; protected long fragBase = 10000 ; // unit: W (万, 10^4) protected long fragRange = 1000000 ; // unit: 1 protected Heist heist; public ArchCrew( Heist heist, int nCrewId ){ super( heist, nCrewId ); this.heist = (Heist) this.mParentHeist; this.fragBase = this.heist.fragBase; this.fragRange = this.heist.fragRange; this.joFailureConf = this.heist.getConfig().optJSONObject( "FailureConf" ); this.fileRetrieveTime = this.joFailureConf.optInt( "FileRetrieveTime", 1 ); this.crewInstanceName = this.className() + this.mnCrewId; //this.failureRetryTimes = this.heistCenter.getProtoConfig().getFailureRetryTimes(); this.logger = this.parentSystem().getTracerScope().newLogger( this.crewInstanceName ); } @Override public Heistum parentHeist() { return this.heist; }
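// Failure taxonomy of consumeById() below, summarized from its catch chain: LootAbortException marks a task
// deliberately skipped, LootRecoveredException marks a task rescued from an earlier failure, IllegalStateException
// and IOException are traced per task, MissionTerminateException tears down the whole heist, and any remaining
// Exception is routed to handleAliveException() so the batch keeps running.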
    // Derive the thread count from the number of tasks.
    protected String lifecycleTracerSignature() {
        return String.format( "%s::Lifecycle", FunctionTraits.thatName(3) );
    }

    protected ArchCrew traceTaskState( long idx, String szState ) {
        this.logger.info( "[TaskState] <[{},{}], ID:{}> <{}>", this.heist.taskFrom, this.heist.taskTo ,idx, szState );
        return this;
    }

    protected boolean handleTask( long index ){
        this.traceTaskState( index, "Handle" );
        // Placeholder implementation: succeeds randomly (~20% of the time).
        return new Random().nextInt(100)+1>80;
    }

    protected boolean noticeTaskDone ( long index, boolean bIsRecovered ){
        if( bIsRecovered ) {
            this.traceTaskState( index, "Recovered" );
        }
        else {
            this.traceTaskState( index, Heistum.StatusDone );
        }
        //this.heist.getSpoilsLock().countDown();
        return true;
    }

    protected boolean noticeTaskDone ( long index ){
        return this.noticeTaskDone( index, false );
    }

    @Override
    public void startBatchTask() {
        this.mTaskConsumer.consume();
    }

    protected void consumeById( long index ) {
        try {
            this.traceTaskState( index, Heistum.StatusStart );
            this.tryConsumeById( index );
            this.noticeTaskDone( index, false );
        }
        catch ( LootAbortException e ) {
            this.traceTaskState( index, "Abort" );
        }
        catch ( LootRecoveredException e1 ) {
            this.noticeTaskDone( index, true );
        }
        catch ( IllegalStateException e2 ) {
            this.traceTaskState( index, "Error:" + e2.getMessage() );
        }
        catch ( IOException io ) {
            this.traceTaskState( index, "IOException:" + io.getMessage() );
        }
        catch ( MissionTerminateException mte ) {
            this.parentHeist().terminate();
        }
        catch ( Exception e3 ) {
            // Keep this task alive, and ignore other exceptions.
            this.parentHeist().handleAliveException( e3 );
        }
    }

    protected void tryConsumeById( long index ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException {

    }

    @Override
    public void run() {
        //this.lootFromSignal();
        this.isTimeToFeast();
    }

    @Override
    public String crewName() {
        return this.heist.heistName();
    }

    @Override
    public void validateSpoil( String sz ) {

    }

    @Override
    public TritiumSystem parentSystem() {
        return (TritiumSystem) super.parentSystem();
    }

    @Override
    public StorageSystem getStorageSystem() {
        return this.parentSystem().getStorageSystem();
    }

    public FileSystemManager getDafaultFileSystemManager() {
        return this.getStorageSystem().getFileSystemManager();
    }

    @Override
    public Logger tracer(){
        return this.logger;
    }

    protected void handleAliveException( Exception e ) {
        this.parentHeist().handleAliveException( e );
    }

    protected void handleKillException( Exception e ) throws IllegalStateException {
        this.parentHeist().handleKillException( e );
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/ArchHeistum.java
================================================
package com.sauron.heist.heistron;

import com.pinecone.tritium.system.TritiumSystem;
import org.slf4j.Logger;

public abstract class ArchHeistum extends HeistEntity implements Heistum {
    protected Heistgram mHeistgram ;
    protected Logger    mLogger;

    protected ArchHeistum( Heistgram heistgram ) {
        super();
        this.mHeistgram = heistgram;
        this.mLogger = this.parentSystem().getTracerScope().newLogger( this.className() );
    }

    @Override
    public Logger tracer() {
        return this.mLogger;
    }

    protected ArchHeistum infoLifecycle( String szWhat, String szStateOrExtra ) {
        this.tracer().info( "[Lifecycle] [{}] <{}>", szWhat, szStateOrExtra );
        return this;
    }

    protected ArchHeistum infoLifecycle( String szStateOrExtra ) {
        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
        return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(),
szStateOrExtra );
    }

    @Override
    public Heistgram getHeistgram() {
        return this.mHeistgram;
    }

    public TritiumSystem parentSystem() {
        return (TritiumSystem) this.getHeistgram().parentSystem();
    }

    /**
     * These exceptions will not interrupt the running state of a single moulder; they are only logged.
     */
    @Override
    public void handleAliveException( Exception e ) {
        this.tracer().info( "[{}] <{}>", this.heistName(), e.getMessage() );
    }

    /**
     * These exceptions will kill the running state of a single moulder, interrupting it and
     * redirecting to a RuntimeException.
     */
    @Override
    public void handleKillException( Exception e ) throws IllegalStateException {
        this.tracer().info( "[{}] <{}>", this.heistName(), e.getMessage() );
        throw new IllegalStateException( e );
    }

    @Override
    public abstract Crew newCrew( int nCrewId ) ;
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/CrewPageProcessor.java
================================================
package com.sauron.heist.heistron;

import com.pinecone.framework.util.Debug;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.processor.PageProcessor;

public class CrewPageProcessor implements PageProcessor {
    protected HTTPHeist parentHeist;

    public CrewPageProcessor( HTTPHeist heist ) {
        this.parentHeist = heist;
    }

    @Override
    public void process( Page page ) {
        Request request = new Request("https://rednest.cn/index.html");
        request.putExtra("requestType", "temp");
        page.addTargetRequest( request );
        Debug.trace( "CrewPageProcessor", page.getHtml().toString() );
    }

    @Override
    public Site getSite() {
        return this.parentHeist.getSite();
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/CrewPipeline.java
================================================
package com.sauron.heist.heistron;

import com.fasterxml.jackson.databind.ObjectMapper;
import us.codecraft.webmagic.ResultItems;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.pipeline.Pipeline;

// No-op pipeline stub; the mapper and counter fields are currently unused.
public class CrewPipeline implements Pipeline {
    private ObjectMapper objectMapper = new ObjectMapper();
    private int cnt = 0;

    @Override
    public void process( ResultItems resultItems, Task task ) {

    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HTTPCrew.java
================================================
package com.sauron.heist.heistron;

import com.sauron.heist.http.HttpBrowserConf;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemManager;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Random;

public abstract class HTTPCrew extends ArchCrew {
    protected String heistURL;

    public HTTPCrew ( HTTPHeist heist, int id ){
        super( heist, id );
        this.heistURL = this.parentHeist().getConfig().optString( "HeistURL" );
    }

    public void validateSpoil( Page page ) throws LootAbortException, IllegalStateException {
        if( page.getBytes().length < this.joFailureConf.optInt( "FailedFileSize" ) ) {
            throw new IllegalStateException("CompromisedFilesSize");
        }
    }

    @Override
    public HTTPHeist parentHeist() {
        return (HTTPHeist) this.heist;
    }

    protected void afterPageQueried( Page cache ) {

    }

    // [Query, Get] Inlet method.
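    // Fetch pipeline: queryHTTPPage delegates to the parent heist's (optionally pooled)
    // downloader, applies the configured random politeness delay, invokes the
    // afterPageQueried hook, then logs the status code and payload size.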
    public Page queryHTTPPage( Request request, boolean bPooled ) {
        Page cache = this.parentHeist().queryHTTPPage( request, bPooled );
        try{
            HttpBrowserConf browserConf = this.parentHeist().getBrowserConf();
            if( browserConf.enableRandomDelay ){
                Thread.sleep( ( new Random() ).nextInt( browserConf.randomDelayMax - browserConf.randomDelayMin + 1 ) + browserConf.randomDelayMin );
            }
        }
        catch ( InterruptedException e ) {
            this.parentHeist().handleKillException( e );
        }

        this.afterPageQueried( cache );
        this.tracer().info( "[{}] [PageFetched: <{},{}>]", this.lifecycleTracerSignature(), cache.getStatusCode(), cache.getBytes().length );
        return cache;
    }

    public Page queryHTTPPage( Request request ) {
        return this.queryHTTPPage( request, true );
    }

    public Page getHTTPPage( String szHref, boolean bPooled ) {
        Request request = new Request( szHref );
        request.putExtra("requestType", "CrewDefault");
        request.setMethod( "GET" );
        return this.queryHTTPPage( request, bPooled );
    }

    public Page getHTTPPage( String szHref ) {
        return this.getHTTPPage( szHref, true );
    }

    public String getHTTPFile( String szHref, boolean bPooled ) {
        return this.getHTTPPage( szHref, bPooled ).getRawText();
    }

    public String getHTTPFile( String szHref ) {
        return this.getHTTPFile( szHref, true );
    }

    // No-validate variant: serve from the local file cache if present, otherwise fetch and cache.
    public Page queryHTTPPage( Request request, String szFilePath ) {
        try {
            Page cachePage;
            byte[] cache ;
            FileSystemManager fsm = this.getDafaultFileSystemManager();
            FileObject fileObject = fsm.resolveFile( szFilePath );
            if ( fileObject.exists() ) {
                try ( InputStream inputStream = fileObject.getContent().getInputStream() ) {
                    cache = inputStream.readAllBytes();
                    cachePage = this.parentHeist().extendPage( cache, request );
                }
            }
            else {
                cachePage = this.queryHTTPPage( request );
                fileObject.createFile();
                try ( OutputStream outputStream = fileObject.getContent().getOutputStream() ) {
                    outputStream.write( cachePage.getBytes() );
                }
            }
            return cachePage;
        }
        catch ( IOException e ){
            this.parentHeist().handleAliveException( e );
        }
        return null;
    }

    public Page getHTTPPage( String szHref, String szFilePath ) {
        return this.queryHTTPPage( new Request( szHref ), szFilePath );
    }

    public Page queryHTTPPageSafe( Request request ) {
        Page page = null;
        int nRetry = 0;
        IllegalStateException lpLastError = null;
        for ( int i = 0; i < this.fileRetrieveTime; ++i ) {
            try {
                page = this.queryHTTPPage( request );
                this.validateSpoil( page );
                break;
            }
            catch ( IllegalStateException e ) {
                ++nRetry;
                lpLastError = e;
            }
            catch ( LootAbortException e ) {
                return page;
            }
        }
        // Give up only when every attempt failed validation.
        if ( nRetry >= this.fileRetrieveTime && lpLastError != null ) {
            throw new IllegalStateException("IrredeemableLoot");
        }
        return page;
    }

    protected Page afterPageFetched( Page page, Request request ){
        return page;
    }

    Page tryRecoverFromLocalFile( String szStoragePath, Request request ) throws LootRecoveredException, LootAbortException {
        try {
            byte[] cache ;
            FileSystemManager fsm = this.getDafaultFileSystemManager();
            FileObject fileObject = fsm.resolveFile( szStoragePath );
            try ( InputStream inputStream = fileObject.getContent().getInputStream() ) {
                cache = inputStream.readAllBytes();
            }
            Page page = this.parentHeist().extendPage( cache, request );
            this.validateSpoil( page );
            throw new LootRecoveredException();
        }
        catch ( LootRecoveredException | LootAbortException e ) {
            throw e;
        }
        catch ( IOException e1 ) {
            return null;
        }
    }

    void storeHrefCache( String szStoragePath, Request request ) throws LootRecoveredException, LootAbortException, IOException {
        Page cache;
        try {
            cache = this.tryRecoverFromLocalFile( szStoragePath, request );
        }
        catch (
LootRecoveredException | LootAbortException e ) {
            throw e;
        }
        catch ( IllegalStateException e ) {
            this.logger.info( "[Mission::Lifecycle] [Heistum<{}>] ", this.className() );
        }

        cache = this.queryHTTPPageSafe( request );
        cache = this.afterPageFetched( cache, request );

        FileSystemManager fsm = this.getDafaultFileSystemManager();
        FileObject fileObject = fsm.resolveFile( szStoragePath );
        if ( !fileObject.exists() ) {
            fileObject.createFile();
        }
        try ( OutputStream outputStream = fileObject.getContent().getOutputStream() ) {
            outputStream.write( cache.getBytes() );
        }
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HTTPHeist.java
================================================
package com.sauron.heist.heistron;

import com.pinecone.framework.system.NonNull;
import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.system.ProxyProvokeHandleException;
import com.pinecone.framework.util.config.JSONConfig;
import com.sauron.heist.http.HttpBrowserConf;
import com.sauron.heist.http.HttpBrowserDownloader;
import com.pinecone.framework.util.json.JSONObject;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.proxy.SimpleProxyProvider;
import us.codecraft.webmagic.selector.PlainText;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public abstract class HTTPHeist extends Heist {
    protected final String defUserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36 Edg/112.0.1722.48";

    protected String                 heistURL; // The crawler's primary URL.
    protected Site                   site;
    protected HttpBrowserConf        browserConf;
    protected Spider                 majorSpider;
    protected CrewPageProcessor      pageProcessor;
    protected HttpBrowserDownloader  httpBrowser;
    protected ReentrantReadWriteLock requestLock = new ReentrantReadWriteLock();

    public HTTPHeist( Heistgram heistron ){
        super( heistron );
    }

    public HTTPHeist( Heistgram heistron, JSONConfig joConfig ){
        super( heistron, joConfig );
    }

    public HTTPHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {
        super( heistron, parent, szChildName );
    }

    protected void init() {
        this.site          = Site.me().setRetryTimes( 3 );
        this.pageProcessor = new CrewPageProcessor( this );
        this.majorSpider   = Spider.create( this.pageProcessor );
        this.httpBrowser   = new HttpBrowserDownloader();
    }

    protected void loadSiteConf() {
        if( this.getConfig() != null ) {
            if( this.browserConf.enableRandomDelay ) {
                this.site.setSleepTime( this.browserConf.randomDelayMin );
                this.site.setRetrySleepTime( this.browserConf.randomDelayMin );
            }
            else {
                this.site.setSleepTime( 0 );
                this.site.setRetrySleepTime( 100 );
            }

            this.site.setUserAgent( defUserAgent ) // TODO
                    .setCharset( this.browserConf.charset )
                    .setTimeOut( this.browserConf.socketTimeout );
        }
    }

    protected void loadProxyConf() {
        if( this.getConfig() != null ) {
            switch ( this.browserConf.proxyStrategy ) {
            case NoProxy: {
                this.httpBrowser.setProxyProvider( null );
                break;
            }
            case SystemOnly: {
                this.httpBrowser.setProxyProvider( SimpleProxyProvider.from( new Proxy(
                        this.browserConf.systemProxy.optString("host"), this.browserConf.systemProxy.optInt("port")
                ) ) );
                break;
            }
            case ProxyGroup: {
                List<Proxy> proxies = new ArrayList<>();
                for ( int i = 0; i < this.browserConf.proxyGroup.size(); ++i ) {
                    JSONObject jo = this.browserConf.proxyGroup.optJSONObject( i );
                    proxies.add( new Proxy( jo.optString("host"), jo.optInt("port") ) );
                }
                // toArray() without a typed target array cannot be cast to Proxy[].
                this.httpBrowser.setProxyProvider( SimpleProxyProvider.from( proxies.toArray( new Proxy[ 0 ] ) ) );
                break;
            }
            default: {
                break;
            }
            }
        }
    }

    @Override
    protected void loadConfig() {
        this.init();
        super.loadConfig();
        this.browserConf = new HttpBrowserConf( this );
        this.loadSiteConf();
        this.loadProxyConf();
        this.heistURL = this.getConfig().optString( HeistConfigConstants.Heistum.KeyHeistURL );
    }

    public Site getSite() {
        return this.site;
    }

    public HttpBrowserConf getBrowserConf() {
        return this.browserConf;
    }

    public CrewPageProcessor getPageProcessor() {
        return this.pageProcessor;
    }

    public HttpBrowserDownloader getHttpBrowser() {
        return this.httpBrowser;
    }

    public Page queryHTTPPage( Request request ) {
        return this.queryHTTPPage( request, true );
    }

    protected Page queryHTTPPageOnly( Request request, boolean bPooled ) {
        this.requestLock.readLock().lock();
        try{
            return this.httpBrowser.download( request, this.majorSpider, bPooled );
        }
        finally {
            this.requestLock.readLock().unlock();
        }
    }

    public Page queryHTTPPage( Request request, boolean bPooled ) {
        try{
            return this.queryHTTPPageOnly( request, bPooled );
        }
        catch ( ProxyProvokeHandleException e ) {
            if( e.getCause() instanceof IOException ) {
                this.tracer().warn( "[queryHTTPPage:Warning] [What: IOException, " + e.getMessage() + "]" );
                // Fixed: CloseableHttpClient SSL exception using none pooled.
                try{
                    return this.queryHTTPPageOnly( request, bPooled );
                }
                catch ( ProxyProvokeHandleException e1 ) {
                    if ( e1.getCause() instanceof IOException ) {
                        this.tracer().warn("[queryHTTPPage:Warning:ResetPool] [What: IOException, " + e1.getMessage() + "]");
                        this.requestLock.writeLock().lock();
                        try{
                            this.httpBrowser.reset();
                        }
                        finally {
                            this.requestLock.writeLock().unlock();
                        }
                        return this.queryHTTPPageOnly( request, bPooled );
                    }
                    throw e1;
                }
            }
            throw e;
        }
    }

    public Page getHTTPPage( String szHref, boolean bPooled ) {
        Request request = new Request( szHref );
        request.putExtra("requestType", "HeistDefault");
        request.setMethod( "GET" );
        return this.queryHTTPPage( request, bPooled );
    }

    public Page getHTTPPage( String szHref ) {
        return this.getHTTPPage( szHref, true );
    }

    public String getHTTPFile( String szHref, boolean bPooled ) {
        return this.getHTTPPage( szHref, bPooled ).getHtml().toString();
    }

    public String getHTTPFile( String szHref ) {
        return this.getHTTPFile( szHref, true );
    }

    protected Page initDefaultPage( Page page, Request request ) {
        page.setStatusCode( 200 );
        page.setRequest ( request );
        page.setCharset ( this.getSite().getCharset() );
        page.setUrl     ( new PlainText( request.getUrl() ) );
        page.setDownloadSuccess( true );
        return page;
    }

    public Page extendPage( byte[] pageCache, Request request ) {
        Page page = new Page();
        page.setBytes ( pageCache );
        try{
            page.setRawText( new String( pageCache, this.getSite().getCharset() ) );
        }
        catch ( UnsupportedEncodingException e1 ) {
            page.setRawText( null );
        }
        return this.initDefaultPage( page, request );
    }

    public Page extendPage( String szPageCache, Request request ) {
        Page page = new Page();
        page.setRawText ( szPageCache );
        page.setBytes   ( szPageCache.getBytes() );
        return this.initDefaultPage( page, request );
    }

    public Page extendPage( String szNeoPageCache, Page that ) {
        Page neoPage = new Page();
        neoPage.setRawText ( szNeoPageCache );
        neoPage.setBytes   ( szNeoPageCache.getBytes() );
        neoPage.setStatusCode( that.getStatusCode());
        neoPage.setRequest ( that.getRequest());
        neoPage.setCharset ( that.getCharset());
        neoPage.setUrl     ( that.getUrl());
        neoPage.setDownloadSuccess( that.isDownloadSuccess() );
        return neoPage;
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HTTPIndexHeist.java
================================================
package com.sauron.heist.heistron;

import com.pinecone.framework.system.NonNull;
import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.util.config.JSONConfig;
import com.sauron.heist.heistron.mapreduce.SchemeQuerier;

public abstract class HTTPIndexHeist extends HTTPHeist {
    protected SchemeQuerier mSchemeQuerier;

    public HTTPIndexHeist( Heistgram heistron ){
        super(heistron);
    }

    public HTTPIndexHeist( Heistgram heistron, JSONConfig joConfig ){
        super( heistron, joConfig );
    }

    public HTTPIndexHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {
        super( heistron, parent, szChildName );
    }

    public SchemeQuerier getSchemeQuerier() {
        return this.mSchemeQuerier;
    }

    public String queryHrefById ( long id ) {
        Object scheme = this.mSchemeQuerier.get( id );
        if( scheme instanceof String ) {
            return (String) scheme;
        }
        return null; // TODO
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/Heist.java
================================================
package com.sauron.heist.heistron;

//import com.genius.common.Heist;
//import com.genius.common.UlfUMC.UlfUMCMessage;
//import com.genius.config.HeistConfig;
//import com.genius.config.SystemConfig;
//import com.genius.mq.Harbor;
//import com.genius.pool.FunctionNamePool;
//import org.springframework.beans.factory.annotation.Autowired;
import com.pinecone.framework.system.NonNull;
import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.system.ProxyProvokeHandleException;
import com.pinecone.framework.util.json.JSONArray;
import com.pinecone.framework.util.name.Namespace;
import com.pinecone.hydra.servgram.GramTransaction;
import com.pinecone.hydra.servgram.OrchestrateInterruptException;
import com.pinecone.hydra.servgram.AutoOrchestrator;
import com.pinecone.hydra.servgram.ServgramOrchestrator;
import com.pinecone.hydra.task.TaskInstanceStatus;
import com.pinecone.tritium.util.ConfigHelper;
import com.sauron.heist.heistron.orchestration.ChildHeistInstanceModifier;
import com.sauron.heist.heistron.orchestration.ChildHeistOrchestrator;
import com.sauron.heist.heistron.orchestration.HeistletOrchestrator;
import com.sauron.heist.heistron.orchestration.Hierarchy;
import com.sauron.heist.heistron.scheduler.RangedTaskPage;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.json.homotype.DirectObjectInjector;
import com.pinecone.slime.chunk.RangedPage;

/**
 * Responsible for scheduling and dispatching heist (Heist) tasks.
 */
public abstract class Heist extends ArchHeistum implements CascadeHeist {
    //@Autowired
    //private Harbor harbor; // The harbor, responsible for communicating with the master node.

    protected Hierarchy    mHierarchy;
    protected Namespace    mHeistName;
    protected CascadeHeist mParent = null;
    protected JSONConfig   mjoProtoConfig ; // This Heist's JSON prototype config, to be inherited and overridden by child Heists.
    protected JSONConfig   mjoInstanceConfig;
    protected HeistScheme  heistScheme;
    protected RangedPage   mMasterHeistTaskPage;
    protected Heistium     mHeistium; // The affiliated process, which is used for executing a specific heist.
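    // Configuration flows in two layers: mjoProtoConfig holds the raw prototype (shared with
    // children through the HeistScheme), while mjoInstanceConfig is the scope-merged,
    // reinterpreted config this instance actually reads; see apply(JSONConfig) below.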
protected ChildHeistInstanceModifier mChildHeistInstanceModifier; protected Heist( Heistgram heistgram, @Nullable JSONConfig joConfig, @Nullable CascadeHeist parent, Namespace heistName ) { super( heistgram ); this.mParent = parent; this.mHeistName = heistName; this.infoLifecycle( "Heist::FinalConstructor", "A new heist has been contrived" ); this.apply( joConfig ); } protected Heist( Heistgram heistron, @Nullable JSONConfig joConfig, @Nullable CascadeHeist parent, @Nullable String szHeistName ) { this( heistron, joConfig, parent, (Namespace) null ); if( szHeistName == null ) { szHeistName = heistron.searchHeistName( this ); } this.mHeistName = CascadeHeist.newNamespace( szHeistName, parent ); } public Heist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) { this( heistron, null, parent, szChildName ); JSONConfig jc = parent.getHeistScheme().getInstanceConfigByName( szChildName, true ); parent.getHeistScheme().reinterpret( jc ); this.apply( jc ); } public Heist( Heistgram heistron, @Nullable JSONConfig joConfig ){ this( heistron, joConfig, null, (String) null ); } public Heist( Heistgram heistron ) { this( heistron, (String) null ); } public Heist( Heistgram heistron, @Nullable String szName ) { this( heistron, null, null, szName ); if( this.mHeistName != null ) { this.apply( heistron.queryHeistConfig( this.heistName() ) ); } } protected void loadConfig() { this.applyThisInnerConfig( HeistEntity.class ); this.apply( this.taskFrom, this.taskTo ); } public Heist apply( long taskFrom, long taskTo ) { this.taskFrom = taskFrom; this.taskTo = taskTo; this.mMasterHeistTaskPage = new RangedTaskPage( taskFrom, taskTo, 0 ); this.mHeistium = new LocalHeistium( this, 0 ); return this; } public Heist apply( @Nullable JSONConfig joProtoConfig ) { if( joProtoConfig != null ) { this.mjoProtoConfig = joProtoConfig; this.heistScheme = new PatriarchalHeistScheme( this ); this.mjoInstanceConfig = this.getHeistScheme().getInstanceConfigByName( null, true ); this.getHeistScheme().reinterpret( this.mjoInstanceConfig ); //Debug.fmt( 2, this.mjoInstanceConfig ); this.mHierarchy = this.queryHeistHierarchy(); this.loadConfig(); this.reportScheme(); } return this; } protected Hierarchy queryHeistHierarchy() { JSONConfig joOrchestration = this.getConfig().getChild( AutoOrchestrator.ConfigOrchestrationKey ); if( joOrchestration != null ) { JSONArray transaction = joOrchestration.optJSONArray( GramTransaction.ConfigTransactionsListKey ); if( transaction != null && !transaction.isEmpty() ) { return Hierarchy.Master; } } return Hierarchy.Slave; // Root is also a slave. } protected void applyThisInnerConfig( Class stereotype ) { DirectObjectInjector.instance( ConfigHelper.fnToSmallHumpName, stereotype ).inject( this.mjoInstanceConfig, stereotype, this ); this.metier = Metier.queryMetier( this.mjoInstanceConfig.optString( "Metier" ) ); } protected void reportScheme(){ // TODO, Detailed scheme. 
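        // Logged once at standby. Note that Hierarchy is Master only when an Orchestration
        // transactions list is present in config (see queryHeistHierarchy above).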
this.infoLifecycle( String.format( "Standby => { Hierarchy => %s, Name => %s }", this.getHierarchy(), this.getInstanceFullName() ) ); } @Override public Hierarchy getHierarchy() { return this.mHierarchy; } @Override public CascadeHeist parent() { return this.mParent; } @Override public RangedPage getMasterTaskPage() { return this.mMasterHeistTaskPage; } @Override public Heistium getHeistium() { return this.mHeistium; } @Override public HeistScheme getHeistScheme() { return this.heistScheme; } public String getIndexPath() { return this.indexPath; } @Override public JSONConfig getConfig() { return this.mjoInstanceConfig; } @Override public JSONConfig getProtoConfig() { return this.mjoProtoConfig; } @Override public ServgramOrchestrator getAttachedOrchestrator() { if( this.isRoot() ) { return this.getGramHeistletOrchestrator(); } else { return this.parent().getThisHeistletOrchestrator(); } } @Override public ChildHeistInstanceModifier getChildHeistInstanceModifier() { return this.mChildHeistInstanceModifier; } @Override public void applyChildHeistInstanceModifier( ChildHeistInstanceModifier modifier ) { this.mChildHeistInstanceModifier = modifier; } @Override public String heistName(){ return this.mHeistName.rootName(); } @Override public Namespace getHeistNamespace() { return this.mHeistName; } @Override public ChildHeistOrchestrator getThisHeistletOrchestrator() { return this.getHeistium().getHeistletOrchestrator(); } @Override public HeistletOrchestrator getGramHeistletOrchestrator() { return this.getHeistgram().getHeistletOrchestrator(); } @Override public void terminate(){ this.mHeistium.terminate(); } @Override public void toRavage() { this.infoLifecycle( Heistum.StatusStart ); this.mHeistium.joinStartMultiTasks(); //Debug.trace( "Deal!" ); } @Override public void toStalk() { this.infoLifecycle( Heistum.StatusStart ); } @Override public void toEmbezzle() { this.infoLifecycle( Heistum.StatusStart ); } protected void executeSlaveMission() throws HeistExecutionException { try { this.infoLifecycle( "It`s time to feast" ); switch ( this.metier ) { case REAVER : { this.toRavage(); break; } case STALKER : { this.toStalk(); break; } case EMBEZZLER : { this.toEmbezzle(); break; } default: { break; } } this.infoLifecycle( Heistum.StatusDone ); } catch ( ProxyProvokeHandleException e ) { throw new HeistExecutionException( e.getCause() ); } catch ( RuntimeException e ) { throw new HeistExecutionException( e ); } } protected void executeMasterMission() throws HeistOrchestrateException { this.infoLifecycle( "orchestrating transactions" ); try { this.getHeistium().getHeistletOrchestrator().orchestrate(); } catch ( OrchestrateInterruptException e ) { throw new HeistOrchestrateException( e ); } this.infoLifecycle( Heistum.StatusDone ); } @Override public void toHeist() throws HeistException { Hierarchy hierarchy = this.getHierarchy(); this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Running, hierarchy ); try { if( hierarchy == Hierarchy.Slave ) { this.executeSlaveMission(); } else { this.executeMasterMission(); } } catch ( HeistExecutionException e ) { Throwable cause = e.getCause(); if ( cause instanceof RuntimeException ) { if ( e.getCause() != null ) { cause = e.getCause(); } } if ( cause instanceof InterruptedException ) { this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Killed, hierarchy ); } throw e; } catch ( HeistException e ) { this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Error, hierarchy ); throw e; } 
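        // Reached only when the mission body completed without throwing: report Finished.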
        this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Finished, hierarchy );
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HeistEntity.java
================================================
package com.sauron.heist.heistron;

public abstract class HeistEntity implements Heistum {
    protected String  indexPath;          // Index path.
    protected String  spoilPath;          // Storage path for spoil (data) files.
    protected String  workingPath;        // Program working directory.
    protected long    taskFrom = 0;       // Range min
    protected long    taskTo   = 100000;  // Range max
    protected long    fragBase;
    protected long    fragRange;
    protected int     maximumThread = 5;
    protected boolean fromDeathPoint;     // Resume from the last death point.
    protected Metier  metier;             // Occupation (Reaver, Stalker, Embezzler).

    protected HeistEntity() {

    }

    @Override
    public int getMaximumThread() {
        return this.maximumThread;
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/Heistotron.java
================================================
package com.sauron.heist.heistron;

import com.pinecone.hydra.config.MapConfigReinterpreter;
import com.pinecone.hydra.servgram.Gram;
import com.pinecone.hydra.servgram.OrchestrateInterruptException;
import com.pinecone.hydra.task.TaskInstanceStatus;
import com.sauron.heist.heistron.event.HeistLifecycleEventInterceptor;
import com.sauron.heist.heistron.orchestration.Heistlet;
import com.pinecone.tritium.system.TritiumSystem;
import com.sauron.heist.heistron.orchestration.Hierarchy;
import com.sauron.heist.heistron.orchestration.LocalHeistumOrchestrator;
import com.sauron.heist.heistron.orchestration.HeistletOrchestrator;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.hydra.config.ConfigSource;
import com.pinecone.hydra.config.LocalConfigSource;
import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.servgram.ArchServgramium;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;

import java.io.IOException;
import java.lang.annotation.Annotation;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Bean Nuts Hazelnut Sauron Heistotron
 * Author: Harald.E / JH.W (DragonKing)
 * Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.
 * *****************************************************************************************
 * Heistotron Spider for PB-Level Automatic Crawler
 *
 * *****************************************************************************************
 * DragonKing.cn of Harald
 */
@Gram( "Heist" )
public class Heistotron extends ArchServgramium implements Heistgram {
    protected boolean      mbEnableCmdCall = true;
    protected ConfigSource mUnifyConfigSource;
    protected JSONConfig   mLocalHeistsConfigList;
    protected JSONConfig   mTemplateHeistSchemeConfig;
    protected JSONConfig   mComponents;

    protected HeistletOrchestrator mHeistletOrchestrator;

    // Editorial note: the element type was stripped in extraction; HeistLifecycleEventInterceptor
    // is inferred from the typed iteration in notifyLifecycleEvent below.
    protected Collection<HeistLifecycleEventInterceptor> mLifecycleEventInterceptors;

    public Heistotron( String szName, Processum parent ) {
        super( szName, parent );
        this.mHeistletOrchestrator = new LocalHeistumOrchestrator( this, this.getConfig() );
        this.loadHeistronScopeConfig();
        this.mLifecycleEventInterceptors = new ArrayList<>();
    }

    protected void prepareTemplateHeistScheme() {
        JSONConfig tc = this.getConfig().getChild( HeistConfigConstants.KeyTemplatedConfig ) ;
        if( tc == null ){
            this.getConfig().put( HeistConfigConstants.KeyTemplatedConfig, new JSONMaptron() );
            tc = this.getConfig().getChild( HeistConfigConstants.KeyTemplatedConfig );
        }
        this.mTemplateHeistSchemeConfig = tc;
        this.mTemplateHeistSchemeConfig.put( HeistConfigConstants.KeyHttpBrowser, this.getComponentsConfig().opt( HeistConfigConstants.KeyHttpBrowser ) );
    }

    protected void loadHeistronScopeConfig() {
        this.mbEnableCmdCall        = this.getConfig().optBoolean( "EnableCmdCall" );
        this.mLocalHeistsConfigList = this.getConfig().getChild( HeistConfigConstants.KeyHeistsTable );
        this.mComponents            = this.getConfig().getChild( HeistConfigConstants.KeyComponents );

        JSONConfig joLocalHeistsConfigList = this.getLocalHeistsConfigList();
        JSONConfig joLocalConfigs = this.getConfig().getChild( HeistConfigConstants.KeyConfigScope ).getChild( HeistConfigConstants.KeyLocalConfigs );

        MapConfigReinterpreter reinterpreter = this.parentSystem().getPrimaryConfigScope().newMapConfigReinterpreter();
        reinterpreter.addExcludeKey( HeistConfigConstants.KeyTemplatedConfig );
        reinterpreter.reinterpretByLineage( this.getConfig(), null );

        this.mUnifyConfigSource = new LocalConfigSource( this, joLocalConfigs, joLocalHeistsConfigList ); // TODO, to implement UnifyConfigSource
        this.prepareTemplateHeistScheme();
    }

    @Override
    public Heistgram addLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor ) {
        this.mLifecycleEventInterceptors.add( interceptor );
        return this;
    }

    @Override
    public Heistgram removeLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor ) {
        this.mLifecycleEventInterceptors.remove( interceptor );
        return this;
    }

    @Override
    public JSONConfig getConfig() {
        return (JSONConfig)this.mServgramConf;
    }

    @Override
    public TritiumSystem parentSystem() {
        return (TritiumSystem) super.parentSystem();
    }

    @Override
    public JSONConfig getTemplateHeistSchemeConfig() {
        return this.mTemplateHeistSchemeConfig;
    }

    public ConfigSource getConfigSource() {
        return this.mUnifyConfigSource;
    }

    @Override
    public JSONConfig getLocalHeistsConfigList() {
        return this.mLocalHeistsConfigList;
    }

    public HeistletOrchestrator getHeistletOrchestrator() {
        return this.mHeistletOrchestrator;
    }

    protected String queryCmdDesignatedHeist() {
        // Editorial note: the map's type parameters were stripped in extraction;
        // Map<String, String[]> is inferred from the String[] lookup below.
        Map<String, String[]> map = this.parentSystem().getStartupCommandMap();
        String[] heists = map.get( "heist" );
        if( heists != null && heists.length > 0 ) {
            return heists[ 0 ];
        }
        return "";
    }

    @Override
    public JSONConfig queryHeistConfig ( String szHeistName ) {
        JSONConfig parent =
this.getLocalHeistsConfigList(); Object thisConf = parent.opt( szHeistName ); JSONConfig config; try{ if( thisConf instanceof String ) { config = (JSONConfig) this.getConfigSource().loadConfig( Path.of( (String) thisConf ) ); } else if( thisConf instanceof JSONObject ) { config = parent.getChild( szHeistName ); } else { config = (JSONConfig) this.getConfigSource().loadConfigBySegmentName( szHeistName ); } } catch ( IOException e ) { throw new ConfigNotFoundException( e ); } if( config == null ) { throw new ConfigNotFoundException( "Compromised attempts, Heist config `" + szHeistName + "` can be found in nowhere." ); } return config; } @Override public JSONConfig getComponentsConfig() { return this.mComponents; } protected void dispatch() throws HeistException { String szDesignatedHeist = this.queryCmdDesignatedHeist(); //szDesignatedHeist = "Void"; if( szDesignatedHeist.length() != 0 ) { this.infoLifecycle( "Into command-prompt mode" ); List heists = this.mHeistletOrchestrator.preloads( szDesignatedHeist ); for( Object o : heists ) { ( (Heistum) o ).toHeist(); } } else { this.infoLifecycle( "Into orchestrator mode" ); try { this.mHeistletOrchestrator.orchestrate(); } catch ( OrchestrateInterruptException e ) { throw new HeistOrchestrateException( e ); } } } @Override public void execute() throws HeistException { this.infoLifecycle( "Can do !" ); this.dispatch(); } @Override public String searchHeistName( Heistum that ) { Annotation[] annotations = that.getClass().getAnnotations(); for( Annotation annotation : annotations ) { if( annotation instanceof com.pinecone.hydra.servgram.Gram ) { return ( (com.pinecone.hydra.servgram.Gram) annotation ).value(); } else if( annotation instanceof Heistlet) { return ( (Heistlet) annotation ).value(); } } List prefixes = this.mHeistletOrchestrator.getPreloadPrefixes(); String szClassName = that.className(); if( prefixes != null ) { for( Object o : prefixes ) { szClassName = szClassName.replaceFirst( o.toString(), "" ); } } List suffixes = this.mHeistletOrchestrator.getPreloadSuffixes(); if( suffixes != null ) { for( Object o : suffixes ) { szClassName = szClassName.replaceFirst( o.toString(), "" ); } } return szClassName; } @Override public void notifyLifecycleEvent( Heistum heist, TaskInstanceStatus instanceStatus, Hierarchy hierarchy ) { for ( HeistLifecycleEventInterceptor interceptor : this.mLifecycleEventInterceptors ) { interceptor.afterLifecycleEventTriggered( heist.getName(), heist, instanceStatus, hierarchy ); } } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/LocalCrewnium.java ================================================ package com.sauron.heist.heistron; import com.sauron.heist.heistron.scheduler.LocalPreemptiveSingleFrame64Consumer; import com.sauron.heist.heistron.scheduler.PageFrame64ConsumerAdapter; import com.sauron.heist.heistron.scheduler.TaskConsumer; import com.sauron.heist.heistron.scheduler.TaskFrame64Producer; import com.pinecone.framework.system.executum.ArchThreadum; public abstract class LocalCrewnium extends ArchThreadum implements Crewnium { protected int mnCrewId; protected Heistum mParentHeist; protected TaskConsumer mTaskConsumer; protected PageFrame64ConsumerAdapter mFrame64ConsumerAdapter = LocalCrewnium.this::consumeById; public LocalCrewnium ( Heist heist, int nCrewId ) { super( null, heist.getHeistium() ); this.mnCrewId = nCrewId; this.mParentHeist = heist; Thread affinityThread = new Thread( this ); this.setThreadAffinity( 
affinityThread ); this.getAffiliateThread().setName( this.nomenclature() ); this.setName( affinityThread.getName() ); // this.mTaskConsumer = new LocalSingleTaskPageConsumer( // (TaskPageProducer) this.parentExecutum().getTaskProducer(), this.mFrame64ConsumerAdapter // ); this.mTaskConsumer = new LocalPreemptiveSingleFrame64Consumer( (TaskFrame64Producer) this.parentExecutum().getTaskProducer(), this.mFrame64ConsumerAdapter ); } protected abstract void consumeById( long index ); @Override public Heistium parentExecutum() { return (Heistium) super.parentExecutum(); } protected String nomenclature() { String szHeistName; if( this.mParentHeist instanceof CascadeHeist ) { szHeistName = ((CascadeHeist) this.mParentHeist).getInstanceFullName(); } else { szHeistName = this.mParentHeist.heistName(); } return String.format( "%s-%s", szHeistName, this.getAffiliateThread().getName() ).toLowerCase(); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/LocalHeistium.java ================================================ package com.sauron.heist.heistron; import com.sauron.heist.heistron.orchestration.HeistTask; import com.sauron.heist.heistron.orchestration.Hierarchy; import com.sauron.heist.heistron.scheduler.LocalPreemptiveTaskFrame64Producer; import com.sauron.heist.heistron.scheduler.TaskProducer; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; public class LocalHeistium extends HeistTask implements Heistium { protected AtomicBoolean mTerminateSignal; protected int mnMaximumThread; protected TaskProducer mTaskProducer; protected long mnAwaitFinishedMaxMillis; protected Hierarchy mHierarchy; protected void initSelf( Heistum heistum, long nAwaitFinishedMaxMillis ) { if( heistum instanceof CascadeHeist ) { this.mHierarchy = ((CascadeHeist) heistum).getHierarchy(); } else { this.mHierarchy = Hierarchy.Slave; } this.mnMaximumThread = heistum.getMaximumThread(); this.mTerminateSignal = new AtomicBoolean( false ); this.mnAwaitFinishedMaxMillis = nAwaitFinishedMaxMillis; if( this.mHierarchy == Hierarchy.Slave ) { //TODO //this.mTaskProducer = (new LocalTaskSchedulerStrategy( this, 100 )).formulateProducer(); this.mTaskProducer = new LocalPreemptiveTaskFrame64Producer( this, this.mParentHeist.getMasterTaskPage() ); } } public LocalHeistium( String szName, Heistum heistum, long nAwaitFinishedMaxMillis ) { super( szName, heistum ); this.initSelf( heistum, nAwaitFinishedMaxMillis ); } public LocalHeistium( Heistum heistum, long nAwaitFinishedMaxMillis ) { super( heistum ); this.initSelf( heistum, nAwaitFinishedMaxMillis ); } @Override public TaskProducer getTaskProducer() { return this.mTaskProducer; } protected void verifyIsTerminated() { if( this.mTaskProducer.isFinished() || this.mTerminateSignal.get() ) { throw new HeistStatusTerminatedException( "Mission is already terminated." 
);
    }

    protected void beforeMultiTaskStart() {
        this.verifyIsTerminated();
    }

    protected void vitalizeSoloClew( int nCrewId ) {
        Crewnium crewnium = this.getParentHeist().newCrew( nCrewId );
        crewnium.getAffiliateThread().start();
        this.getTaskManager().add( crewnium );
    }

    protected void vitalizeMultiTasks (){
        for ( int i = 0; i < this.mnMaximumThread; ++i ) {
            this.vitalizeSoloClew( i );
        }
    }

    protected void awaitTasksFinished() {
        try {
            if( this.mnAwaitFinishedMaxMillis > 0 ) {
                this.mTaskProducer.awaitProducerFinished( this.mnAwaitFinishedMaxMillis, TimeUnit.MILLISECONDS );
            }
            else {
                this.mTaskProducer.awaitProducerFinished();
            }
        }
        catch ( InterruptedException e ) {
            this.handleAliveException( e );
        }
        finally {
            this.getTaskManager().purge();
        }
    }

    @Override
    public void apoptosis() {
        if( this.mHierarchy == Hierarchy.Slave ) {
            synchronized ( this ) {
                if( !this.mTerminateSignal.get() ) {
                    this.mTerminateSignal.getAndSet( true );
                    this.mParentHeist.tracer().info( "[{} has been terminated.]", this.mParentHeist.heistName() );
                }
            }
        }
        else {
            this.getHeistletOrchestrator().terminate(); // Cascading terminate children.
        }
    }

    @Override
    public void terminate(){
        this.apoptosis();
        //TODO
    }

    @Override
    public AtomicBoolean queryTerminationSignal() {
        return this.mTerminateSignal;
    }

    @Override
    public void joinStartMultiTasks() {
        this.beforeMultiTaskStart();
        this.vitalizeMultiTasks();
        this.awaitTasksFinished();
    }

    protected void handleAliveException( Exception e ) {
        this.getParentHeist().handleAliveException( e );
    }

    protected void handleKillException( Exception e ) throws IllegalStateException {
        this.getParentHeist().handleKillException( e );
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/MegaDOMIndexCrew.java
================================================
package com.sauron.heist.heistron;

import us.codecraft.webmagic.Request;

import java.io.File;
import java.io.IOException;

public abstract class MegaDOMIndexCrew extends HTTPCrew {
    public MegaDOMIndexCrew ( HTTPIndexHeist heist, int id ){
        super( heist, id );
    }

    @Override
    public HTTPIndexHeist parentHeist() {
        return (HTTPIndexHeist) this.heist;
    }

    public String queryHrefById ( long id ) {
        return this.parentHeist().queryHrefById( id );
    }

    // Buckets a task id into a "<low>W_<high>W" shard name, W = 10,000 (万).
    public String queryFragNamespace( long id ) {
        id = ( id == 0 ? 1 : id );
        long nBase  = this.fragRange / this.fragBase;
        long nLow   = id / this.fragRange;
        long nMod   = id % this.fragRange;
        long nAbove = nLow;
        if ( nMod != 0 || id % 10 == 0 ) {
            ++nAbove;
        }
        nAbove *= nBase;
        nLow   *= nBase;

        String szNS = nLow + "W";
        szNS += "_" + nAbove + "W";
        return szNS;
    }

    public String querySpoilStorageDir( long id ) {
        return this.parentHeist().spoilPath + this.queryFragNamespace( id ) + "/";
    }

    public String querySpoilStoragePath( long id ) {
        return this.querySpoilStorageDir( id ) + "page_" + id + ".html";
    }

    @Override
    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException {
        String szStorageDir  = this.querySpoilStorageDir ( id );
        String szStoragePath = this.querySpoilStoragePath( id );
        File storageDir = new File( szStorageDir );
        if ( !storageDir.isDirectory() ) {
            storageDir.mkdirs();
        }

        String szDummyHref = this.queryHrefById( id );
        String szHref;
        if ( szDummyHref.startsWith( "http" ) ) {
            szHref = szDummyHref;
        }
        else {
            szHref = this.heistURL + szDummyHref;
        }

        Request request = new Request( szHref );
        request.putExtra( "id", id );
        this.storeHrefCache( szStoragePath, request );
    }
}

================================================
FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/PatriarchalHeistScheme.java
================================================
package com.sauron.heist.heistron;

import com.pinecone.framework.system.Nullable;
import com.pinecone.framework.unit.MultiScopeMap;
import com.pinecone.framework.unit.MultiScopeMaptron;
import com.pinecone.framework.unit.TreeMap;
import com.pinecone.framework.unit.affinity.RecursiveUnitOverrider;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.hydra.config.MapConfigReinterpreter;
import com.pinecone.hydra.config.ScopedMapConfigReinterpreter;

import java.util.LinkedHashMap;
import java.util.Map;

// Editorial note: the map type parameters in this class were stripped in extraction;
// <String, Object> is inferred from the JSON-backed scopes and is an assumption.
public class PatriarchalHeistScheme extends RecursiveUnitOverrider implements HeistScheme {
    protected Heistum    mParentHeist;
    protected Heistgram  mHeistron;
    protected JSONConfig mjoTemplateHeistSchemeConfig;
    protected JSONConfig mjoProtoConfig ; // This Heist's JSON prototype config, to be inherited and overridden by child Heists.
    protected JSONObject mjoChildrenConfig;

    protected MultiScopeMap<String, Object> mHeistScope;
    protected MapConfigReinterpreter        mReinterpreter;

    public PatriarchalHeistScheme( Heistum heist ) {
        this.mParentHeist = heist;
        this.mHeistron = this.mParentHeist.getHeistgram();
        this.mjoTemplateHeistSchemeConfig = this.mHeistron.getTemplateHeistSchemeConfig();
        this.mjoProtoConfig = this.getParentHeist().getProtoConfig();

        this.mHeistScope = new MultiScopeMaptron<>();
        this.getHeistScope().addParent(
                ( new MultiScopeMaptron<>( this.getProtoConfig() ) ).addParent(
                        ( new MultiScopeMaptron<>( this.getTemplateHeistSchemeConfig() ) ).setName( "Template" )
                )
        ).setName( "ProtoConfig" );

        this.mjoChildrenConfig = (JSONObject) this.getHeistScope().get( Heistum.ConfigChildrenKey );
        this.mReinterpreter = new ScopedMapConfigReinterpreter( null );
    }

    protected PatriarchalHeistScheme applyInstanceScope( Map<String, Object> instance ) {
        this.getHeistScope().setThisScope( instance );
        return this;
    }

    @Override
    public JSONConfig getInstanceConfigByName( String name ) {
        return this.getInstanceConfigByName( name, false );
    }

    /**
     * getInstanceConfigByName
     * @param name ( Child instance name, which extends the parent scope; returns that child's instance config. )
     *             ( `null` means the current scope, [this]. )
     * @param bRecursive ( Recursively override every object and list whose key the child does not already have. )
     * @return Instance Config
     */
    @Override
    public JSONConfig getInstanceConfigByName( @Nullable String name, boolean bRecursive ) {
        Map<String, Object> selfProto = null;
        Map<String, Object> selfCopy ;
        if( name == null ) {
            selfCopy = this.getProtoConfig().clone();
        }
        else {
            JSONObject sub = this.mjoChildrenConfig.optJSONObject( name );
            if( sub != null ) {
                selfProto = sub;
                selfCopy = sub.clone();
            }
            else {
                return null;
            }
        }

        // Protecting the children`s key ["Children"]
        Map<String, Object> thisChildren = (Map<String, Object>)selfCopy.get( Heistum.ConfigChildrenKey );
        if( thisChildren != null ) {
            selfCopy.remove( Heistum.ConfigChildrenKey );
        }
        this.applyInstanceScope( selfCopy );

        JSONConfig neo = new JSONConfig( this.getProtoConfig() );
        LinkedHashMap<String, Object> overridden = new LinkedHashMap<>();
        this.getHeistScope().overrideTo( overridden );
        neo.setThisScope( overridden );
        if( bRecursive ) {
            this.overrideObject( overridden, this.getProtoConfig(), bRecursive );
            this.overrideObject( overridden, this.getTemplateHeistSchemeConfig(), bRecursive );
            //Debug.echo( JSON.stringify( overridden, 2 ) );
        }

        // Restoring the protected children`s key ["Children"]
        if( thisChildren != null ) {
            neo.put( Heistum.ConfigChildrenKey, thisChildren );
        }
        else {
            neo.put( Heistum.ConfigChildrenKey, new JSONMaptron() );
        }

        if( name == null ) {
            this.overrideOrchestrationSegment( this.getProtoConfig(), neo );
        }
        else {
            this.overrideOrchestrationSegment( selfProto, neo );
        }

        return neo;
    }

    protected void overrideOrchestrationSegment( Map<String, Object> selfProto, JSONConfig neo ) {
        Map<String, Object> jp = (Map<String, Object>) selfProto.get( Heistum.ConfigOrchestrationKey );
        Map<String, Object> copy ;
        if( jp == null ) {
            copy = new JSONMaptron();
        }
        else {
            copy = ( (JSONObject) jp).clone();
        }
        this.override( copy, this.getTemplateHeistSchemeConfig().opt( Heistum.ConfigOrchestrationKey ), true );
        neo.put( Heistum.ConfigOrchestrationKey, copy );
    }

    @Override
    public void overrideSegment ( Map<String, Object> parentProto, Map<String, Object> instance ) {
        if( parentProto == this.getHeistScope() ) {
            this.getHeistScope().overrideTo( instance );
        }
        else {
            MultiScopeMap<String, Object> scope = new MultiScopeMaptron<>();
            scope.setThisScope( parentProto );
            scope.overrideTo( instance );
        }
    }

    @Override
    public PatriarchalHeistScheme reinterpret( JSONConfig that ) {
        MultiScopeMap<String, Object> sysGlobalScope = this.getHeistgram().parentSystem().getGlobalConfigScope(); // System runtime global config scope.
        JSONConfig heistParentList = this.getHeistgram().getLocalHeistsConfigList(); // Parent Scope of the master[e.g. Heist.json5::Heists] config.
        JSONConfig rootConfig = (JSONConfig) this.getHeistgram().parentSystem().getGlobalConfig(); // Root Scope of the master[e.g. config.json5] config.
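        // Reinterpretation keywords: `this` resolves to the config being rewritten, `super`
        // to the gram-level Heists table, and `__root__` to the system's global root config.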
MultiScopeMap keyWords = new MultiScopeMaptron<>( new TreeMap<>() ); keyWords.put( "this" , that ); keyWords.put( "super" , heistParentList ); keyWords.put( "__root__" , rootConfig ); this.mReinterpreter.setPrimaryScope( sysGlobalScope ); this.mReinterpreter.addExcludeKey( Heistum.ConfigChildrenKey ); this.mReinterpreter.reinterpretByBasicKeyWordsScope( that, keyWords ); return this; } @Override public MultiScopeMap getHeistScope() { return this.mHeistScope; } @Override public JSONConfig getProtoConfig() { return this.mjoProtoConfig; } @Override public Heistgram getHeistgram() { return this.mHeistron; } @Override public Heistum getParentHeist() { return this.mParentHeist; } @Override public JSONConfig getTemplateHeistSchemeConfig() { return this.mjoTemplateHeistSchemeConfig; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/ArchPeriodicHeistRehearsal.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.RuntimeSystem; import com.pinecone.framework.util.json.JSONObject; import com.pinecone.framework.util.json.homotype.AnnotatedObjectInjector; import com.pinecone.framework.util.json.homotype.JSONGet; import com.pinecone.framework.util.lang.NamespaceCollector; import com.pinecone.framework.util.lang.PackageNameFetcher; import com.pinecone.framework.util.name.FixScopeName; import com.pinecone.framework.util.name.Name; import com.pinecone.hydra.auto.PeriodicAutomaton; import com.pinecone.hydra.auto.PeriodicAutomatron; import com.sauron.heist.heistron.HTTPHeist; import com.sauron.heist.heistron.Heistgram; import com.sauron.heist.heistron.Heists; import com.sauron.heist.heistron.Heistum; import com.pinecone.tritium.system.TritiumSystem; import com.sauron.heist.heistron.orchestration.LocalHeistumOrchestrator; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; public abstract class ArchPeriodicHeistRehearsal implements PeriodicHeistRehearsal { protected Heistum mHeistum; @JSONGet( "ChronicPerAcc" ) protected long mnChronicPerAcc; @JSONGet( "ChronicPeriods" ) protected List mChronicPeriods; @JSONGet( "RaiderMarshaling" ) protected JSONObject mRaiderMarshaling; @JSONGet( "RaiderMarshaling.RaiderScopes" ) protected List mRaiderScopes; @JSONGet( "RaiderMarshaling.MarshalingList" ) protected List mMarshalingList; @JSONGet( "RaiderMarshaling.Raiders" ) protected JSONObject mRaiderConfigs; @JSONGet( "RaiderMarshaling.UsingSedation" ) protected boolean mbUsingSedation; @JSONGet( "RaiderMarshaling.UsingUniformFeast" ) protected boolean mbUsingUniformFeast; protected List mExclusiveRaiders; protected List mPreloadPrefixes; protected List mPreloadSuffixes; protected RaiderFactory mRaiderFactory; protected AtomicInteger mIndexId; protected PeriodicAutomatron mPrimaryAutomatron; protected ArchPeriodicHeistRehearsal( Heistum heistum , boolean bDaemon ) { this.mHeistum = heistum; this.mIndexId = new AtomicInteger(); this.mExclusiveRaiders = new ArrayList<>(); RuntimeSystem system = heistum.parentSystem(); if( system instanceof TritiumSystem) { ( (TritiumSystem) system ).getPrimaryConfigScope().autoInject( ArchPeriodicHeistRehearsal.class, this.mHeistum.getConfig(), this ); } else { AnnotatedObjectInjector injector = new AnnotatedObjectInjector( ArchPeriodicHeistRehearsal.class ); injector.inject( this.mHeistum.getConfig(), this ); } 
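        // Fields annotated with @JSONGet above are populated here: through the Tritium
        // system's primary config scope when available, otherwise via a plain
        // AnnotatedObjectInjector over the heist's config.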
this.initDirectlyLoad(); this.prepareFactory( new LocalRaiderFactory( this.mHeistum.getThisHeistletOrchestrator() ) ); this.mPrimaryAutomatron = new PeriodicAutomaton( Heists.getCriterionNomenclatureName( this.mHeistum ), heistum.getHeistium(), this.mnChronicPerAcc, bDaemon ); } @Override public AtomicInteger getIndexId() { return this.mIndexId; } @Override public List getPreloadPrefixes() { return this.mPreloadPrefixes; } @Override public List getPreloadSuffixes() { return this.mPreloadSuffixes; } @SuppressWarnings( "unchecked" ) protected void initDirectlyLoad() { try{ JSONObject jDirectlyLoad = this.mRaiderMarshaling.optJSONObject( LocalHeistumOrchestrator.ConfigDirectlyLoadKey ); if( jDirectlyLoad == null ) { this.mPreloadPrefixes = new ArrayList<>(); this.mPreloadSuffixes = new ArrayList<>(); } else { this.mPreloadPrefixes = (List) jDirectlyLoad.getOrDefault( "Prefix", new ArrayList<>() ); this.mPreloadSuffixes = (List) jDirectlyLoad.getOrDefault( "Suffix", new ArrayList<>() ); } if ( !this.mPreloadSuffixes.contains( "" ) ) { this.mPreloadSuffixes.add( "" ); } if ( !this.mPreloadPrefixes.contains( "" ) ) { this.mPreloadPrefixes.add( "" ); } } catch ( Exception e ) { e.printStackTrace( this.getHeistgram().parentSystem().console().getErr() ); } } protected void prepareFactory( @Nullable RaiderFactory factory ) { if( factory != null ) { this.mRaiderFactory = factory; String szCurrentPackageName = this.getClass().getPackageName(); this.mRaiderScopes.add( szCurrentPackageName ); NamespaceCollector collector = new PackageNameFetcher( factory.getClassLoader() ); List children = collector.fetch( szCurrentPackageName ); this.mRaiderScopes.addAll( children ); for( String sz : this.mRaiderScopes ) { this.mRaiderFactory.getClassScope().addScope( sz ); } this.mRaiderFactory.getTraitClassLoader().updateScope(); } } @Override public Heistgram getHeistgram() { return this.getParentHeist().getHeistgram(); } @Override public Heistum getParentHeist() { return this.mHeistum; } @Override public PeriodicAutomatron getAutomatron() { return this.mPrimaryAutomatron; } @Override public List getRawChronicPeriods() { return this.mChronicPeriods; } @Override public JSONObject getRaiderMarshalingConf() { return this.mRaiderMarshaling; } @Override public JSONObject getRaiderConfigs() { return this.mRaiderConfigs; } protected int nextId(){ return this.getIndexId().getAndIncrement(); } @Override public void vitalize() { for( String szRaider : this.mMarshalingList ) { List list = this.popping( szRaider ); this.mExclusiveRaiders.addAll( list ); } if ( this.mbUsingUniformFeast ) { this.getAutomatron().command( new FeastInstructation( this ) ); } for( Raider raider : this.mExclusiveRaiders ) { this.getAutomatron().command( raider.getPrimeDirective() ); //this.getAutomatron().command( KernelInstructation.DIE ); // try{ // raider.getPrimeDirective().execute(); // } // catch ( Exception e ) { // // } } // this.getAutomatron().command(new Instructation() { // @Override // public void execute() throws Exception { // Debug.sleep(30000); // Debug.trace("shit"); // } // }); if( this.mbUsingSedation ){ this.getAutomatron().command( new SedationInstructation( this ) ); } //this.getAutomatron().command( KernelInstructation.TERMINATE ); this.getAutomatron().start(); } @Override public void joinVitalize() throws InterruptedException { this.vitalize(); this.getAutomatron().join(); } @SuppressWarnings( "unchecked" ) protected List popping( String szName ) { List prefixes = new ArrayList<>( this.mPreloadPrefixes ); prefixes.add( szName 
+ "." ); return this.mRaiderFactory.popping( new FixScopeName(szName, prefixes, (List)this.mPreloadSuffixes), (HTTPHeist)this.getParentHeist(), this.nextId(), this.mRaiderConfigs.optJSONObject( szName ) ); } protected List popping( Name name ) { return this.mRaiderFactory.popping( name, this.getHeistgram() ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/ExcludeRaiderletFilters.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.util.lang.TypeFilter; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import javassist.ClassPool; import javassist.CtClass; import javassist.NotFoundException; import java.io.IOException; public class ExcludeRaiderletFilters implements TypeFilter { protected HierarchyClassInspector mClassInspector; public ExcludeRaiderletFilters( HierarchyClassInspector inspector ) { this.mClassInspector = inspector; } @Override public boolean match( String szClassName, Object pool ) throws IOException { try{ CtClass clz = ( (ClassPool) pool ).get( szClassName ); if( clz.isInterface() ) { return true; } if( this.mClassInspector.isImplemented( clz, Raider.class ) ) { return false; } return !this.mClassInspector.hasOwnAnnotation( clz, Raiderlet.class ) ; } catch ( NotFoundException e ) { return true; } } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/FeastInstructation.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.util.datetime.UniformDateTimeAudit; import com.pinecone.hydra.auto.ArchInstructation; import com.pinecone.hydra.auto.ContinueException; import com.sauron.heist.heistron.Heistum; import com.sauron.heist.heistron.orchestration.Instructations; import java.time.LocalDateTime; import java.util.List; public class FeastInstructation extends ArchInstructation { private Heistum mHeistum; private List mChronicPeriods; private UniformDateTimeAudit mDateTimeAudit; public FeastInstructation( Heistum heistum, List chronicPeriods ) { this.mHeistum = heistum; this.mChronicPeriods = chronicPeriods; this.mDateTimeAudit = UniformDateTimeAudit.DefaultAudit; Instructations.infoConformed( heistum, this ); } public FeastInstructation( PeriodicHeistRehearsal kernel ) { this( kernel.getParentHeist(), kernel.getRawChronicPeriods() ); } @Override public void execute() throws Exception { LocalDateTime currentTime = LocalDateTime.now(); boolean isFeastTime = false; for ( String period : this.mChronicPeriods ) { if ( this.mDateTimeAudit.matches( period, currentTime ) ) { isFeastTime = true; break; } } if ( !isFeastTime ) { this.infoStarvation("It`s time to feast?", "Slumber" ); throw new ContinueException(); } this.infoStarvation("It`s time to feast?", "Berserking" ); } protected FeastInstructation infoStarvation( String szWhat, String szStateOrExtra ) { this.mHeistum.tracer().info( "[Starvation] [{}] <{}>", szWhat, szStateOrExtra ); return this; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/LocalMultiRaiderLoader.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.name.Name; import com.pinecone.hydra.servgram.filters.AnnotationValueFilter; 
import com.pinecone.ulf.util.lang.ArchMultiScopeLoader; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import com.pinecone.ulf.util.lang.PooledClassCandidateScanner; import javassist.ClassPool; import javassist.bytecode.annotation.Annotation; public class LocalMultiRaiderLoader extends ArchMultiScopeLoader implements MultiRaiderLoader { protected AnnotationValueFilter mAnnoValueFilter ; protected LocalMultiRaiderLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool ) { super( classScope, classLoader, classPool, null, null ); this.mClassScanner = new PooledClassCandidateScanner( new LocalRaiderScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool ); this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool ); this.mClassScanner.addExcludeFilter( new ExcludeRaiderletFilters( this.mClassInspector ) ); this.setAnnotationValueFilter( new RaiderletAnnotationValueFilter() ); } protected LocalMultiRaiderLoader( ClassScope classScope, ClassLoader classLoader ) { this( classScope, classLoader, ClassPool.getDefault() ); } public LocalMultiRaiderLoader( RaiderFactory factory ) { this( factory.getClassScope(), factory.getClassLoader() ); } @Override public void setAnnotationValueFilter( AnnotationValueFilter filter ) { this.mAnnoValueFilter = filter; } @Override protected boolean isAnnotationQualified( Annotation that, String szName ) { return !this.mAnnoValueFilter.match( that, szName ); } @Override @SuppressWarnings( "unchecked" ) public Class load( Name simpleName ) throws ClassNotFoundException { return (Class )super.load( simpleName ); } // Directly by it`s name. @Override @SuppressWarnings( "unchecked" ) public Class loadByName( Name simpleName ) throws ClassNotFoundException { return (Class )super.loadByName( simpleName ); } // Scanning class`s annotations, methods or others. 
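// Unlike loadByName, which resolves a raider class directly by its (scoped) name,
// loadInClassTrait qualifies scanned candidates through the configured
// RaiderletAnnotationValueFilter, matching the requested name against each
// candidate's @Raiderlet value. Illustrative example with a hypothetical class:
// given @Raiderlet( "douban" ) on class DoubanRaider, loadInClassTrait would
// resolve the name "douban" to DoubanRaider.class even though "douban" is not
// its class name.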
@Override @SuppressWarnings( "unchecked" ) public Class loadInClassTrait( Name simpleName ) throws ClassNotFoundException { return (Class )super.loadInClassTrait( simpleName ); } @Override protected Class loadSingleByFullClassName( String szFullClassName ) { try { Class clazz = this.mClassLoader.loadClass( szFullClassName ); if( this.filter( clazz ) ) { return null; } if ( Raider.class.isAssignableFrom( clazz ) ) { return clazz.asSubclass( Raider.class ); } } catch ( ClassNotFoundException e ) { return null; } return null; } @Override public MultiRaiderLoader updateScope() { return (MultiRaiderLoader)super.updateScope(); } @Override public void clearCache() { this.mLoadedClassesPool.clear(); this.mVisitedClasses.clear(); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/LocalRaiderFactory.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.system.executum.TaskManager; import com.pinecone.framework.util.lang.ClassScope; import com.pinecone.framework.util.name.Name; import com.pinecone.ulf.util.lang.ArchMultiScopeFactory; import java.lang.reflect.InvocationTargetException; import java.util.List; public class LocalRaiderFactory extends ArchMultiScopeFactory implements RaiderFactory { public LocalRaiderFactory( TaskManager taskManager, ClassLoader classLoader, MultiRaiderLoader raiderLoader, ClassScope classScope ) { super( taskManager, classLoader, raiderLoader, classScope ); } public LocalRaiderFactory( TaskManager taskManager ) { this( taskManager, taskManager.getClassLoader(), null, null ); this.mClassScope = new LocalRaiderScopeSet( this ); this.mTraitClassLoader = new LocalMultiRaiderLoader( this ); } public LocalRaiderFactory( TaskManager taskManager, ClassScope classScope ) { this( taskManager, taskManager.getClassLoader(), null, classScope ); this.mTraitClassLoader = new LocalMultiRaiderLoader( this ); } @Override public MultiRaiderLoader getTraitClassLoader() { return (MultiRaiderLoader) super.getTraitClassLoader(); } @Override public Raider newInstance ( Class that, Class[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { return (Raider) super.newInstance( that, stereotypes, args ); } @Override public Raider spawn ( Name name, Object... args ) throws InvocationTargetException { return this.spawn( name, null, args ); } @Override public Raider spawn ( Name name, Class[] stereotypes, Object... args ) throws InvocationTargetException { return (Raider) super.spawn( name, stereotypes, args ); } @Override public List popping ( Name name, Object... args ) { return this.popping( name, null, args ); } @Override @SuppressWarnings( "unchecked" ) public List popping ( Name name, Class[] stereotypes, Object... 
args ) { return (List) super.popping( name, stereotypes, args ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/LocalRaiderScopeSet.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.unit.LinkedTreeSet; import com.pinecone.framework.util.lang.ArchClassScopeSet; import com.pinecone.framework.util.lang.ScopedPackage; import java.util.Set; public class LocalRaiderScopeSet extends ArchClassScopeSet { public LocalRaiderScopeSet( Set scope, ClassLoader classLoader ) { super( scope, classLoader ); } public LocalRaiderScopeSet( ClassLoader classLoader ) { super( new LinkedTreeSet<>(), classLoader ); } public LocalRaiderScopeSet( RaiderFactory factory ) { super( new LinkedTreeSet<>(), factory.getClassLoader() ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/RaiderletAnnotationValueFilter.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.hydra.servgram.filters.AnnotationValueFilter; import javassist.bytecode.annotation.Annotation; public class RaiderletAnnotationValueFilter implements AnnotationValueFilter { public boolean match( Annotation that, String destinationName ) { if( that.getTypeName().equals( Raiderlet.class.getName() ) ) { String szAN = that.getMemberValue( Raiderlet.ValueKey ).toString(); if( szAN.startsWith( "\"" ) ){ return !szAN.equals("\"" + destinationName + "\""); } return !szAN.equals( destinationName ); } return true; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/SedationInstructation.java ================================================ package com.sauron.heist.heistron.chronic; import com.pinecone.framework.util.datetime.GenericMultiFormDateTimeAudit; import com.pinecone.framework.util.datetime.StorageDateTime; import com.pinecone.framework.util.datetime.UniformDateTimeAudit; import com.pinecone.hydra.auto.ArchInstructation; import com.sauron.heist.heistron.Heistum; import com.sauron.heist.heistron.orchestration.Instructations; import java.time.LocalDateTime; import java.time.temporal.ChronoUnit; import java.util.List; /** * Force slumber to prevent excessive actions within the same time period. 
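* <p>
* Mechanics, as a worked example (illustrative; the exact period syntax is defined
* by GenericMultiFormDateTimeAudit): when a configured chronic period matches the
* current time, firstJumpOutTime bumps the smallest unit the period actually
* specifies, i.e. the first field from nano up to year that is not the -1 wildcard.
* For a period specified down to the minute, the bumped boundary and the period's
* own resolution therefore differ by exactly one minute, so the sedative sleeps
* 60000 ms, which guarantees the matched period has fully elapsed before the next
* feast check fires.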
*/ public class SedationInstructation extends ArchInstructation { private Heistum mHeistum; private List mChronicPeriods; private UniformDateTimeAudit mDateTimeAudit; public SedationInstructation( Heistum heistum, List chronicPeriods ) { this.mHeistum = heistum; this.mChronicPeriods = chronicPeriods; this.mDateTimeAudit = UniformDateTimeAudit.DefaultAudit; Instructations.infoConformed( heistum, this ); } public SedationInstructation( PeriodicHeistRehearsal kernel ) { this( kernel.getParentHeist(), kernel.getRawChronicPeriods() ); } // Increments the first non-negative component(wildcard) from the smallest to the largest unit // (nano, second, minute, hour, day, month, year) protected LocalDateTime firstJumpOutTime( String period, LocalDateTime now ) { StorageDateTime dateTime = GenericMultiFormDateTimeAudit.fromString( period ); if ( dateTime.getNano() != -1 ) { dateTime.setNano( dateTime.getNano() + 1 ); } else if ( dateTime.getSecond() != -1 ) { dateTime.setSecond( dateTime.getSecond() + 1 ); } else if ( dateTime.getMinute() != -1 ) { dateTime.setMinute( dateTime.getMinute() + 1 ); } else if ( dateTime.getHour() != -1 ) { dateTime.setHour( dateTime.getHour() + 1 ); } else if ( dateTime.getDayOfMonth() != -1 ) { dateTime.setDay( dateTime.getDayOfMonth() + 1 ); } else if ( dateTime.getMonthValue() != -1 ) { dateTime.setMonth( dateTime.getMonthValue() + 1 ); } else if ( dateTime.getYear() != -1 ) { dateTime.setYear( dateTime.getYear() + 1 ); } return GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, now ); } @Override public void execute() throws Exception { LocalDateTime currentTime = LocalDateTime.now(); for ( String period : this.mChronicPeriods ) { if ( this.mDateTimeAudit.matches( period, currentTime ) ) { StorageDateTime dateTime = GenericMultiFormDateTimeAudit.fromString( period ); LocalDateTime previous = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, currentTime ); LocalDateTime next = this.firstJumpOutTime( period, currentTime ); long differenceInMillis = Math.abs( ChronoUnit.MILLIS.between( next, previous ) ); if( differenceInMillis != 0 ) { this.infoStarvation("Activates Sedative [Entrance] [ForceSlumber: " + (double)differenceInMillis / 1000d + "s]", "Activated" ); Thread.sleep( differenceInMillis ); this.infoStarvation("Deactivates Sedative [Slumber] [CurrentTime: " + LocalDateTime.now() + "]", "Deactivated" ); } break; } } this.infoStarvation("Aborts Sedative [Slumber]", "Abort" ); } protected SedationInstructation infoStarvation( String szWhat, String szStateOrExtra ) { this.mHeistum.tracer().info( "[Starvation] [{}] <{}>", szWhat, szStateOrExtra ); return this; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/mapreduce/HTTPResourceTaskScheme.java ================================================ package com.sauron.heist.heistron.mapreduce; import java.net.URL; public class HTTPResourceTaskScheme implements TaskScheme { public URL url; public String method; public String charset; } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/ExcludeHeistletFilters.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.framework.util.lang.TypeFilter; import com.sauron.heist.heistron.Heistum; import com.pinecone.ulf.util.lang.HierarchyClassInspector; import javassist.ClassPool; import javassist.CtClass; import javassist.NotFoundException; 
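/*
 * ExcludeHeistletFilters (below) implements the same exclusion contract as
 * ExcludeRaiderletFilters in the chronic package: match() returning true means
 * "exclude this candidate", so scanners keep Heistum implementations and
 * @Heistlet-annotated concrete classes. A minimal usage sketch against javassist's
 * default pool; keepHeistlets and candidates are hypothetical helpers, while the
 * filter and pool types are real:
 *
 *   import javassist.ClassPool;
 *   import java.io.IOException;
 *   import java.util.ArrayList;
 *   import java.util.List;
 *
 *   static List<String> keepHeistlets( List<String> candidates,
 *                                      ExcludeHeistletFilters filter ) throws IOException {
 *       ClassPool pool = ClassPool.getDefault();
 *       List<String> kept = new ArrayList<>();
 *       for ( String szName : candidates ) {
 *           if ( !filter.match( szName, pool ) ) {    // false means keep the candidate
 *               kept.add( szName );
 *           }
 *       }
 *       return kept;
 *   }
 */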
import java.io.IOException; public class ExcludeHeistletFilters implements TypeFilter { protected HierarchyClassInspector mClassInspector; public ExcludeHeistletFilters( HierarchyClassInspector inspector ) { this.mClassInspector = inspector; } @Override public boolean match( String szClassName, Object pool ) throws IOException { try{ CtClass clz = ( (ClassPool) pool ).get( szClassName ); if( clz.isInterface() ) { return true; } if( this.mClassInspector.isImplemented( clz, Heistum.class ) ) { return false; } return !this.mClassInspector.hasOwnAnnotation( clz, Heistlet.class ) ; } catch ( NotFoundException e ) { return true; } } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/HeistTask.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.framework.system.executum.ArchProcessum; import com.pinecone.framework.system.executum.Executum; import com.sauron.heist.heistron.CascadeHeist; import com.sauron.heist.heistron.Heistum; import java.util.concurrent.atomic.AtomicInteger; public abstract class HeistTask extends ArchProcessum implements Taskium { private static final AtomicInteger rootAutoIncrementId = new AtomicInteger( 0 ); private static long nextRootAutoIncrementId() { return Executum.AutoIncrementId.getAndIncrement(); } private static String name( String szName, Heistum heistum ) { if( szName != null ) { return szName; } if( heistum instanceof CascadeHeist) { CascadeHeist cascadeHeist = (CascadeHeist) heistum; return cascadeHeist.getInstanceFullName(); } return heistum.heistName(); } private long mnTaskId; protected Heistum mParentHeist; public HeistTask( String szName, Heistum heistum ) { super( szName, heistum.getHeistgram() ); this.mParentHeist = heistum; //this.mTaskManager = new GenericMasterTaskManager( this ); this.mTaskManager = new LocalChildHeistOrchestrator( this, this.getParentHeist().getConfig() ); boolean bUsingRootId = true ; if( heistum instanceof CascadeHeist ) { CascadeHeist cascadeHeist = (CascadeHeist) heistum; if( cascadeHeist.parent() != null ){ this.mnTaskId = cascadeHeist.parent().getHeistium().getHeistletOrchestrator().nextAutoIncrementTaskId(); bUsingRootId = false; } } if( bUsingRootId ) { this.mnTaskId = HeistTask.nextRootAutoIncrementId(); } this.mszName = szName + "-task-" + this.getTaskId(); } public HeistTask( Heistum heistum ) { this( HeistTask.name( null, heistum ), heistum ); } @Override public long getTaskId() { return this.mnTaskId; } public Heistum getParentHeist() { return this.mParentHeist; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/HeistletAnnotationValueFilter.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.hydra.servgram.filters.AnnotationValueFilter; import javassist.bytecode.annotation.Annotation; public class HeistletAnnotationValueFilter implements AnnotationValueFilter { public boolean match(Annotation that, String destinationName ) { if( that.getTypeName().equals( Heistlet.class.getName() ) ) { String szAN = that.getMemberValue( Heistlet.ValueKey ).toString(); if( szAN.startsWith( "\"" ) ){ return !szAN.equals("\"" + destinationName + "\""); } return !szAN.equals( destinationName ); } return true; } } ================================================ FILE: 
Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalChildHeistOrchestrator.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.name.Name; import com.pinecone.hydra.servgram.*; import com.sauron.heist.heistron.CascadeHeist; import com.sauron.heist.heistron.Heistgram; import com.sauron.heist.heistron.Heistium; import com.sauron.heist.heistron.Heistum; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; public class LocalChildHeistOrchestrator extends ArchServgramOrchestrator implements ChildHeistOrchestrator { private final AtomicInteger mAutoIncrementTaskId = new AtomicInteger( 0 ) ; protected Heistium mHeistium ; protected JSONConfig mChildren ; public LocalChildHeistOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) { super( parent, sectionConfig, factory, transaction ); } public LocalChildHeistOrchestrator( Processum parent, PatriarchalConfig sectionConfig ) { this( parent, sectionConfig, null, null ); //this.prepareFactory( new LocalHeistletFactory( this ) ); this.setTransaction( new LocalGramTransaction( this, parent ) ); if( parent instanceof Heistium ) { this.mHeistium = (Heistium)parent; this.mChildren = this.getHeist().getConfig().getChild( Heistum.ConfigChildrenKey ); } } @Override public int nextAutoIncrementTaskId() { return this.mAutoIncrementTaskId.getAndIncrement(); } @Override public Heistium getHeistium() { return this.mHeistium; } @Override public CascadeHeist getHeist() { return (CascadeHeist) this.getHeistium().getParentHeist(); } @Override public Heistgram getHeistgram() { return this.getHeist().getHeistgram(); } @Override protected List popping( String szName ) { List list = new ArrayList<>(); if( this.mChildren.hasOwnProperty( szName ) ) { try{ CascadeHeist heistum = this.getHeist().getClass().getConstructor( Heistgram.class, CascadeHeist.class, String.class ).newInstance( this.getHeistgram(), this.getHeist(), szName ); ChildHeistInstanceModifier modifier = this.getHeist().getChildHeistInstanceModifier(); if ( modifier != null ) { heistum.applyChildHeistInstanceModifier( modifier ); modifier.modify( heistum ); } this.infoLifecycle( "Child contrived -> " + heistum.getInstanceFullName() ) ; list.add( heistum ); } catch ( Exception e ) { this.tracer().warn( String.format( "[%s] Construct `%s` has been compromised.", this.className(), szName ), e ); } } return list; } @Override protected List popping( Name name ) { return this.popping( name.getName() ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalHeistletFactory.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.framework.system.executum.TaskManager; import com.pinecone.hydra.servgram.ArchGramFactory; import com.pinecone.hydra.servgram.GramScope; import com.pinecone.hydra.servgram.LocalGramScopeSet; import com.pinecone.hydra.servgram.MultiGramsLoader; public class LocalHeistletFactory extends ArchGramFactory { public LocalHeistletFactory( TaskManager taskManager, ClassLoader classLoader, 
MultiGramsLoader gramLoader, GramScope gramScope ) { super( taskManager, classLoader, gramLoader, gramScope ); } public LocalHeistletFactory( TaskManager taskManager ) { this( taskManager, taskManager.getClassLoader(), null, null ); this.mClassScope = new LocalGramScopeSet( this ); this.mTraitClassLoader = new LocalHeistletLoader( this ); } public LocalHeistletFactory( TaskManager taskManager, GramScope gramScope ) { this( taskManager, taskManager.getClassLoader(), null, gramScope ); this.mTraitClassLoader = new LocalHeistletLoader( this ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalHeistletLoader.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.hydra.servgram.ArchGramLoader; import com.pinecone.hydra.servgram.GramFactory; import com.pinecone.hydra.servgram.GramScope; import com.pinecone.hydra.servgram.filters.AnnotationValueFilter; import com.sauron.heist.heistron.Heistum; import javassist.ClassPool; import javassist.bytecode.annotation.Annotation; public class LocalHeistletLoader extends ArchGramLoader { protected AnnotationValueFilter mAnnoValueFilter ; public LocalHeistletLoader( GramScope classScope, ClassLoader classLoader, ClassPool classPool ) { super( classScope, classLoader, classPool ); this.mClassScanner.addExcludeFilter( new ExcludeHeistletFilters( this.mClassInspector ) ); this.setAnnotationValueFilter( new HeistletAnnotationValueFilter() ); } public LocalHeistletLoader( GramScope classScope, ClassLoader classLoader ) { this( classScope, classLoader, ClassPool.getDefault() ); } public LocalHeistletLoader( GramFactory factory ) { this( factory.getClassScope(), factory.getClassLoader() ); } public void setAnnotationValueFilter( AnnotationValueFilter filter ) { this.mAnnoValueFilter = filter; } @Override protected boolean isAnnotationQualified( Annotation that, String szName ) { return !this.mAnnoValueFilter.match( that, szName ); } @Override protected Class loadSingleByFullClassName(String szFullClassName ) { try { Class clazz = this.mClassLoader.loadClass( szFullClassName ); if( this.filter( clazz ) ) { return null; } if ( Heistum.class.isAssignableFrom( clazz ) ) { return clazz.asSubclass( Heistum.class ); } } catch ( ClassNotFoundException e ) { return null; } return null; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalHeistumOrchestrator.java ================================================ package com.sauron.heist.heistron.orchestration; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.config.PatriarchalConfig; import com.pinecone.framework.util.name.FixScopeName; import com.pinecone.framework.util.name.Name; import com.pinecone.hydra.servgram.ArchServgramOrchestrator; import com.pinecone.hydra.servgram.GramFactory; import com.pinecone.hydra.servgram.GramTransaction; import com.pinecone.hydra.servgram.LocalGramTransaction; import com.pinecone.hydra.servgram.Servgram; import com.sauron.heist.heistron.Heistgram; import java.util.ArrayList; import java.util.List; public class LocalHeistumOrchestrator extends ArchServgramOrchestrator implements HeistletOrchestrator { public final static String ConfigDirectlyLoadKey = "DirectlyLoad"; protected Heistgram mHeistgram; protected List mPreloadPrefixes; protected List 
mPreloadSuffixes; public LocalHeistumOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) { super( parent, sectionConfig, factory, transaction ); this.initDirectlyLoad(); } public LocalHeistumOrchestrator( Processum parent, PatriarchalConfig sectionConfig ) { this( parent, sectionConfig, null, null ); this.prepareFactory( new LocalHeistletFactory( this ) ); this.setTransaction( new LocalGramTransaction( this, parent ) ); if( parent instanceof Heistgram ) { this.mHeistgram = (Heistgram)parent; } } @Override public List getPreloadPrefixes() { return this.mPreloadPrefixes; } @Override public List getPreloadSuffixes() { return this.mPreloadSuffixes; } @SuppressWarnings( "unchecked" ) protected void initDirectlyLoad() { try{ PatriarchalConfig jDirectlyLoad = this.getOrchestrationConfig().getChild( LocalHeistumOrchestrator.ConfigDirectlyLoadKey ); if( jDirectlyLoad != null ) { this.mPreloadPrefixes = (List) jDirectlyLoad.getOrDefault( "Prefix", new ArrayList<>() ); this.mPreloadSuffixes = (List) jDirectlyLoad.getOrDefault( "Suffix", new ArrayList<>() ); } else { this.mPreloadPrefixes = new ArrayList<>(); this.mPreloadSuffixes = new ArrayList<>(); } if ( !this.mPreloadPrefixes.contains( "" ) ) { this.mPreloadPrefixes.add( "" ); } if ( !this.mPreloadSuffixes.contains( "" ) ) { this.mPreloadSuffixes.add( "" ); } } catch ( Exception e ) { e.printStackTrace( this.getHeistgram().parentSystem().console().getErr() ); } } @Override public Heistgram getHeistgram() { return this.mHeistgram; } @Override @SuppressWarnings( "unchecked" ) protected List popping( String szName ) { List prefixes = new ArrayList<>( this.mPreloadPrefixes ); prefixes.add( szName + "." ); return ( (GramFactory)this.getClassFactory() ).popping( new FixScopeName(szName, prefixes, (List)this.mPreloadSuffixes), this.getHeistgram() ); } @Override protected List popping( Name name ) { return ( (GramFactory)this.getClassFactory() ).popping( name, this.getHeistgram() ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/ActiveTaskPageProducer.java ================================================ package com.sauron.heist.heistron.scheduler; import com.pinecone.slime.chunk.scheduler.ActivePageScheduler64; import com.pinecone.slime.chunk.scheduler.LocalMapChunkRegister; import com.pinecone.slime.chunk.scheduler.PageDivider; public abstract class ActiveTaskPageProducer extends ActivePageScheduler64 implements TaskPageProducer { public ActiveTaskPageProducer(PageDivider divider, long autoIncrementId ) { super( divider, autoIncrementId ); this.mChunkRegister = new LocalMapChunkRegister<>(); } @Override public boolean hasMoreProducts() { return this.getDivider().remainAllocatable() > 0; } @Override public TaskPage require() { return (TaskPage) this.activate(); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalMultiActiveTaskPageProducer.java ================================================ package com.sauron.heist.heistron.scheduler; import com.sauron.heist.heistron.Heistium; import com.pinecone.slime.chunk.ContiguousPage; import com.pinecone.slime.chunk.scheduler.PageDivider; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; public class 
LocalMultiActiveTaskPageProducer extends ActiveTaskPageProducer { protected Heistium mHeistium; protected ReadWriteLock mActivateLock; protected CountDownLatch mTaskCountDownLatch; public LocalMultiActiveTaskPageProducer( Heistium heistium, PageDivider divider, long autoIncrementId ) { super( divider, autoIncrementId ); this.mHeistium = heistium; this.mActivateLock = new ReentrantReadWriteLock(); long nProductsSum = this.getProductsSum(); if( nProductsSum > Integer.MAX_VALUE ) { throw new IllegalArgumentException( "Number of local tasks should not be above INT32_MAX" ); } this.mTaskCountDownLatch = new CountDownLatch( (int)nProductsSum ); } @Override public boolean hasTerminateSignal() { return this.mHeistium.queryTerminationSignal().get(); } @Override public boolean hasMoreProducts() { this.mActivateLock.readLock().lock(); try { return super.hasMoreProducts(); } finally { this.mActivateLock.readLock().unlock(); } } @Override public TaskPage require() { return this.activate(); } @Override public TaskPage activate() { this.mActivateLock.writeLock().lock(); try { return (TaskPage) super.activate(); } finally { this.mActivateLock.writeLock().unlock(); } } @Override public void activate( ContiguousPage that ) { this.mActivateLock.writeLock().lock(); try { super.activate( that ); } finally { this.mActivateLock.writeLock().unlock(); } } @Override public void deactivate( ContiguousPage that ) { this.mActivateLock.writeLock().lock(); try { super.deactivate( that ); } finally { this.mActivateLock.writeLock().unlock(); } this.mTaskCountDownLatch.countDown(); } @Override public void deactivate( ContiguousPage[] those ) { this.mActivateLock.writeLock().lock(); try { for ( ContiguousPage p : those ) { super.deactivate( p ); this.mTaskCountDownLatch.countDown(); } } finally { this.mActivateLock.writeLock().unlock(); } } @Override public long getActivatedSize() { this.mActivateLock.readLock().lock(); try { return super.getActivatedSize(); } finally { this.mActivateLock.readLock().unlock(); } } @Override public ContiguousPage getPageById(long id ) { this.mActivateLock.readLock().lock(); try { return super.getPageById( id ); } finally { this.mActivateLock.readLock().unlock(); } } @Override public long getProductsSum() { return this.getDivider().getMaxAllocations(); } @Override public void awaitProducerFinished() throws InterruptedException { this.mTaskCountDownLatch.await(); } @Override public void awaitProducerFinished( long timeout, TimeUnit unit ) throws InterruptedException { this.mTaskCountDownLatch.await( timeout, unit ); } @Override public boolean isFinished() { return this.mTaskCountDownLatch.getCount() <= 0; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalPreemptiveSingleFrame64Consumer.java ================================================ package com.sauron.heist.heistron.scheduler; public class LocalPreemptiveSingleFrame64Consumer implements TaskFrame64Consumer { protected TaskFrame64Producer mFrameProducer; protected PageFrame64ConsumerAdapter mFrame64ConsumerAdapter; public LocalPreemptiveSingleFrame64Consumer( TaskFrame64Producer pageProducer, PageFrame64ConsumerAdapter frame64ConsumerAdapter ) { this.mFrameProducer = pageProducer; this.mFrame64ConsumerAdapter = frame64ConsumerAdapter; } @Override public void consume () { Long id = this.mFrameProducer.require(); while ( id != null ) { if( this.mFrameProducer.hasTerminateSignal() || Thread.currentThread().isInterrupted() ) { break; } try{
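// Consume the preempted frame id inside try/finally: deactivate() must run even if
// consumeById() throws, so the producer's countdown latch always advances and
// awaitProducerFinished() cannot hang on a failed frame. Only after the frame has
// been released is the next id preempted from the producer.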
this.mFrame64ConsumerAdapter.consumeById( id ); } finally { this.mFrameProducer.deactivate( id ); } id = this.mFrameProducer.require(); } } @Override public TaskFrame64Producer getTaskPageProducer() { return this.mFrameProducer; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalPreemptiveTaskFrame64Producer.java ================================================ package com.sauron.heist.heistron.scheduler; import com.sauron.heist.heistron.Heistium; import com.pinecone.slime.unitization.MinMaxRange; import com.pinecone.slime.chunk.RangedPage; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; public class LocalPreemptiveTaskFrame64Producer implements TaskFrame64Producer { protected Heistium mHeistium; protected RangedPage mMasterPage; protected ReadWriteLock mActivateLock; protected CountDownLatch mTaskCountDownLatch; protected long mnProductStartOffset; protected AtomicLong mFinishedTasks; protected long mnProductsSum; protected long mnFinishedProductsSum; public LocalPreemptiveTaskFrame64Producer( Heistium heistium, RangedPage masterPage ) { this.mHeistium = heistium; this.mMasterPage = masterPage; this.mActivateLock = new ReentrantReadWriteLock(); MinMaxRange range = (MinMaxRange)this.mMasterPage.getRange(); this.mnProductStartOffset = range.getMin().longValue(); this.mnProductsSum = range.span().longValue(); this.mnFinishedProductsSum = this.mnProductsSum + this.mnProductStartOffset; if( this.mnProductsSum > Integer.MAX_VALUE ) { throw new IllegalArgumentException( "Number of local tasks should not be above INT32_MAX" ); } this.mFinishedTasks = new AtomicLong ( this.mnProductStartOffset ); this.mTaskCountDownLatch = new CountDownLatch( (int)this.mnProductsSum ); } @Override public boolean hasTerminateSignal() { return this.mHeistium.queryTerminationSignal().get(); } @Override public long getProductsSum() { return this.mnProductsSum; } @Override public boolean hasMoreProducts() { return this.mFinishedTasks.get() < this.mnFinishedProductsSum; } @Override public Long require() { long index = this.mFinishedTasks.getAndIncrement(); if( index < this.mnFinishedProductsSum ){ return index; } return null; } @Override public void deactivate( Long that ){ this.mTaskCountDownLatch.countDown(); } @Override public boolean isFinished() { return this.mTaskCountDownLatch.getCount() <= 0; } @Override public void awaitProducerFinished() throws InterruptedException { this.mTaskCountDownLatch.await(); } @Override public void awaitProducerFinished( long timeout, TimeUnit unit ) throws InterruptedException { this.mTaskCountDownLatch.await( timeout, unit ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalSingleTaskPageConsumer.java ================================================ package com.sauron.heist.heistron.scheduler; public class LocalSingleTaskPageConsumer extends SingleTaskPageConsumer { public LocalSingleTaskPageConsumer( TaskPageProducer pageProducer, PageFrame64ConsumerAdapter frame64ConsumerAdapter ) { super( pageProducer, frame64ConsumerAdapter ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalTaskSchedulerStrategy.java
================================================ package com.sauron.heist.heistron.scheduler; import com.sauron.heist.heistron.Heistium; import com.pinecone.slime.chunk.RangedPage; import com.pinecone.slime.chunk.Splitunk; import com.pinecone.slime.chunk.scheduler.*; public class LocalTaskSchedulerStrategy implements TaskSchedulerStrategy { protected Heistium mParentHeistium; protected PagePool mHeistTaskPagePool; protected PageDivider mPageDivider; protected PageRecycleStrategy mPageRecycleStrategy; protected TaskProducer mTaskProducer; public LocalTaskSchedulerStrategy( Heistium heistium, PagePool pagePool, PageDivider divider, PageRecycleStrategy recycleStrategy ) { this.mParentHeistium = heistium; this.mHeistTaskPagePool = pagePool; this.mPageDivider = divider; this.mPageRecycleStrategy = recycleStrategy; } public LocalTaskSchedulerStrategy( Heistium heistium, long each ) { this( heistium, new DirectPagePool( RangedTaskPage.class ), null, null ); this.mPageDivider = new FixedPageDivider64( (Splitunk) this.getMasterPage(), this.getHeistTaskPagePool(), each ); } @Override public Heistium getParentHeistium() { return this.mParentHeistium; } @Override public RangedPage getMasterPage() { return this.getParentHeistium().getParentHeist().getMasterTaskPage(); } @Override public PagePool getHeistTaskPagePool() { return this.mHeistTaskPagePool; } @Override public PageDivider getPageDivider() { return this.mPageDivider; } @Override public PageRecycleStrategy getPageRecycleStrategy() { return this.mPageRecycleStrategy; } @Override public TaskSchedulerStrategy setHeistTaskPagePool( PagePool pagePool ) { this.mHeistTaskPagePool = pagePool; return this; } @Override public TaskSchedulerStrategy setPageDivider( PageDivider divider ) { this.mPageDivider = divider; return this; } @Override public TaskSchedulerStrategy setPageRecycleStrategy( PageRecycleStrategy strategy ) { this.mPageRecycleStrategy = strategy; return this; } @Override public TaskProducer formulateProducer() { this.mTaskProducer = new LocalMultiActiveTaskPageProducer( this.mParentHeistium, this.getPageDivider(), this.getMasterPage().getId() + 1 ); return this.mTaskProducer; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/RangedTaskPage.java ================================================ package com.sauron.heist.heistron.scheduler; import com.pinecone.slime.chunk.RangedChunk64; import com.pinecone.slime.chunk.RangedPage64; public class RangedTaskPage extends RangedPage64 implements TaskPage { public RangedTaskPage(){ super(); } public RangedTaskPage( long nStart, long nEnd, long id, RangedChunk64 parent ) { super( nStart, nEnd, id, parent ); } public RangedTaskPage( long nStart, long nEnd, long id ) { super( nStart, nEnd, id ); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/SingleTaskPageConsumer.java ================================================ package com.sauron.heist.heistron.scheduler; import com.pinecone.slime.unitization.MinMaxRange64; public abstract class SingleTaskPageConsumer implements TaskPageConsumer { protected TaskPageProducer mPageProducer; protected PageFrame64ConsumerAdapter mFrame64ConsumerAdapter; protected SingleTaskPageConsumer( TaskPageProducer pageProducer, PageFrame64ConsumerAdapter frame64ConsumerAdapter ) { this.mPageProducer = pageProducer; this.mFrame64ConsumerAdapter = frame64ConsumerAdapter; } @Override public void consume 
() { while ( this.mPageProducer.hasMoreProducts() ) { if( this.mPageProducer.hasTerminateSignal() || Thread.currentThread().isInterrupted() ) { break; } TaskPage page = this.mPageProducer.require(); try{ this.consumeSinglePage( page ); } finally { this.mPageProducer.deactivate( page ); } } } protected void consumeSinglePage( TaskPage page ) { long min = ( (MinMaxRange64)page.getRange()).getMin(); long max = ( (MinMaxRange64)page.getRange()).getMax(); for ( long i = min; i < max; ++i ) { this.mFrame64ConsumerAdapter.consumeById( i ); } } @Override public TaskPageProducer getTaskPageProducer() { return this.mPageProducer; } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/AbstractDownloader.java ================================================ package com.sauron.heist.http; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; import us.codecraft.webmagic.Site; import us.codecraft.webmagic.Task; import us.codecraft.webmagic.selector.Html; public abstract class AbstractDownloader implements PageDownloader { public AbstractDownloader() { } public Html download( String url) { return this.download(url, (String)null); } public Html download( String url, String charset) { Page page = this.download(new Request(url), Site.me().setCharset(charset).toTask()); return page.getHtml(); } /** @deprecated */ @Deprecated protected void onSuccess(Request request) { } protected void onSuccess( Request request, Task task ) { this.onSuccess(request); } /** @deprecated */ @Deprecated protected void onError(Request request) { } protected void onError(Request request, Task task, Throwable e) { this.onError(request); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/GenericHttpClientGenerator.java ================================================ package com.sauron.heist.http; import java.io.IOException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import java.util.Iterator; import java.util.Map; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import org.apache.commons.lang3.JavaVersion; import org.apache.commons.lang3.SystemUtils; import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.client.CookieStore; import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.config.SocketConfig; import org.apache.http.conn.HttpClientConnectionManager; import org.apache.http.conn.socket.ConnectionSocketFactory; import org.apache.http.conn.socket.PlainConnectionSocketFactory; import org.apache.http.conn.ssl.DefaultHostnameVerifier; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.DefaultHttpRequestRetryHandler; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.impl.cookie.BasicClientCookie; import 
org.apache.http.protocol.HttpContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import us.codecraft.webmagic.Site; import us.codecraft.webmagic.downloader.CustomRedirectStrategy; public class GenericHttpClientGenerator implements HttpClientGenerator { private transient Logger logger = LoggerFactory.getLogger( this.getClass() ); private PoolingHttpClientConnectionManager connectionManager; private final ReentrantReadWriteLock generatorLock = new ReentrantReadWriteLock(); private Registry registry; public GenericHttpClientGenerator() { this.initConnectionManager(); } protected void initConnectionManager(){ this.registry = RegistryBuilder.create() .register("http", PlainConnectionSocketFactory.INSTANCE) .register("https", this.buildSSLConnectionSocketFactory()).build(); this.connectionManager = new PoolingHttpClientConnectionManager( this.registry ); this.connectionManager.setDefaultMaxPerRoute(1000); this.connectionManager.setValidateAfterInactivity(10000); } @Override public HttpClientConnectionManager getConnectionManager() { return this.connectionManager; } private SSLConnectionSocketFactory buildSSLConnectionSocketFactory() { try { SSLContext sslContext = this.createIgnoreVerifySSL(); String[] supportedProtocols; if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_11)) { supportedProtocols = new String[]{"SSLv3", "TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"}; } else { supportedProtocols = new String[]{"SSLv3", "TLSv1", "TLSv1.1", "TLSv1.2"}; } this.logger.debug("supportedProtocols: {}", String.join(", ", supportedProtocols)); return new SSLConnectionSocketFactory(sslContext, supportedProtocols, (String[])null, new DefaultHostnameVerifier()); } catch (KeyManagementException e) { this.logger.error("ssl connection fail", e); } catch (NoSuchAlgorithmException e2) { this.logger.error("ssl connection fail", e2); } return SSLConnectionSocketFactory.getSocketFactory(); } private SSLContext createIgnoreVerifySSL() throws NoSuchAlgorithmException, KeyManagementException { X509TrustManager trustManager = new X509TrustManager() { public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { } public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { } public X509Certificate[] getAcceptedIssuers() { return null; } }; SSLContext sc = SSLContext.getInstance("TLS"); sc.init((KeyManager[])null, new TrustManager[]{trustManager}, (SecureRandom)null); return sc; } @Override public GenericHttpClientGenerator setPoolSize( int poolSize ) { this.connectionManager.setMaxTotal(poolSize); return this; } @Override public int getPoolSize(){ return this.connectionManager.getMaxTotal(); } @Override public CloseableHttpClient getClient( Site site ) { return this.generateClient( site, true ); } @Override public CloseableHttpClient getClient( Site site, boolean bPooled ) { return this.generateClient( site, bPooled ); } protected CloseableHttpClient generateClient( Site site, boolean bPooled ) { this.generatorLock.readLock().lock(); try{ HttpClientBuilder httpClientBuilder = HttpClients.custom(); if( bPooled ) { httpClientBuilder.setConnectionManager( this.connectionManager ); } if ( site.getUserAgent() != null ) { httpClientBuilder.setUserAgent(site.getUserAgent()); } else { httpClientBuilder.setUserAgent(""); } if (site.isUseGzip()) { httpClientBuilder.addInterceptorFirst(new HttpRequestInterceptor() { public void process(HttpRequest request, HttpContext context) throws HttpException, IOException { if 
(!request.containsHeader("Accept-Encoding")) { request.addHeader("Accept-Encoding", "gzip"); } } }); } httpClientBuilder.setRedirectStrategy(new CustomRedirectStrategy()); SocketConfig.Builder socketConfigBuilder = SocketConfig.custom(); socketConfigBuilder.setSoKeepAlive(true).setTcpNoDelay(true); socketConfigBuilder.setSoTimeout(site.getTimeOut()); if( bPooled ) { SocketConfig socketConfig = socketConfigBuilder.build(); httpClientBuilder.setDefaultSocketConfig(socketConfig); this.connectionManager.setDefaultSocketConfig( socketConfig ); } httpClientBuilder.setRetryHandler(new DefaultHttpRequestRetryHandler(site.getRetryTimes(), true)); this.generateCookie(httpClientBuilder, site); return httpClientBuilder.build(); } finally { this.generatorLock.readLock().unlock(); } } private void generateCookie( HttpClientBuilder httpClientBuilder, Site site ) { if ( site.isDisableCookieManagement() ) { httpClientBuilder.disableCookieManagement(); } else { CookieStore cookieStore = new BasicCookieStore(); Iterator iterator = site.getCookies().entrySet().iterator(); Map.Entry domainEntry; while( iterator.hasNext() ) { domainEntry = (Map.Entry)iterator.next(); BasicClientCookie cookie = new BasicClientCookie((String)domainEntry.getKey(), (String)domainEntry.getValue()); cookie.setDomain(site.getDomain()); cookieStore.addCookie(cookie); } iterator = site.getAllCookies().entrySet().iterator(); while( iterator.hasNext() ) { domainEntry = (Map.Entry)iterator.next(); Iterator it = ((Map)domainEntry.getValue()).entrySet().iterator(); while( it.hasNext() ) { Map.Entry cookieEntry = (Map.Entry)it.next(); BasicClientCookie cookie = new BasicClientCookie((String)cookieEntry.getKey(), (String)cookieEntry.getValue()); cookie.setDomain((String)domainEntry.getKey()); cookieStore.addCookie(cookie); } } httpClientBuilder.setDefaultCookieStore(cookieStore); } } @Override public void close() { this.generatorLock.writeLock().lock(); try{ this.connectionManager.close(); } finally { this.generatorLock.writeLock().unlock(); } } @Override public void clearPool() { if( this.connectionManager != null ) { this.generatorLock.writeLock().lock(); try{ this.connectionManager.close(); this.initConnectionManager(); } finally { this.generatorLock.writeLock().unlock(); } } } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/HttpBrowserDownloader.java ================================================ package com.sauron.heist.http; import com.pinecone.framework.system.ProxyProvokeHandleException; import org.apache.commons.io.IOUtils; import org.apache.http.HttpResponse; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.util.EntityUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; import us.codecraft.webmagic.Site; import us.codecraft.webmagic.Task; import us.codecraft.webmagic.downloader.HttpClientRequestContext; import us.codecraft.webmagic.downloader.HttpUriRequestConverter; import us.codecraft.webmagic.proxy.Proxy; import us.codecraft.webmagic.proxy.ProxyProvider; import us.codecraft.webmagic.selector.PlainText; import us.codecraft.webmagic.utils.CharsetUtils; import us.codecraft.webmagic.utils.HttpClientUtils; import javax.net.ssl.SSLException; import java.io.IOException; import java.nio.charset.Charset; import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; 
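/*
 * HttpBrowserDownloader (below) caches one pooled CloseableHttpClient per site
 * domain, guarded by a synchronized double-check on a plain HashMap. A minimal
 * thread-safe equivalent of that caching step, shown here with ConcurrentHashMap
 * instead of explicit locking; the generic names are illustrative, not part of
 * this codebase:
 *
 *   import java.util.Map;
 *   import java.util.concurrent.ConcurrentHashMap;
 *   import java.util.function.Function;
 *
 *   final class DomainClientCache<C> {
 *       private final Map<String, C> mClients = new ConcurrentHashMap<>();
 *       private final Function<String, C> mFactory;
 *
 *       DomainClientCache( Function<String, C> factory ) { this.mFactory = factory; }
 *
 *       C get( String szDomain ) {
 *           // computeIfAbsent gives the same create-once-per-domain guarantee as
 *           // the synchronized double-check, without an explicit lock.
 *           return this.mClients.computeIfAbsent( szDomain, this.mFactory );
 *       }
 *   }
 */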
public class HttpBrowserDownloader extends AbstractDownloader { protected Logger logger = LoggerFactory.getLogger(this.getClass()); protected final Map httpClients = new HashMap<>(); protected HttpClientGenerator httpClientGenerator = new GenericHttpClientGenerator(); protected HttpUriRequestConverter httpUriRequestConverter = new HttpUriRequestConverter(); protected ProxyProvider proxyProvider; protected boolean responseHeader = true; protected Task parentTask = null; public HttpBrowserDownloader() { } public HttpBrowserDownloader( Task task ) { this.parentTask = task; } public void setHttpUriRequestConverter( HttpUriRequestConverter httpUriRequestConverter) { this.httpUriRequestConverter = httpUriRequestConverter; } public void setProxyProvider( ProxyProvider proxyProvider ) { this.proxyProvider = proxyProvider; } protected CloseableHttpClient getHttpClient( Site site, boolean bPooled ) { if( !bPooled ) { // Explicit using false. return this.httpClientGenerator.getClient( site, false ); } if ( site == null ) { return this.httpClientGenerator.getClient((Site)null); } else { String domain = site.getDomain(); CloseableHttpClient httpClient = (CloseableHttpClient)this.httpClients.get(domain); if ( httpClient == null ) { synchronized(this) { httpClient = (CloseableHttpClient)this.httpClients.get(domain); if (httpClient == null) { httpClient = this.httpClientGenerator.getClient(site); this.httpClients.put(domain, httpClient); } } } return httpClient; } } @Override public Page download( Request request, Task task ) { return this.download( request, task, true ); } protected Page download0( Request request, Task task, boolean bPooled ) throws IOException { CloseableHttpResponse httpResponse = null; CloseableHttpClient httpClient = this.getHttpClient( task.getSite(), bPooled ); Proxy proxy = this.proxyProvider != null ? this.proxyProvider.getProxy(task) : null; HttpClientRequestContext requestContext = this.httpUriRequestConverter.convert(request, task.getSite(), proxy); Page page = Page.fail(); try { httpResponse = httpClient.execute(requestContext.getHttpUriRequest(), requestContext.getHttpClientContext()); page = this.handleResponse(request, request.getCharset() != null ? 
request.getCharset() : task.getSite().getCharset(), httpResponse, task); this.onSuccess( request, task ); this.logger.info( "downloading page success {}", request.getUrl() ); if( !bPooled ) { try{ httpClient.close(); } catch ( IOException e ) { throw new ProxyProvokeHandleException( e ); } } return page; } finally { if (httpResponse != null) { EntityUtils.consumeQuietly(httpResponse.getEntity()); } if ( this.proxyProvider != null && proxy != null ) { this.proxyProvider.returnProxy(proxy, page, task); } } } @Override public Page download( Request request, Task task, boolean bPooled ) { if ( task != null && task.getSite() != null ) { try{ return this.download0( request, task, bPooled ); } catch ( IOException firstIOE ) { // First try this.httpClientGenerator.getConnectionManager().closeExpiredConnections(); this.httpClientGenerator.getConnectionManager().closeIdleConnections( 0, TimeUnit.SECONDS ); this.logger.info( "First connection {} in error, retrying.", request.getUrl() ); try{ return this.download0( request, task, bPooled ); } catch ( SSLException sse ) { throw new ProxyProvokeHandleException( sse ); } catch ( IOException e ) { this.onError( request, task, e ); this.logger.info( "download page {} error", request.getUrl(), e ); } return Page.fail(); } } else { throw new NullPointerException("task or site can not be null"); } } @Override public void setThread( int threads ) { this.httpClientGenerator.setPoolSize( threads ); } protected Page handleResponse(Request request, String charset, HttpResponse httpResponse, Task task) throws IOException { byte[] bytes = IOUtils.toByteArray(httpResponse.getEntity().getContent()); String contentType = httpResponse.getEntity().getContentType() == null ? "" : httpResponse.getEntity().getContentType().getValue(); Page page = new Page(); page.setBytes(bytes); if (!request.isBinaryContent()) { if (charset == null) { charset = this.getHtmlCharset(contentType, bytes); } page.setCharset(charset); page.setRawText(new String(bytes, charset)); } page.setUrl(new PlainText(request.getUrl())); page.setRequest(request); page.setStatusCode(httpResponse.getStatusLine().getStatusCode()); page.setDownloadSuccess(true); if (this.responseHeader) { page.setHeaders(HttpClientUtils.convertHeaders(httpResponse.getAllHeaders())); } return page; } private String getHtmlCharset(String contentType, byte[] contentBytes) throws IOException { String charset = CharsetUtils.detectCharset(contentType, contentBytes); if (charset == null) { charset = Charset.defaultCharset().name(); this.logger.warn("Charset autodetect failed, use {} as charset. 
Please specify charset in Site.setCharset()", Charset.defaultCharset()); } return charset; } public Logger getLogger() { return this.logger; } @Override public void reset() { this.httpClientGenerator.clearPool(); this.httpClients.clear(); } } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/HttpClientGenerator.java ================================================ package com.sauron.heist.http; import com.sauron.system.Saunut; import org.apache.http.conn.HttpClientConnectionManager; import org.apache.http.impl.client.*; import us.codecraft.webmagic.Site; public interface HttpClientGenerator extends Saunut { GenericHttpClientGenerator setPoolSize( int poolSize ) ; int getPoolSize(); CloseableHttpClient getClient( Site site ); CloseableHttpClient getClient( Site site, boolean bPooled ); void close(); void clearPool(); HttpClientConnectionManager getConnectionManager(); } ================================================ FILE: Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/PageDownloader.java ================================================ package com.sauron.heist.http; import com.sauron.system.Saunut; import us.codecraft.webmagic.Page; import us.codecraft.webmagic.Request; import us.codecraft.webmagic.Task; import us.codecraft.webmagic.downloader.Downloader; public interface PageDownloader extends Downloader, Saunut { @Override Page download( Request request, Task task ); Page download( Request request, Task task, boolean bPooled ); @Override void setThread( int threads ); void reset(); } ================================================ FILE: Saurons/heist-system-schedule/src/main/resources/application.yaml ================================================ # Any private port must start with '75', eg.7580, 7577, etc. 
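# Sections below, as read from the keys themselves: Spring Boot wiring for the
# MySQL datasource, the Redis/Lettuce connection pool and the RabbitMQ broker;
# then the Nonjron system identity and tracer log paths; finally the heist
# scheduler component (retry count on failure, number of heists to run, and the
# name of the rob task to schedule).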
server:
  port: 7580
spring:
  datasource:
    url: jdbc:mysql://node1.nutgit.com:13393/predator?useSSL=false&serverTimezone=UTC
    username: root
    password: root
    driver-class-name: com.mysql.jdbc.Driver
  redis:
    host: localhost
    port: 6379
    password: Genius123
    lettuce:
      pool:
        max-active: 8
        max-idle: 8
        min-idle: 0
        max-wait: 100
        time-between-eviction-runs: 10s
  rabbitmq:
    host: node1.nutgit.com # host name
    port: 1234 # port
    virtual-host: /wolf # virtual host
    username: undefined # username
    password: 1234 # password
nonjron:
  system:
    config:
      serviceId: Nonaron-Kingpin-Prime
      minionName: Nonaron
    tracer:
      consoleTrace: true
      infoTracer: "./system/logs/%s_SysTrace.log"
      errTracer: "./system/logs/%s_SysError.log"
    components:
      heist:
        failure-retry-times: 3
        heist-num: 5
        rob-task-name: douban
================================================ FILE: Saurons/heist-system-schedule/src/test/java/com/Test.java ================================================ package com; public class Test { } ================================================ FILE: Saurons/heist-system-schedule/src/test/java/com/others/TestServgramTritium.java ================================================ package com.others; import com.pinecone.Pinecone; import com.pinecone.framework.system.NonNull; import com.pinecone.framework.system.Nullable; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.config.JSONConfig; import com.sauron.heist.heistron.CascadeHeist; import com.sauron.heist.heistron.Crew; import com.sauron.heist.heistron.HTTPIndexHeist; import com.sauron.heist.heistron.Heist; import com.sauron.heist.heistron.Heistgram; import com.sauron.heist.heistron.Heistotron; import com.pinecone.ulf.util.lang.GenericPreloadClassInspector; import javassist.ClassPool; import javassist.CtClass; import javassist.bytecode.annotation.Annotation; class FakeHeist extends HTTPIndexHeist { public FakeHeist( Heistgram heistron ){ super( heistron ); } public FakeHeist( Heistgram heistron, JSONConfig joConfig ){ super( heistron, joConfig ); } public FakeHeist(Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) { super( heistron, parent, szChildName ); } @Override public Crew newCrew(int nCrewId ) { return null; } @Override public void toRavage(){ super.toRavage(); } @Override public void toStalk(){ } } public class TestServgramTritium { public static void testJavassist() throws Exception{ GenericPreloadClassInspector inspector = new GenericPreloadClassInspector( ClassPool.getDefault() ); String className = "com.sauron.heist.heistron.Heistotron"; Debug.trace( inspector.isImplementedDirectly( className, Heistgram.class ) ); Debug.trace( inspector.isImplemented( className, com.pinecone.hydra.servgram.Servgram.class ) ); Debug.trace( inspector.isExtendedDirectly( Heistgram.class.getName(), com.pinecone.hydra.servgram.Servgram.class ) ); Debug.trace( inspector.isExtended( Heistgram.class.getName(), com.pinecone.framework.system.executum.Processum.class ) ); Debug.trace( inspector.hasOwnAnnotations( Heistotron.class.getName(), new Class[]{ com.pinecone.hydra.servgram.Gram.class } ) ); Debug.trace( inspector.isImplemented( com.others.FakeHeist.class.getName(), Heist.class /*com.pinecone.framework.system.prototype.Pinenut.class*/ ) ); Debug.trace( inspector.isExtended( com.others.FakeHeist.class.getName(), Heist.class ) ); ClassPool pool = ClassPool.getDefault(); CtClass ctClass = pool.get(className); Debug.trace( ctClass.isFrozen() ) ; Annotation[] annotations = inspector.queryVisibleAnnotations( ctClass ); Debug.echo( annotations[0].getMemberValue( "value"
) ); } public static void main(String[] args) throws Exception { // String szJson = FileUtils.readAll("J:/120KWordsPhonetics.json5"); Pinecone.init((Object... cfg) -> { // TestServgramTritium.testClassReader(); TestServgramTritium.testJavassist(); return 0; }, (Object[]) args); } } ================================================ FILE: Saurons/pom.xml ================================================ sauron com.sauron 1.2.7 4.0.0 com.saurons saurons pom 1.2.7 heist-framework-architecture heist-system-schedule heist-http-client-okhttp-suit sauron-core saurye shadow org.springframework.boot spring-boot-starter-web ================================================ FILE: Saurons/sauron-core/pom.xml ================================================ saurons com.saurons 1.2.7 4.0.0 com.sauron.core sauron-core 1.2.7 jar org.apache.maven.plugins maven-compiler-plugin 11 11 11 11 UTF-8 com.pinecone pinecone 2.5.1 com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 com.pinecone.tritium hydra-system-tritium 2.1.0 com.pinecone.slime slime 2.1.0 com.pinecone.ulf ulfhedinn 1.2.1 ================================================ FILE: Saurons/sauron-core/src/main/java/com/sauron/Sauron.java ================================================ package com.sauron; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.tritium.Tritium; import com.sauron.system.SauronKingdom; public class Sauron extends Tritium implements SauronKingdom { public Sauron( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Sauron( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override protected void traceSubsystemWelcomeInfo() { this.pout().print( "------------------------Sauron Framework-----------------------\n" ); this.pout().print( "\u001B[31m\uD83D\uDE08 Bean Sauron Engine, Project.`Manhattan, the Grand Design` \uD83D\uDE08 \u001B[0m\n" ); this.pout().print( "\u001B[32mCthulhu Data-Platform of Bean Nuts Digital IDC \u001B[0m\n" ); this.pout().print( "\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\u001B[0m\n" ); this.pout().print( "---------------------------------------------------------------\n" ); this.pout().print( "Greeting! My name is Sauron, I insight omniscience.\n" ); this.pout().print( "Salve! 
================================================
FILE: Saurons/sauron-core/src/main/java/com/sauron/system/Saunut.java
================================================
package com.sauron.system;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.system.prototype.Prototype;

public interface Saunut extends Pinenut {
    default String toJSONString() {
        return "\"[object " + this.className() + "]\"";
    }

    default String prototypeName() {
        return this.className();
    }

    default String className() {
        return Prototype.prototypeName( this );
    }
}


================================================
FILE: Saurons/sauron-core/src/main/java/com/sauron/system/SauronKingdom.java
================================================
package com.sauron.system;

import com.pinecone.tritium.system.TritiumSystem;

public interface SauronKingdom extends TritiumSystem {
}


================================================
FILE: Skynet/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent><artifactId>sauron</artifactId><groupId>com.sauron</groupId><version>1.2.7</version></parent>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.acorn.skynet</groupId>
    <artifactId>skynet</artifactId>
    <packaging>pom</packaging>
    <version>2.5.1</version>
    <modules>
        <module>skynet-architecture</module>
        <module>skynet-system</module>
        <module>skynet-cloud-deploy</module>
    </modules>
</project>


================================================
FILE: Skynet/skynet-architecture/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent><artifactId>skynet</artifactId><groupId>com.acorn.skynet</groupId><version>2.5.1</version></parent>
    <build><plugins><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-compiler-plugin</artifactId><configuration><source>11</source><target>11</target></configuration></plugin></plugins></build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.acorn.skynet.kernel</groupId>
    <artifactId>skynet-architecture</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties><maven.compiler.source>11</maven.compiler.source><maven.compiler.target>11</maven.compiler.target><project.build.sourceEncoding>UTF-8</project.build.sourceEncoding></properties>
    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>


================================================
FILE: Skynet/skynet-cloud-deploy/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent><artifactId>skynet</artifactId><groupId>com.acorn.skynet</groupId><version>2.5.1</version></parent>
    <build><plugins><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-compiler-plugin</artifactId><configuration><source>11</source><target>11</target></configuration></plugin></plugins></build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.acorn.skynet.deploy</groupId>
    <artifactId>skynet-cloud-deploy</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties><maven.compiler.source>11</maven.compiler.source><maven.compiler.target>11</maven.compiler.target><project.build.sourceEncoding>UTF-8</project.build.sourceEncoding></properties>
    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-service-control</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-device</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>


================================================
FILE: Skynet/skynet-cloud-deploy/src/main/java/com/acorn/skynet/deploy/CloudDeploy.java
================================================
package com.acorn.skynet.deploy;

public interface CloudDeploy {
}


================================================
FILE: Skynet/skynet-cloud-deploy/src/main/java/com/acorn/skynet/deploy/service/CollectiveServiceDeployRegiment.java
================================================
package com.acorn.skynet.deploy.service;

import com.pinecone.framework.system.regime.Regiment;
import com.pinecone.hydra.system.component.Slf4jTraceable;

public interface CollectiveServiceDeployRegiment extends Regiment, Slf4jTraceable {
}
================================================
FILE: Skynet/skynet-cloud-deploy/src/main/java/com/acorn/skynet/deploy/service/SkyCollectiveServiceDeployRegiment.java
================================================
package com.acorn.skynet.deploy.service;

import com.pinecone.framework.util.id.GUID;
import com.pinecone.hydra.deploy.kom.DeployInstrument;
import com.pinecone.hydra.service.registry.server.ServiceManager;
import com.pinecone.hydra.service.registry.event.ServiceRegisterEvent;
import com.pinecone.hydra.service.registry.event.ServiceRegisterEventHandler;
import org.slf4j.Logger;

public class SkyCollectiveServiceDeployRegiment implements CollectiveServiceDeployRegiment {
    protected Logger           mLogger;
    protected ServiceManager   mServiceManager;
    protected DeployInstrument mDeployInstrument;

    public SkyCollectiveServiceDeployRegiment( ServiceManager serviceManager, DeployInstrument deployInstrument ) {
        this.mServiceManager   = serviceManager;
        this.mDeployInstrument = deployInstrument;
        this.initServiceEvent();
    }

    protected void initServiceEvent() {
        this.mServiceManager.addRegisterEventHandler( new ServiceRegisterEventHandler() {
            @Override
            public void fired( long clientId, GUID insId, GUID serviceId, ServiceRegisterEvent event, Object caused ) {
            }
        } );
    }

    @Override
    public Logger getLogger() {
        return this.mLogger;
    }
}


================================================
FILE: Skynet/skynet-system/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent><artifactId>skynet</artifactId><groupId>com.acorn.skynet</groupId><version>2.5.1</version></parent>
    <build><plugins><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-compiler-plugin</artifactId><configuration><source>11</source><target>11</target></configuration></plugin></plugins></build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.acorn.skynet.kernel</groupId>
    <artifactId>skynet-system</artifactId>
    <version>2.1.0</version>
    <packaging>jar</packaging>
    <properties><maven.compiler.source>11</maven.compiler.source><maven.compiler.target>11</maven.compiler.target><project.build.sourceEncoding>UTF-8</project.build.sourceEncoding></properties>
    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>


================================================
FILE: Skynet/skynet-system/src/main/java/com/acorn/skynet/Skynet.java
================================================
package com.acorn.skynet;

import com.acorn.skynet.system.SkynetSubsystem;
import com.pinecone.framework.util.config.PatriarchalConfig;
import com.pinecone.framework.util.io.Tracer;
import com.pinecone.hydra.system.ArchModularizedSubsystem;
import com.pinecone.hydra.system.Hydrogen;
import com.pinecone.hydra.system.component.LogStatuses;

public class Skynet extends ArchModularizedSubsystem implements SkynetSubsystem {
    public Skynet( Hydrogen primarySystem, String name, PatriarchalConfig config ) {
        super( primarySystem, name, config );
    }

    @Override
    protected void traceWelcomeInfo() {
        Tracer console = this.mPrimarySystem.console();
        console.getOut().print( "---------------------------------------------------------------\n" );
        console.getOut().print( "\u001B[31mBean Nuts Acorn Skynet\u001B[0m\n" );
        console.getOut().print( "\u001B[31mSkynet cloud computing infrastructure \u001B[0m\n" );
        console.getOut().print( "\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\u001B[0m\n" );
        console.getOut().print( "---------------------------------------------------------------\n" );
    }

    protected void init() {
        this.getLogger().info( " >>> System Booting..." );
        this.infoLifecycle( " Domain Subsystem Initialization", LogStatuses.StatusStart );
        this.traceWelcomeInfo();
        this.prepare_system_skeleton();
        this.infoLifecycle( " Welcome to the Skynet cloud computing!", LogStatuses.StatusReady );
        this.infoLifecycle( " Domain Subsystem Initialization", LogStatuses.StatusReady );
    }

    protected void prepare_system_skeleton() {
    }

    @Override
    public void vitalize() {
        this.init();
    }

    @Override
    public void terminate() {
    }
}
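Skynet's boot flow is worth calling out: vitalize() is the external entry point, and init() fixes the ordering of banner output, skeleton preparation, and lifecycle logging. A minimal self-contained sketch of that pattern, with hypothetical names and plain slf4j standing in for the Hydra tracer components:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical reduction of the ArchModularizedSubsystem boot flow; not the Hydra API.
abstract class BootOrderedSubsystem {
    protected final Logger logger = LoggerFactory.getLogger( this.getClass() );

    // External entry point; subclasses customize the steps, not the order.
    public final void vitalize() {
        this.logger.info( " >>> System Booting..." );
        this.traceWelcomeInfo();        // banner, as in Skynet.traceWelcomeInfo()
        this.prepareSystemSkeleton();   // hook, empty by default like prepare_system_skeleton()
        this.logger.info( "Domain Subsystem Initialization [READY]" );
    }

    protected abstract void traceWelcomeInfo();

    protected void prepareSystemSkeleton() {
    }
}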
================================================
FILE: Skynet/skynet-system/src/main/java/com/acorn/skynet/system/SkynetSubsystem.java
================================================
package com.acorn.skynet.system;

import com.pinecone.framework.system.ModularizedSubsystem;
import com.pinecone.framework.system.SynergicSystem;
import com.pinecone.framework.system.regime.arch.Lord;
import com.pinecone.hydra.system.component.Slf4jTraceable;

public interface SkynetSubsystem extends SynergicSystem, ModularizedSubsystem, Lord, Slf4jTraceable {
}


================================================
FILE: Sparta/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent><artifactId>sauron</artifactId><groupId>com.sauron</groupId><version>1.2.7</version></parent>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.walnuts.sparta</groupId>
    <artifactId>sparta</artifactId>
    <packaging>pom</packaging>
    <version>2.5.1</version>
    <modules>
        <module>sparta-core-console</module>
        <module>sparta-uofs-service</module>
        <module>sparta-uofs-console</module>
        <module>sparta-uac-console</module>
        <module>sparta-api-uac</module>
        <module>sparta-ucdn-service</module>
        <module>sparta-ucdn-console</module>
        <module>sparta-api-uofs</module>
    </modules>
</project>


================================================
FILE: Sparta/sparta-api-uac/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent><artifactId>sparta</artifactId><groupId>com.walnuts.sparta</groupId><version>2.5.1</version></parent>
    <build><plugins><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-compiler-plugin</artifactId><configuration><source>11</source><target>11</target></configuration></plugin></plugins></build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.walnut.sparta.api.uac</groupId>
    <artifactId>sparta-api-uac</artifactId>
    <version>2.1.0</version>
    <dependencies>
        <dependency><groupId>junit</groupId><artifactId>junit</artifactId><version>3.8.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kom.driver.default</groupId><artifactId>hydra-kom-default-driver</artifactId><version>2.1.0</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone.tritium</groupId><artifactId>hydra-system-tritium</artifactId><version>2.1.0</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.summer.springram</groupId><artifactId>springram</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.sdk.thrift</groupId><artifactId>hydra-lib-thrift-sdk</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>org.springframework.boot</groupId><artifactId>spring-boot-test</artifactId><scope>test</scope></dependency>
        <dependency><groupId>org.testng</groupId><artifactId>testng</artifactId><version>RELEASE</version><scope>test</scope></dependency>
        <dependency><groupId>org.apache.thrift</groupId><artifactId>libthrift</artifactId><version>0.18.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>


================================================
FILE: Sparta/sparta-api-uac/src/main/java/com/walnut/sparta/account/rpc/thrift/AccountIface.java
================================================
package com.walnut.sparta.account.rpc.thrift; /** * Autogenerated by Thrift Compiler (0.18.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.18.0)", date = "2025-01-25") @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public class AccountIface { public interface Iface { public java.lang.String queryNodeByPath(java.lang.String path) throws org.apache.thrift.TException; } public interface AsyncIface { public void queryNodeByPath(java.lang.String path, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException; } public static class Client extends org.apache.thrift.TServiceClient implements Iface { public static class Factory implements org.apache.thrift.TServiceClientFactory { public Factory() {} @Override public Client getClient(org.apache.thrift.protocol.TProtocol prot) { return new Client(prot); } @Override public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) { return new Client(iprot, oprot); } } public Client(org.apache.thrift.protocol.TProtocol prot) { super(prot, prot); } public
Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) { super(iprot, oprot); } @Override public java.lang.String queryNodeByPath(java.lang.String path) throws org.apache.thrift.TException { send_queryNodeByPath(path); return recv_queryNodeByPath(); } public void send_queryNodeByPath(java.lang.String path) throws org.apache.thrift.TException { queryNodeByPath_args args = new queryNodeByPath_args(); args.setPath(path); sendBase("queryNodeByPath", args); } public java.lang.String recv_queryNodeByPath() throws org.apache.thrift.TException { queryNodeByPath_result result = new queryNodeByPath_result(); receiveBase(result, "queryNodeByPath"); if (result.isSetSuccess()) { return result.success; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "queryNodeByPath failed: unknown result"); } } public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface { public static class Factory implements org.apache.thrift.async.TAsyncClientFactory { private org.apache.thrift.async.TAsyncClientManager clientManager; private org.apache.thrift.protocol.TProtocolFactory protocolFactory; public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) { this.clientManager = clientManager; this.protocolFactory = protocolFactory; } @Override public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) { return new AsyncClient(protocolFactory, clientManager, transport); } } public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) { super(protocolFactory, clientManager, transport); } @Override public void queryNodeByPath(java.lang.String path, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { checkReady(); queryNodeByPath_call method_call = new queryNodeByPath_call(path, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class queryNodeByPath_call extends org.apache.thrift.async.TAsyncMethodCall { private java.lang.String path; public queryNodeByPath_call(java.lang.String path, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.path = path; } @Override public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("queryNodeByPath", org.apache.thrift.protocol.TMessageType.CALL, 0)); queryNodeByPath_args args = new queryNodeByPath_args(); args.setPath(path); args.write(prot); prot.writeMessageEnd(); } @Override public java.lang.String getResult() throws org.apache.thrift.TException { if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { throw new java.lang.IllegalStateException("Method call not finished!"); } org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); org.apache.thrift.protocol.TProtocol prot = 
client.getProtocolFactory().getProtocol(memoryTransport); return (new Client(prot)).recv_queryNodeByPath(); } } } public static class Processor extends org.apache.thrift.TBaseProcessor implements org.apache.thrift.TProcessor { private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(Processor.class.getName()); public Processor(I iface) { super(iface, getProcessMap(new java.util.HashMap>())); } protected Processor(I iface, java.util.Map> processMap) { super(iface, getProcessMap(processMap)); } private static java.util.Map> getProcessMap(java.util.Map> processMap) { processMap.put("queryNodeByPath", new queryNodeByPath()); return processMap; } public static class queryNodeByPath extends org.apache.thrift.ProcessFunction { public queryNodeByPath() { super("queryNodeByPath"); } @Override public queryNodeByPath_args getEmptyArgsInstance() { return new queryNodeByPath_args(); } @Override protected boolean isOneway() { return false; } @Override protected boolean rethrowUnhandledExceptions() { return false; } @Override public queryNodeByPath_result getResult(I iface, queryNodeByPath_args args) throws org.apache.thrift.TException { queryNodeByPath_result result = new queryNodeByPath_result(); result.success = iface.queryNodeByPath(args.path); return result; } } } public static class AsyncProcessor extends org.apache.thrift.TBaseAsyncProcessor { private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(AsyncProcessor.class.getName()); public AsyncProcessor(I iface) { super(iface, getProcessMap(new java.util.HashMap>())); } protected AsyncProcessor(I iface, java.util.Map> processMap) { super(iface, getProcessMap(processMap)); } private static java.util.Map> getProcessMap(java.util.Map> processMap) { processMap.put("queryNodeByPath", new queryNodeByPath()); return processMap; } public static class queryNodeByPath extends org.apache.thrift.AsyncProcessFunction { public queryNodeByPath() { super("queryNodeByPath"); } @Override public queryNodeByPath_args getEmptyArgsInstance() { return new queryNodeByPath_args(); } @Override public org.apache.thrift.async.AsyncMethodCallback getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) { final org.apache.thrift.AsyncProcessFunction fcall = this; return new org.apache.thrift.async.AsyncMethodCallback() { @Override public void onComplete(java.lang.String o) { queryNodeByPath_result result = new queryNodeByPath_result(); result.success = o; try { fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); } catch (org.apache.thrift.transport.TTransportException e) { _LOGGER.error("TTransportException writing to internal frame buffer", e); fb.close(); } catch (java.lang.Exception e) { _LOGGER.error("Exception writing to internal frame buffer", e); onError(e); } } @Override public void onError(java.lang.Exception e) { byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; org.apache.thrift.TSerializable msg; queryNodeByPath_result result = new queryNodeByPath_result(); if (e instanceof org.apache.thrift.transport.TTransportException) { _LOGGER.error("TTransportException inside handler", e); fb.close(); return; } else if (e instanceof org.apache.thrift.TApplicationException) { _LOGGER.error("TApplicationException inside handler", e); msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; msg = (org.apache.thrift.TApplicationException)e; } else { _LOGGER.error("Exception inside handler", e); msgType = 
org.apache.thrift.protocol.TMessageType.EXCEPTION; msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); } try { fcall.sendResponse(fb,msg,msgType,seqid); } catch (java.lang.Exception ex) { _LOGGER.error("Exception writing to internal frame buffer", ex); fb.close(); } } }; } @Override protected boolean isOneway() { return false; } @Override public void start(I iface, queryNodeByPath_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { iface.queryNodeByPath(args.path,resultHandler); } } } @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public static class queryNodeByPath_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("queryNodeByPath_args"); private static final org.apache.thrift.protocol.TField PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("path", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new queryNodeByPath_argsStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new queryNodeByPath_argsTupleSchemeFactory(); public @org.apache.thrift.annotation.Nullable java.lang.String path; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { PATH((short)1, "path"); private static final java.util.Map byName = new java.util.HashMap(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // PATH return PATH; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } @Override public short getThriftFieldId() { return _thriftId; } @Override public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.PATH, new org.apache.thrift.meta_data.FieldMetaData("path", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(queryNodeByPath_args.class, metaDataMap); } public queryNodeByPath_args() { } public queryNodeByPath_args( java.lang.String path) { this(); this.path = path; } /** * Performs a deep copy on other. */ public queryNodeByPath_args(queryNodeByPath_args other) { if (other.isSetPath()) { this.path = other.path; } } @Override public queryNodeByPath_args deepCopy() { return new queryNodeByPath_args(this); } @Override public void clear() { this.path = null; } @org.apache.thrift.annotation.Nullable public java.lang.String getPath() { return this.path; } public queryNodeByPath_args setPath(@org.apache.thrift.annotation.Nullable java.lang.String path) { this.path = path; return this; } public void unsetPath() { this.path = null; } /** Returns true if field path is set (has been assigned a value) and false otherwise */ public boolean isSetPath() { return this.path != null; } public void setPathIsSet(boolean value) { if (!value) { this.path = null; } } @Override public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case PATH: if (value == null) { unsetPath(); } else { setPath((java.lang.String)value); } break; } } @org.apache.thrift.annotation.Nullable @Override public java.lang.Object getFieldValue(_Fields field) { switch (field) { case PATH: return getPath(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ @Override public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case PATH: return isSetPath(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof queryNodeByPath_args) return this.equals((queryNodeByPath_args)that); return false; } public boolean equals(queryNodeByPath_args that) { if (that == null) return false; if (this == that) return true; boolean this_present_path = true && this.isSetPath(); boolean that_present_path = true && that.isSetPath(); if (this_present_path || that_present_path) { if (!(this_present_path && that_present_path)) return false; if (!this.path.equals(that.path)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetPath()) ? 
131071 : 524287); if (isSetPath()) hashCode = hashCode * 8191 + path.hashCode(); return hashCode; } @Override public int compareTo(queryNodeByPath_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetPath(), other.isSetPath()); if (lastComparison != 0) { return lastComparison; } if (isSetPath()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.path, other.path); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable @Override public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } @Override public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("queryNodeByPath_args("); boolean first = true; sb.append("path:"); if (this.path == null) { sb.append("null"); } else { sb.append(this.path); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class queryNodeByPath_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public queryNodeByPath_argsStandardScheme getScheme() { return new queryNodeByPath_argsStandardScheme(); } } private static class queryNodeByPath_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme { @Override public void read(org.apache.thrift.protocol.TProtocol iprot, queryNodeByPath_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // PATH if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.path = iprot.readString(); struct.setPathIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot, queryNodeByPath_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.path != null) { oprot.writeFieldBegin(PATH_FIELD_DESC); oprot.writeString(struct.path); oprot.writeFieldEnd(); } 
oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class queryNodeByPath_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public queryNodeByPath_argsTupleScheme getScheme() { return new queryNodeByPath_argsTupleScheme(); } } private static class queryNodeByPath_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme { @Override public void write(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetPath()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.isSetPath()) { oprot.writeString(struct.path); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.path = iprot.readString(); struct.setPathIsSet(true); } } } private static S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } } @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public static class queryNodeByPath_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("queryNodeByPath_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new queryNodeByPath_resultStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new queryNodeByPath_resultTupleSchemeFactory(); public @org.apache.thrift.annotation.Nullable java.lang.String success; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"); private static final java.util.Map byName = new java.util.HashMap(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS return SUCCESS; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } @Override public short getThriftFieldId() { return _thriftId; } @Override public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(queryNodeByPath_result.class, metaDataMap); } public queryNodeByPath_result() { } public queryNodeByPath_result( java.lang.String success) { this(); this.success = success; } /** * Performs a deep copy on other. */ public queryNodeByPath_result(queryNodeByPath_result other) { if (other.isSetSuccess()) { this.success = other.success; } } @Override public queryNodeByPath_result deepCopy() { return new queryNodeByPath_result(this); } @Override public void clear() { this.success = null; } @org.apache.thrift.annotation.Nullable public java.lang.String getSuccess() { return this.success; } public queryNodeByPath_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) { this.success = success; return this; } public void unsetSuccess() { this.success = null; } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean isSetSuccess() { return this.success != null; } public void setSuccessIsSet(boolean value) { if (!value) { this.success = null; } } @Override public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case SUCCESS: if (value == null) { unsetSuccess(); } else { setSuccess((java.lang.String)value); } break; } } @org.apache.thrift.annotation.Nullable @Override public java.lang.Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return getSuccess(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ @Override public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case SUCCESS: return isSetSuccess(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof queryNodeByPath_result) return this.equals((queryNodeByPath_result)that); return false; } public boolean equals(queryNodeByPath_result that) { if (that == null) return false; if (this == that) return true; boolean this_present_success = true && this.isSetSuccess(); boolean that_present_success = true && that.isSetSuccess(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = 
hashCode * 8191 + ((isSetSuccess()) ? 131071 : 524287); if (isSetSuccess()) hashCode = hashCode * 8191 + success.hashCode(); return hashCode; } @Override public int compareTo(queryNodeByPath_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetSuccess(), other.isSetSuccess()); if (lastComparison != 0) { return lastComparison; } if (isSetSuccess()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable @Override public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } @Override public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("queryNodeByPath_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class queryNodeByPath_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public queryNodeByPath_resultStandardScheme getScheme() { return new queryNodeByPath_resultStandardScheme(); } } private static class queryNodeByPath_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme { @Override public void read(org.apache.thrift.protocol.TProtocol iprot, queryNodeByPath_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.success = iprot.readString(); struct.setSuccessIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot, queryNodeByPath_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { 
oprot.writeFieldBegin(SUCCESS_FIELD_DESC); oprot.writeString(struct.success); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class queryNodeByPath_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public queryNodeByPath_resultTupleScheme getScheme() { return new queryNodeByPath_resultTupleScheme(); } } private static class queryNodeByPath_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme { @Override public void write(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetSuccess()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.isSetSuccess()) { oprot.writeString(struct.success); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.success = iprot.readString(); struct.setSuccessIsSet(true); } } } private static S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } } }
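AccountIface above is a standard Thrift 0.18 service stub, so it can be driven with the stock libthrift client plumbing. A minimal sketch of a synchronous call, assuming a blocking TBinaryProtocol server; the host, port, and query path below are placeholders, not values pinned anywhere in this repo:

import com.walnut.sparta.account.rpc.thrift.AccountIface;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class AccountIfaceClientDemo {
    public static void main( String[] args ) throws Exception {
        // "localhost"/9090 are illustrative; substitute the real service endpoint.
        TTransport transport = new TSocket( "localhost", 9090 );
        try {
            transport.open();
            AccountIface.Client client = new AccountIface.Client( new TBinaryProtocol( transport ) );
            // Single round trip: send queryNodeByPath_args, receive queryNodeByPath_result.
            System.out.println( client.queryNodeByPath( "/sparta/account/example" ) );
        } finally {
            transport.close();
        }
    }
}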
================================================
FILE: Sparta/sparta-api-uac/src/main/java/com/walnut/sparta/account/rpc/wolfmc/xx.java
================================================
package com.walnut.sparta.account.rpc.wolfmc;

public class xx {
}


================================================
FILE: Sparta/sparta-api-uac/src/main/java/com/walnut/sparta/account/xxx.java
================================================
package com.walnut.sparta.account;

public class xxx {
}


================================================
FILE: Sparta/sparta-api-uac/src/test/java/org/example/AppTest.java
================================================
package org.example;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Unit test for simple App.
 */
public class AppTest extends TestCase {
    /**
     * Create the test case
     *
     * @param testName name of the test case
     */
    public AppTest( String testName ) {
        super( testName );
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite() {
        return new TestSuite( AppTest.class );
    }

    /**
     * Rigorous Test :-)
     */
    public void testApp() {
        assertTrue( true );
    }
}


================================================
FILE: Sparta/sparta-api-uofs/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent><artifactId>sparta</artifactId><groupId>com.walnuts.sparta</groupId><version>2.5.1</version></parent>
    <build><plugins><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-compiler-plugin</artifactId><configuration><source>11</source><target>11</target></configuration></plugin></plugins></build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.walnut.sparta.api.uofs</groupId>
    <artifactId>sparta-api-uofs</artifactId>
    <version>2.1.0</version>
    <dependencies>
        <dependency><groupId>junit</groupId><artifactId>junit</artifactId><version>3.8.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kom.driver.default</groupId><artifactId>hydra-kom-default-driver</artifactId><version>2.1.0</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone.tritium</groupId><artifactId>hydra-system-tritium</artifactId><version>2.1.0</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.summer.springram</groupId><artifactId>springram</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.sdk.thrift</groupId><artifactId>hydra-lib-thrift-sdk</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>org.springframework.boot</groupId><artifactId>spring-boot-test</artifactId><scope>test</scope></dependency>
        <dependency><groupId>org.testng</groupId><artifactId>testng</artifactId><version>RELEASE</version><scope>test</scope></dependency>
        <dependency><groupId>org.apache.thrift</groupId><artifactId>libthrift</artifactId><version>0.18.0</version><scope>compile</scope></dependency>
    </dependencies>
</project>


================================================
FILE: Sparta/sparta-api-uofs/src/main/java/com/walnut/sparta/uofs/thrift/UOFSIface.java
================================================
package com.walnut.sparta.uofs.thrift; /** * Autogenerated by Thrift Compiler (0.18.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.18.0)", date = "2025-03-01") @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public class UOFSIface { public interface Iface { public java.lang.String test(java.lang.String msg) throws org.apache.thrift.TException; } public interface AsyncIface { public void test(java.lang.String msg, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException; } public static class Client extends org.apache.thrift.TServiceClient implements Iface { public static class Factory implements org.apache.thrift.TServiceClientFactory { public Factory() {} @Override public Client getClient(org.apache.thrift.protocol.TProtocol prot) { return new Client(prot); } @Override public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) { return new Client(iprot, oprot); } } public Client(org.apache.thrift.protocol.TProtocol prot) { super(prot, prot); } public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) { super(iprot, oprot); } @Override public java.lang.String test(java.lang.String msg) throws org.apache.thrift.TException { send_test(msg); return recv_test(); } public void send_test(java.lang.String msg) throws org.apache.thrift.TException { test_args args = new test_args(); args.setMsg(msg); sendBase("test", args); } public java.lang.String recv_test() throws org.apache.thrift.TException { test_result result = new test_result(); receiveBase(result, "test"); if (result.isSetSuccess()) { return result.success; } throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "test failed: unknown result"); } } public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface { public static class Factory implements org.apache.thrift.async.TAsyncClientFactory { private org.apache.thrift.async.TAsyncClientManager clientManager; private
org.apache.thrift.protocol.TProtocolFactory protocolFactory; public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) { this.clientManager = clientManager; this.protocolFactory = protocolFactory; } @Override public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) { return new AsyncClient(protocolFactory, clientManager, transport); } } public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) { super(protocolFactory, clientManager, transport); } @Override public void test(java.lang.String msg, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { checkReady(); test_call method_call = new test_call(msg, resultHandler, this, ___protocolFactory, ___transport); this.___currentMethod = method_call; ___manager.call(method_call); } public static class test_call extends org.apache.thrift.async.TAsyncMethodCall { private java.lang.String msg; public test_call(java.lang.String msg, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { super(client, protocolFactory, transport, resultHandler, false); this.msg = msg; } @Override public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("test", org.apache.thrift.protocol.TMessageType.CALL, 0)); test_args args = new test_args(); args.setMsg(msg); args.write(prot); prot.writeMessageEnd(); } @Override public java.lang.String getResult() throws org.apache.thrift.TException { if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { throw new java.lang.IllegalStateException("Method call not finished!"); } org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); return (new Client(prot)).recv_test(); } } } public static class Processor extends org.apache.thrift.TBaseProcessor implements org.apache.thrift.TProcessor { private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(Processor.class.getName()); public Processor(I iface) { super(iface, getProcessMap(new java.util.HashMap>())); } protected Processor(I iface, java.util.Map> processMap) { super(iface, getProcessMap(processMap)); } private static java.util.Map> getProcessMap(java.util.Map> processMap) { processMap.put("test", new test()); return processMap; } public static class test extends org.apache.thrift.ProcessFunction { public test() { super("test"); } @Override public test_args getEmptyArgsInstance() { return new test_args(); } @Override protected boolean isOneway() { return false; } @Override protected boolean rethrowUnhandledExceptions() { return false; } @Override public test_result getResult(I iface, test_args args) throws org.apache.thrift.TException { test_result result = new test_result(); result.success = iface.test(args.msg); return result; } } } public static class AsyncProcessor extends org.apache.thrift.TBaseAsyncProcessor { private static 
final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(AsyncProcessor.class.getName()); public AsyncProcessor(I iface) { super(iface, getProcessMap(new java.util.HashMap>())); } protected AsyncProcessor(I iface, java.util.Map> processMap) { super(iface, getProcessMap(processMap)); } private static java.util.Map> getProcessMap(java.util.Map> processMap) { processMap.put("test", new test()); return processMap; } public static class test extends org.apache.thrift.AsyncProcessFunction { public test() { super("test"); } @Override public test_args getEmptyArgsInstance() { return new test_args(); } @Override public org.apache.thrift.async.AsyncMethodCallback getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) { final org.apache.thrift.AsyncProcessFunction fcall = this; return new org.apache.thrift.async.AsyncMethodCallback() { @Override public void onComplete(java.lang.String o) { test_result result = new test_result(); result.success = o; try { fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); } catch (org.apache.thrift.transport.TTransportException e) { _LOGGER.error("TTransportException writing to internal frame buffer", e); fb.close(); } catch (java.lang.Exception e) { _LOGGER.error("Exception writing to internal frame buffer", e); onError(e); } } @Override public void onError(java.lang.Exception e) { byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; org.apache.thrift.TSerializable msg; test_result result = new test_result(); if (e instanceof org.apache.thrift.transport.TTransportException) { _LOGGER.error("TTransportException inside handler", e); fb.close(); return; } else if (e instanceof org.apache.thrift.TApplicationException) { _LOGGER.error("TApplicationException inside handler", e); msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; msg = (org.apache.thrift.TApplicationException)e; } else { _LOGGER.error("Exception inside handler", e); msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); } try { fcall.sendResponse(fb,msg,msgType,seqid); } catch (java.lang.Exception ex) { _LOGGER.error("Exception writing to internal frame buffer", ex); fb.close(); } } }; } @Override protected boolean isOneway() { return false; } @Override public void start(I iface, test_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { iface.test(args.msg,resultHandler); } } } @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public static class test_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("test_args"); private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new test_argsStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new test_argsTupleSchemeFactory(); public @org.apache.thrift.annotation.Nullable java.lang.String msg; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { MSG((short)1, "msg"); private static final java.util.Map byName = new java.util.HashMap(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // MSG return MSG; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } @Override public short getThriftFieldId() { return _thriftId; } @Override public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(test_args.class, metaDataMap); } public test_args() { } public test_args( java.lang.String msg) { this(); this.msg = msg; } /** * Performs a deep copy on other. 
*/ public test_args(test_args other) { if (other.isSetMsg()) { this.msg = other.msg; } } @Override public test_args deepCopy() { return new test_args(this); } @Override public void clear() { this.msg = null; } @org.apache.thrift.annotation.Nullable public java.lang.String getMsg() { return this.msg; } public test_args setMsg(@org.apache.thrift.annotation.Nullable java.lang.String msg) { this.msg = msg; return this; } public void unsetMsg() { this.msg = null; } /** Returns true if field msg is set (has been assigned a value) and false otherwise */ public boolean isSetMsg() { return this.msg != null; } public void setMsgIsSet(boolean value) { if (!value) { this.msg = null; } } @Override public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case MSG: if (value == null) { unsetMsg(); } else { setMsg((java.lang.String)value); } break; } } @org.apache.thrift.annotation.Nullable @Override public java.lang.Object getFieldValue(_Fields field) { switch (field) { case MSG: return getMsg(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ @Override public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case MSG: return isSetMsg(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof test_args) return this.equals((test_args)that); return false; } public boolean equals(test_args that) { if (that == null) return false; if (this == that) return true; boolean this_present_msg = true && this.isSetMsg(); boolean that_present_msg = true && that.isSetMsg(); if (this_present_msg || that_present_msg) { if (!(this_present_msg && that_present_msg)) return false; if (!this.msg.equals(that.msg)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetMsg()) ? 
131071 : 524287); if (isSetMsg()) hashCode = hashCode * 8191 + msg.hashCode(); return hashCode; } @Override public int compareTo(test_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetMsg(), other.isSetMsg()); if (lastComparison != 0) { return lastComparison; } if (isSetMsg()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable @Override public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } @Override public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("test_args("); boolean first = true; sb.append("msg:"); if (this.msg == null) { sb.append("null"); } else { sb.append(this.msg); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class test_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public test_argsStandardScheme getScheme() { return new test_argsStandardScheme(); } } private static class test_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<test_args> { @Override public void read(org.apache.thrift.protocol.TProtocol iprot, test_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // MSG if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.msg = iprot.readString(); struct.setMsgIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot, test_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.msg != null) { oprot.writeFieldBegin(MSG_FIELD_DESC); oprot.writeString(struct.msg); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class test_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public test_argsTupleScheme getScheme() { return new test_argsTupleScheme(); } } private static class test_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<test_args> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, test_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetMsg()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.isSetMsg()) { oprot.writeString(struct.msg); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, test_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.msg = iprot.readString(); struct.setMsgIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } } @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) public static class test_result implements org.apache.thrift.TBase<test_result, test_result._Fields>, java.io.Serializable, Cloneable, Comparable<test_result> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("test_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new test_resultStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new test_resultTupleSchemeFactory(); public @org.apache.thrift.annotation.Nullable java.lang.String success; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { SUCCESS((short)0, "success"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if it's not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 0: // SUCCESS return SUCCESS; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if it's not found.
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } @Override public short getThriftFieldId() { return _thriftId; } @Override public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(test_result.class, metaDataMap); } public test_result() { } public test_result( java.lang.String success) { this(); this.success = success; } /** * Performs a deep copy on other. */ public test_result(test_result other) { if (other.isSetSuccess()) { this.success = other.success; } } @Override public test_result deepCopy() { return new test_result(this); } @Override public void clear() { this.success = null; } @org.apache.thrift.annotation.Nullable public java.lang.String getSuccess() { return this.success; } public test_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) { this.success = success; return this; } public void unsetSuccess() { this.success = null; } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean isSetSuccess() { return this.success != null; } public void setSuccessIsSet(boolean value) { if (!value) { this.success = null; } } @Override public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case SUCCESS: if (value == null) { unsetSuccess(); } else { setSuccess((java.lang.String)value); } break; } } @org.apache.thrift.annotation.Nullable @Override public java.lang.Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: return getSuccess(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ @Override public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case SUCCESS: return isSetSuccess(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof test_result) return this.equals((test_result)that); return false; } public boolean equals(test_result that) { if (that == null) return false; if (this == that) return true; boolean this_present_success = true && this.isSetSuccess(); boolean that_present_success = true && that.isSetSuccess(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; if (!this.success.equals(that.success)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetSuccess()) ? 
131071 : 524287); if (isSetSuccess()) hashCode = hashCode * 8191 + success.hashCode(); return hashCode; } @Override public int compareTo(test_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetSuccess(), other.isSetSuccess()); if (lastComparison != 0) { return lastComparison; } if (isSetSuccess()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable @Override public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } @Override public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("test_result("); boolean first = true; sb.append("success:"); if (this.success == null) { sb.append("null"); } else { sb.append(this.success); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class test_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public test_resultStandardScheme getScheme() { return new test_resultStandardScheme(); } } private static class test_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<test_result> { @Override public void read(org.apache.thrift.protocol.TProtocol iprot, test_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 0: // SUCCESS if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.success = iprot.readString(); struct.setSuccessIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot, test_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); oprot.writeString(struct.success); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class test_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { @Override public test_resultTupleScheme getScheme() { return new test_resultTupleScheme(); } } private static class test_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<test_result> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, test_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetSuccess()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); if (struct.isSetSuccess()) { oprot.writeString(struct.success); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, test_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { struct.success = iprot.readString(); struct.setSuccessIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } } }
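For orientation, a minimal client-side sketch of invoking the `test` call whose argument struct (`test_args`, field 1 `msg`) and result struct (`test_result`, field 0 `success`) are generated above. The enclosing generated service class sits above this extract, so `TestService` is a stand-in name here, and the host/port pair is purely illustrative:

import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class TestClientSketch {
    public static void main( String[] args ) throws Exception {
        // Hypothetical endpoint; substitute the real service host and port.
        try ( TTransport transport = new TSocket( "localhost", 9090 ) ) {
            transport.open();
            // test_args carries `msg` on the wire; test_result carries `success` back.
            TestService.Client client = new TestService.Client( new TBinaryProtocol( transport ) );
            System.out.println( client.test( "ping" ) );
        }
    }
}

Note that the `writeObject`/`readObject` hooks above use `TCompactProtocol` only for Java serialization of the structs themselves; the RPC protocol is whatever the server side speaks, `TBinaryProtocol` being a common default.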
================================================ FILE: Sparta/sparta-api-uofs/src/main/java/com/walnut/sparta/uofs/xxx.java ================================================
package com.walnut.sparta.uofs; public class xxx { }
================================================ FILE: Sparta/sparta-api-uofs/src/test/java/cn/ken/AppTest.java ================================================
package cn.ken; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; /** * Unit test for simple App. */ public class AppTest extends TestCase { /** * Create the test case * * @param testName name of the test case */ public AppTest( String testName ) { super( testName ); } /** * @return the suite of tests being tested */ public static Test suite() { return new TestSuite( AppTest.class ); } /** * Rigorous Test :-) */ public void testApp() { assertTrue( true ); } }
================================================ FILE: Sparta/sparta-core-console/pom.xml ================================================
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>sparta</artifactId>
        <groupId>com.walnuts.sparta</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.walnut.sparta</groupId>
    <artifactId>sparta-core-console</artifactId>
    <version>2.1.0</version>
    <dependencies>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-architecture</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kom.driver.default</groupId><artifactId>hydra-kom-default-driver</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.slime</groupId><artifactId>slime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.slime.jelly</groupId><artifactId>jelly</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.ulf</groupId><artifactId>ulfhedinn</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.summer</groupId><artifactId>summer</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.summer.springram</groupId><artifactId>springram</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.tritium</groupId><artifactId>hydra-system-tritium</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.walnut.sailor.sdd</groupId><artifactId>sailor-stream-distribute-sdk</artifactId><version>2.1.0</version></dependency>
        <dependency><groupId>org.springframework.boot</groupId><artifactId>spring-boot-starter-test</artifactId><scope>test</scope></dependency>
        <dependency><groupId>org.mybatis.spring.boot</groupId><artifactId>mybatis-spring-boot-starter</artifactId><version>2.2.2</version></dependency>
        <dependency><groupId>com.pinecone.hydra.storage.uofs.cache</groupId><artifactId>hydra-lib-uofs-cache</artifactId><version>1.2.1</version></dependency>
        <dependency><groupId>com.walnut.odin</groupId><artifactId>odin-architecture</artifactId><version>2.5.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.walnut.odin</groupId><artifactId>odin-framework-runtime</artifactId><version>2.5.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.walnut.odin</groupId><artifactId>odin-framework-conduct</artifactId><version>2.5.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.walnut.odin</groupId><artifactId>odin-mapper-driver</artifactId><version>2.5.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.walnut.ender.system</groupId><artifactId>ender-system-hydra</artifactId><version>2.5.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-service-control</artifactId><version>2.1.0</version><scope>test</scope></dependency>
        <dependency><groupId>org.apache.commons</groupId><artifactId>commons-csv</artifactId><version>1.10.0</version></dependency>
        <dependency><groupId>com.acorn.redqueen.kernel</groupId><artifactId>redqueen-framework-service</artifactId><version>2.1.0</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.sdk.grpc</groupId><artifactId>hydra-lib-grpc-service-sdk</artifactId><version>1.2.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.walnut.odin</groupId><artifactId>odin-system</artifactId><version>2.5.1</version><scope>test</scope></dependency>
    </dependencies>
</project>
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/Sparta.java ================================================
package com.walnut.sparta; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.servgram.Servgram; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.summer.spring.Springron; import java.io.IOException; import java.nio.file.Path; public class Sparta extends Springron implements Slf4jTraceable { public Sparta( String szName, Processum parent, String[] springbootArgs ) { super( szName, parent, springbootArgs ); this.mSpringKernel.setPrimarySources( SpartaBoot.class ); } public Sparta( String szName, Processum parent ) { this( szName, parent, new String[0] ); } @Override protected void loadConfig() { this.mServgramList = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey ); Object dyServgramConf = this.mServgramList.get( this.gramName() ); if( dyServgramConf instanceof String ) { try{ this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) ); } catch ( IOException ignore ) { this.getLogger().info( "[Notice] Spring will use the default config `application.yaml`."
); } } else { this.mServgramConf = this.mServgramList.getChild( this.gramName() ); } } }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/SpartaBoot.java ================================================
package com.walnut.sparta; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.annotation.ComponentScan; // Dummy boot class so that Spring can find the main classpath. @SpringBootApplication public class SpartaBoot { // @PostConstruct // public void init() { // try { // ShadowBoot.shadow.vitalize(); // } // catch ( Exception e ) { // ShadowBoot.shadow.console().error( e.getMessage() ); // } // } // public static void main(String[] args) { // SpringApplication.run(SpartaBoot.class, args); // } }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/config/SpringGlobalJSONConfig.java ================================================
package com.walnut.sparta.config; import java.io.IOException; import com.pinecone.framework.util.uoi.UOI; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.module.SimpleModule; import com.pinecone.framework.util.id.GUID; @Configuration public class SpringGlobalJSONConfig { @Bean public ObjectMapper objectMapper( Jackson2ObjectMapperBuilder builder ) { ObjectMapper mapper = builder.createXmlMapper( false ).build(); SimpleModule module = new SimpleModule(); // Register a serializer for the GUID class module.addSerializer(GUID.class, new JsonSerializer<GUID>() { @Override public void serialize(GUID value, JsonGenerator gen, SerializerProvider serializers) throws IOException { gen.writeString(value.toString()); } }); // Register a serializer for the UOI class module.addSerializer(UOI.class, new JsonSerializer<UOI>() { @Override public void serialize(UOI value, JsonGenerator gen, SerializerProvider serializers) throws IOException { gen.writeString(value.toString()); // assumes the UOI class also provides a suitable toString } }); mapper.registerModule(module); return mapper; } }
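A small usage sketch for the configuration above: once the module is registered, a `GUID` value renders as a flat JSON string rather than an introspected object. The `GuidJsonSketch` class and its method are illustrative only, not part of the repository:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.pinecone.framework.util.id.GUID;
import com.walnut.sparta.config.SpringGlobalJSONConfig;
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;

public class GuidJsonSketch {
    // Serializes any GUID to its toString() form, i.e. a bare JSON string.
    public static String toJson( GUID guid ) throws Exception {
        ObjectMapper mapper = new SpringGlobalJSONConfig().objectMapper( new Jackson2ObjectMapperBuilder() );
        return mapper.writeValueAsString( guid );
    }
}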
"com.pinecone.hydra.entity.ibatis" ); factoryBean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources( "classpath*:mapper/*.xml" )); // 指定Mapper XML的位置 return factoryBean.getObject(); } // @Bean // public SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory sqlSessionFactory) { // return new SqlSessionTemplate(sqlSessionFactory); // } // // @Bean // public PlatformTransactionManager transactionManager(DataSource dataSource) { // return new DataSourceTransactionManager(dataSource); // } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/Dummy.java ================================================ package com.walnut.sparta.services.controller; public class Dummy { } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/FileSystemController.java ================================================ package com.walnut.sparta.services.controller; import javax.annotation.Resource; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.walnut.sparta.system.BasicResultResponse; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/service/uofs/" ) public class FileSystemController { @Resource private KOMFileSystem primaryFileSystem; @GetMapping("/bucket") public BasicResultResponse createBucket(@RequestParam String bucketName){ return BasicResultResponse.success(bucketName); } @GetMapping("/miao") public BasicResultResponse miao(@RequestParam String arg){ return BasicResultResponse.success(arg ); } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/UOFSController.java ================================================ package com.walnut.sparta.services.controller; import com.walnut.sparta.services.dto.updateObjectDto; import com.walnut.sparta.system.BasicResultResponse; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/UOFSController" ) public class UOFSController { public BasicResultResponse updateObject(@RequestBody updateObjectDto dto){ return null; } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/RegistryMetaController.java ================================================ package com.walnut.sparta.services.controller.v2; //import com.walnut.sparta.services.drivers.RegistryMasterManipulatorImpl; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/registryMeta" ) public class RegistryMetaController { /*@Resource private RegistryMasterManipulator registryMasterManipulator; private KOMRegistry KOMRegistry; @PostConstruct public void init() { this.KOMRegistry = new GenericKOMRegistry(null, this.registryMasterManipulator ); } *//** * 添加一条namespace节点 * @param namespaceNode 节点信息 * @return 返回操作情况 *//* @PostMapping("/putNamespaceNode") public BasicResultResponse putNamespaceNode(@RequestBody GenericNamespace namespaceNode){ 
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/Dummy.java ================================================
package com.walnut.sparta.services.controller; public class Dummy { }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/FileSystemController.java ================================================
package com.walnut.sparta.services.controller; import javax.annotation.Resource; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.walnut.sparta.system.BasicResultResponse; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/service/uofs/" ) public class FileSystemController { @Resource private KOMFileSystem primaryFileSystem; @GetMapping("/bucket") public BasicResultResponse createBucket(@RequestParam String bucketName){ return BasicResultResponse.success(bucketName); } @GetMapping("/miao") public BasicResultResponse miao(@RequestParam String arg){ return BasicResultResponse.success(arg ); } }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/UOFSController.java ================================================
package com.walnut.sparta.services.controller; import com.walnut.sparta.services.dto.updateObjectDto; import com.walnut.sparta.system.BasicResultResponse; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/UOFSController" ) public class UOFSController { public BasicResultResponse updateObject(@RequestBody updateObjectDto dto){ return null; } }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/RegistryMetaController.java ================================================
package com.walnut.sparta.services.controller.v2; //import com.walnut.sparta.services.drivers.RegistryMasterManipulatorImpl; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/registryMeta" ) public class RegistryMetaController { /*@Resource private RegistryMasterManipulator registryMasterManipulator; private KOMRegistry KOMRegistry; @PostConstruct public void init() { this.KOMRegistry = new GenericKOMRegistry(null, this.registryMasterManipulator ); } *//** * Add a namespace node * @param namespaceNode node payload * @return operation result *//* @PostMapping("/putNamespaceNode") public BasicResultResponse putNamespaceNode(@RequestBody GenericNamespace namespaceNode){ this.KOMRegistry.put(namespaceNode); return BasicResultResponse.success(); } *//** * Add a config node * @param configNode node payload * @return operation result *//* @PostMapping("/putConfigNode") public BasicResultResponse putConfigNode( @RequestBody ArchConfigNode configNode ){ this.KOMRegistry.put(configNode); return BasicResultResponse.success(); } *//** * Get the path of a node * @param guid node guid * @return the node path *//* @GetMapping("/getPath") public BasicResultResponse getPath( @RequestParam("guid") String guid ){ String path = this.KOMRegistry.getPath( GUIDs.GUID72( guid ) ); return BasicResultResponse.success(path); } *//** * Get node information * @param guid node guid * @return the node information *//* @GetMapping("/getNode") public BasicResultResponse getNode( @RequestParam("guid") String guid ){ TreeNode node = this.KOMRegistry.get( GUIDs.GUID72( guid ) ); return BasicResultResponse.success(node); } *//** * Attach a property to a node * @param key property key * @param guid guid of the owning node * @param value property value * @param type type of the value * @return status code *//* @PostMapping("/insertProperties") public BasicResultResponse insertProperties(@RequestParam("key")String key, @RequestParam("Guid") String guid, @RequestParam("value") String value, @RequestParam("type") String type){ Property genericProperties = Property.newDummy(); genericProperties.setCreateTime(LocalDateTime.now()); genericProperties.setUpdateTime(LocalDateTime.now()); genericProperties.setKey(key); genericProperties.setValue(value); genericProperties.setType(type); this.KOMRegistry.putProperty( genericProperties, GUIDs.GUID72( guid ) ); return BasicResultResponse.success(); } *//** * Remove a node * @param guid node guid * @return status code *//* @DeleteMapping("/remove") public BasicResultResponse remove(@RequestParam("Guid") GUID guid){ this.KOMRegistry.remove( guid ); return BasicResultResponse.success(); } *//** * Resolve a path * @param path the path * @return the resolved node *//* @GetMapping("/queryElement") public BasicResultResponse getNodeByPath( @RequestParam("path") String path ){ TreeNode treeNode = this.KOMRegistry.queryElement(path); return BasicResultResponse.success( treeNode ); } *//** * Attach a text value to a node * @param guid node guid * @param text the text content * @param type the text type * @return status code *//* @PostMapping("/putTextValue") public BasicResultResponse insertTextValue(@RequestParam("guid")String guid, @RequestParam("text") String text, @RequestParam("type") String type){ this.KOMRegistry.putTextValue( GUIDs.GUID72( guid ) ,text,type); return BasicResultResponse.success(); } *//** * Get node information without inheritance * @param guid node guid * @return the node information *//* @GetMapping("/getSelf") public BasicResultResponse getAsRootDepth(@RequestParam("guid") String guid){ return BasicResultResponse.success( this.KOMRegistry.getAsRootDepth( GUIDs.GUID72( guid ) ) ); }*/ }
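The commented-out wiring above suggests how the registry is meant to be assembled. A hedged sketch of that assembly as a standalone component, mirroring the `new GenericKOMRegistry( null, registryMasterManipulator )` construction in the comments (class name and placement are illustrative):

package com.walnut.sparta.services;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import com.pinecone.hydra.registry.GenericKOMRegistry;
import com.pinecone.hydra.registry.KOMRegistry;
import com.pinecone.hydra.registry.source.RegistryMasterManipulator;
import org.springframework.stereotype.Component;

@Component
public class RegistrySketchHolder {
    @Resource
    private RegistryMasterManipulator registryMasterManipulator;

    private KOMRegistry registry;

    @PostConstruct
    public void init() {
        // Same construction the commented-out RegistryMetaController uses.
        this.registry = new GenericKOMRegistry( null, this.registryMasterManipulator );
    }

    public KOMRegistry registry() {
        return this.registry;
    }
}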
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/RegistryTreeController.java ================================================
package com.walnut.sparta.services.controller.v2; public class RegistryTreeController { }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/ScenarioMetaController.java ================================================
package com.walnut.sparta.services.controller.v2; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/ScenarioMeta" ) public class ScenarioMetaController { // @Resource // ScenarioTreeManipulatorSharerImpl scenarioTreeManipulatorSharer; // @Resource // ScenarioMasterManipulatorImpl scenarioMetaManipulatorSharer; // // private DistributedScenarioMetaTree distributedScenarioMetaTree; // // @PostConstruct // public void init() { // this.distributedScenarioMetaTree = new GenericDistributedScenarioMetaTree(null,this.scenarioMetaManipulatorSharer); // } // // /** // * Insert a node // * @param genericNamespaceNode node payload // * @return // */ // @PostMapping("/insert") // public BasicResultResponse insert(@RequestBody GenericNamespaceNode genericNamespaceNode){ // this.distributedScenarioMetaTree.insert(genericNamespaceNode); // return BasicResultResponse.success(); // } // // /** // * Get path information // * @param guid node guid // * @return the path // */ // @GetMapping("/getPath") // public BasicResultResponse getPath(@RequestParam("guid") String guid){ // GUID72 guid72 = new GUID72(guid); // String path = this.distributedScenarioMetaTree.getPath(guid72); // return BasicResultResponse.success(path); // } // // /** // * Get namespace information // * @param guid node guid // * @return the node information // */ // @GetMapping("/getNode") // public BasicResultResponse getNode(@RequestParam("guid") String guid){ // GUID72 guid72 = new GUID72(guid); // TreeNode treeNode = this.distributedScenarioMetaTree.get(guid72); // return BasicResultResponse.success(treeNode); // } // // /** // * Resolve a path // * @param path the path // * @return the resolved node // */ // @GetMapping("/parsePath") // public BasicResultResponse parsePath(@RequestParam("path") String path){ // TreeNode treeNode = this.distributedScenarioMetaTree.parsePath(path); // return BasicResultResponse.success(treeNode); // } // // /** // * Remove a node // * @param guid node guid // * @return the operation result // */ // @DeleteMapping("/remove") // public BasicResultResponse remove(@RequestParam("guid") String guid){ // GUID72 guid72 = new GUID72(guid); // this.distributedScenarioMetaTree.remove(guid72); // return BasicResultResponse.success(); // } }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/ServiceMetaController.java ================================================
//package com.walnut.sparta.services.controller.v2; // //import com.pinecone.framework.util.id.GUID; //import com.pinecone.hydra.service.kom.ServiceInstrument; //import com.pinecone.hydra.service.kom.nodes.GenericApplicationNode; //import com.pinecone.hydra.service.kom.nodes.GenericNamespace; //import com.pinecone.hydra.service.kom.nodes.GenericServiceNode; //import com.pinecone.hydra.service.kom.nodes.ServiceTreeNode; //import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; //import com.pinecone.hydra.service.kom.source.ServiceFamilyTreeManipulator; //import com.pinecone.hydra.service.kom.entity.GenericMetaNodeInstanceFactory; //import com.pinecone.hydra.service.kom.entity.MetaNodeWideEntity; //import com.pinecone.hydra.service.kom.entity.MetaNodeInstanceFactory; //import com.pinecone.ulf.util.id.GUID72; //import com.pinecone.hydra.service.kom.CentralServicesInstrument; //import com.walnut.sparta.services.drivers.ServiceMasterTreeManipulatorImpl; //import com.walnut.sparta.system.BasicResultResponse; //import org.springframework.web.bind.annotation.DeleteMapping; //import org.springframework.web.bind.annotation.GetMapping; //import org.springframework.web.bind.annotation.PathVariable; //import org.springframework.web.bind.annotation.PostMapping; //import org.springframework.web.bind.annotation.RequestBody; //import
org.springframework.web.bind.annotation.RequestMapping; //import org.springframework.web.bind.annotation.RequestParam; //import org.springframework.web.bind.annotation.RestController; // //import javax.annotation.PostConstruct; //import javax.annotation.Resource; // //@RestController //@RequestMapping( "/api/v2/serviceMeta" ) //public class ServiceMetaController { // @Resource // private ServiceMasterManipulator serviceMasterManipulator; // // @Resource // private ServiceMasterTreeManipulatorImpl treeManipulatorSharer; // // private ServiceInstrument servicesTree; // // MetaNodeInstanceFactory metaNodeInstanceFactory; // // @PostConstruct // public void init() { // this.servicesTree = new CentralServicesInstrument( null,serviceMasterManipulator); // this.metaNodeInstanceFactory = new GenericMetaNodeInstanceFactory(this.serviceMasterManipulator,treeManipulatorSharer); // } // // /** // * Render a single node // * @param guid node UUID // * @return the node information // */ // @GetMapping("/queryNodeInfoByGUID/{guid}") // public BasicResultResponse queryNodeInfoByGUID(@PathVariable("guid") String guid ){ // GUID72 guid72 = new GUID72( guid ); // return BasicResultResponse.success(this.servicesTree.getNode( guid72 )); // } // // /** // * Resolve a path back into node information // * @param path node path // * @return the node information // */ // @GetMapping("/queryNodeInfoByPath") // public BasicResultResponse queryNodeInfoByPath( @RequestParam("path") String path ){ // ServiceTreeNode node = this.servicesTree.parsePath( path ); // if( node == null ) { // return BasicResultResponse.error( "No such node" ); // } // return BasicResultResponse.success( this.servicesTree.parsePath(path) ); // } // // /** // * Create a service node // * @param serviceNode service node payload // * @return GUID of the created node // */ // @PostMapping("/putServiceNode") // public BasicResultResponse putServiceNode( @RequestBody GenericServiceNode serviceNode ){ // return BasicResultResponse.success(this.servicesTree.addNode( serviceNode ).toString()); // } // // /** // * Create an application node // * @param applicationNode application node payload // * @return GUID of the created node // */ // @PostMapping("/putApplicationNode") // public BasicResultResponse putApplicationNode( @RequestBody GenericApplicationNode applicationNode ){ // return BasicResultResponse.success(this.servicesTree.addNode(applicationNode).toString()); // } // // /** // * Create a classification node // * @param classificationNode classification node payload // * @return GUID of the created node // */ // @PostMapping("/putClassificationNode") // public BasicResultResponse putClassificationNode( @RequestBody GenericNamespace classificationNode ){ // return BasicResultResponse.success(this.servicesTree.addNode(classificationNode).toString()); // } // // /** // * Delete a node // * @param guid guid of the node // * @return the deletion result // */ // @DeleteMapping("/removeSingleNode") // public BasicResultResponse removeSingleNode(@RequestParam("guid") String guid){ // this.servicesTree.removeNode( new GUID72( guid ) ); // return BasicResultResponse.success(); // } // // /** // * Render full node information (with inheritance) // * @param guid node UUID // * @return the node information // */ // @GetMapping("/queryNodeWideInfo/{guid}") // public BasicResultResponse queryNodeWideInfo(@PathVariable("guid") String guid ){ // GUID72 guid72 = new GUID72( guid ); // return BasicResultResponse.success(this.servicesTree.getWideMeta(guid72)); // } // // /** // * Delete a node (full removal) // * @param guid guid of the node // * @return the removal result // */ // @GetMapping("/remove") // public BasicResultResponse remove(@RequestParam("guid") String guid){ // GUID72 guid72 = new GUID72( guid ); // this.servicesTree.remove(guid72); // return BasicResultResponse.success(); // } // //
/** // * Add an inheritance relation // * @param childNode child node GUID // * @param parentNode parent node GUID // * @return the inheritance result // */ // @PostMapping("/inherit") // public BasicResultResponse inherit(@RequestParam("childNode") GUID childNode,@RequestParam("parentNode") GUID parentNode){ // ServiceFamilyTreeManipulator serviceFamilyTreeManipulator = this.serviceMasterManipulator.getServiceFamilyTreeManipulator(); // serviceFamilyTreeManipulator.insert(childNode,parentNode); // return BasicResultResponse.success(); // } // // /** // * Render path information // * @param guid node UUID // * @return the path // */ // @GetMapping("/getPath/{GUID}") // public BasicResultResponse getPath(@PathVariable("GUID") String guid){ // return BasicResultResponse.success( this.servicesTree.getPath( new GUID72(guid) ) ); // } //}
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/ServiceTreeController.java ================================================
package com.walnut.sparta.services.controller.v2; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/serviceTree" ) public class ServiceTreeController { // @Resource // private ServiceMasterTreeManipulatorImpl treeManipulatorSharer; // // private DistributedTrieTree distributedTrieTree; // // @PostConstruct // public void init() { // this.distributedTrieTree = new GenericDistributedTrieTree( this.treeManipulatorSharer); // } // // /** // * Render path information // * @param guid node UUID // * @return the path // */ // @GetMapping("/getPath/{GUID}") // public BasicResultResponse getPath(@PathVariable("GUID") String guid){ // return BasicResultResponse.success( this.distributedTrieTree.getCachePath( new GUID72(guid) ) ); // } // // /** // * Attach a child node to the given parent // * @param nodeGUID child node GUID // * @param parentGUID parent node GUID // * @return the operation result // */ // @PostMapping("/addNodeToParent") // public BasicResultResponse addNodeToParent(@RequestParam("nodeGUID") String nodeGUID, @RequestParam("parentGUID") String parentGUID ){ // GUID72 nodeGUID72 = new GUID72(nodeGUID); // GUID72 parentGUID72 = new GUID72(parentGUID); // this.distributedTrieTree.affirmOwnedNode(nodeGUID72,parentGUID72); // return BasicResultResponse.success(); // } }
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/TaskMetaController.java ================================================
package com.walnut.sparta.services.controller.v2; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/TaskMeta" ) public class TaskMetaController { // @Resource // private TaskMasterManipulator taskMasterManipulator; // // @Resource // private TaskTreeManipulatorSharerImpl treeManipulatorSharer; // // private DistributedTaskMetaTree distributedTaskMetaTree; // // @PostConstruct // public void init() { // this.distributedTaskMetaTree = new GenericDistributedTaskMetaTree(null,this.taskMasterManipulator); // } // // /** // * Add a node // * @param genericTaskNode node payload // * @return the node guid // */ // @PostMapping("/insert") // public BasicResultResponse insert(@RequestBody GenericTaskNode genericTaskNode){ // GUID insert = this.distributedTaskMetaTree.insert(genericTaskNode); // return BasicResultResponse.success(insert.toString()); // } // // /** // * Get node path information // * @param guid node guid // * @return the path
// */ // @GetMapping("/getPath") // public BasicResultResponse getPath(@RequestParam("guid") String guid){ // GUID72 guid72 = new GUID72(guid); // String path = this.distributedTaskMetaTree.getPath(guid72); // return BasicResultResponse.success(path); // } // // /** // * 获取节点信息 // * @param guid 节点guid // * @return 返回节点信息 // */ // @GetMapping("/getNode") // public BasicResultResponse getNode(@RequestParam("guid") String guid){ // GUID72 guid72 = new GUID72(guid); // TreeNode treeNode = this.distributedTaskMetaTree.get(guid72); // return BasicResultResponse.success(treeNode); // } // // /** // * 解析路径信息 // * @param path 路径 // * @return 返回节点信息 // */ // @GetMapping("/parsePath") // public BasicResultResponse parsePath(@RequestParam("path") String path){ // TreeNode treeNode = this.distributedTaskMetaTree.parsePath(path); // return BasicResultResponse.success(treeNode); // } // // /** // * 移除节点 // * @param guid 节点guid // * @return 返回操作信息 // */ // @DeleteMapping("/remove") // public BasicResultResponse remove(@RequestParam("guid") String guid){ // GUID72 guid72 = new GUID72(guid); // this.distributedTaskMetaTree.remove(guid72); // return BasicResultResponse.success(); // } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/RegistryMasterManipulatorImpl.java ================================================ package com.walnut.sparta.services.drivers; import com.pinecone.hydra.registry.source.RegistryMasterManipulator; import com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator; import com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator; import com.pinecone.hydra.registry.source.RegistryNSNodeManipulator; import com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator; import com.pinecone.hydra.registry.source.RegistryAttributesManipulator; import com.pinecone.hydra.registry.source.RegistryPropertiesManipulator; import com.pinecone.hydra.registry.source.RegistryTextFileManipulator; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import org.springframework.stereotype.Component; import javax.annotation.Resource; @Component public class RegistryMasterManipulatorImpl implements RegistryMasterManipulator { @Resource RegistryConfigNodeManipulator configNodeManipulator; @Resource RegistryNSNodeManipulator namespaceNodeManipulator; @Resource RegistryPropertiesManipulator registryPropertiesManipulator; @Resource RegistryTextFileManipulator registryTextFileManipulator; @Resource RegistryNodeMetaManipulator configNodeMetaManipulator; @Resource RegistryNSNodeMetaManipulator namespaceNodeMetaManipulator; @Resource RegistryAttributesManipulator registryAttributesManipulator; @Resource( type = RegistryMasterTreeManipulatorImpl.class ) KOISkeletonMasterManipulator skeletonMasterManipulator; @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } @Override public RegistryConfigNodeManipulator getConfigNodeManipulator() { return this.configNodeManipulator; } @Override public RegistryNSNodeManipulator getNSNodeManipulator() { return this.namespaceNodeManipulator; } @Override public RegistryPropertiesManipulator getPropertiesManipulator() { return this.registryPropertiesManipulator; } @Override public RegistryTextFileManipulator getTextFileManipulator() { return this.registryTextFileManipulator; } @Override public RegistryNodeMetaManipulator getNodeMetaManipulator() { return this.configNodeMetaManipulator; } @Override 
public RegistryNSNodeMetaManipulator getNSNodeMetaManipulator() { return this.namespaceNodeMetaManipulator; } @Override public RegistryAttributesManipulator getAttributesManipulator() { return this.registryAttributesManipulator; } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/RegistryMasterTreeManipulatorImpl.java ================================================ package com.walnut.sparta.services.drivers; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.registry.ibatis.RegistryNodeOwnerMapper; import com.pinecone.hydra.registry.ibatis.RegistryNodePathCacheMapper; import com.pinecone.hydra.registry.ibatis.RegistryTreeMapper; import org.springframework.stereotype.Component; import javax.annotation.Resource; @Component public class RegistryMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource RegistryNodePathCacheMapper configNodePathMapper; @Resource RegistryNodeOwnerMapper configNodeOwnerManipulator; @Resource RegistryTreeMapper trieTreeManipulator; @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.configNodeOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.configNodePathMapper; } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ScenarioMasterManipulatorImpl.java ================================================ package com.walnut.sparta.services.drivers; import com.pinecone.hydra.scenario.source.NamespaceNodeManipulator; import com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator; import com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator; import com.pinecone.hydra.scenario.source.ScenarioMasterManipulator; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import org.springframework.stereotype.Component; @Component public class ScenarioMasterManipulatorImpl implements ScenarioMasterManipulator { //@Resource NamespaceNodeManipulator namespaceNodeManipulator; //@Resource NamespaceNodeMetaManipulator namespaceNodeMetaManipulator; //@Resource ScenarioCommonDataManipulator scenarioCommonDataManipulator; //@Resource KOISkeletonMasterManipulator koiSkeletonMasterManipulator; //@Override public NamespaceNodeManipulator getNamespaceNodeManipulator() { return this.namespaceNodeManipulator; } @Override public NamespaceNodeMetaManipulator getNSNodeMetaManipulator() { return this.namespaceNodeMetaManipulator; } @Override public ScenarioCommonDataManipulator getScenarioCommonDataManipulator() { return this.scenarioCommonDataManipulator; } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.koiSkeletonMasterManipulator; } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ScenarioTreeManipulatorSharerImpl.java ================================================ package com.walnut.sparta.services.drivers; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import 
com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.scenario.ibatis.ScenarioNodeOwnerMapper; import com.pinecone.hydra.scenario.ibatis.ScenarioNodePathCacheMapper; import com.pinecone.hydra.scenario.ibatis.ScenarioTreeMapper; import org.springframework.stereotype.Component; import javax.annotation.Resource; @Component public class ScenarioTreeManipulatorSharerImpl implements TreeMasterManipulator { @Resource ScenarioTreeMapper scenarioTreeMapper; @Resource ScenarioNodeOwnerMapper scenarioNodeOwnerMapper; @Resource ScenarioNodePathCacheMapper scenarioNodePathMapper; @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.scenarioNodeOwnerMapper; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.scenarioTreeMapper; } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.scenarioNodePathMapper; } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ServiceMasterManipulatorImpl.java ================================================ package com.walnut.sparta.services.drivers; import javax.annotation.Resource; import com.pinecone.framework.system.construction.Structure; import com.pinecone.hydra.service.ibatis.AppNodeMetaMapper; import com.pinecone.hydra.service.ibatis.ApplicationNodeMapper; import com.pinecone.hydra.service.ibatis.ServiceInstanceMapper; import com.pinecone.hydra.service.ibatis.ServiceNamespaceMapper; import com.pinecone.hydra.service.ibatis.NamespaceRulesMapper; import com.pinecone.hydra.service.ibatis.ServiceNodeMetaMapper; import com.pinecone.hydra.service.ibatis.ServiceMetaMapper; import com.pinecone.hydra.service.ibatis.ServiceNodeMapper; import com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator; import com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator; import com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator; import com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator; import com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator; import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; import com.pinecone.hydra.service.kom.source.NodeMetaManipulator; import com.pinecone.hydra.service.kom.source.ServiceMetaManipulator; import com.pinecone.hydra.service.kom.source.ServiceNodeManipulator; import com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.hydra.service.ibatis.ServiceTreeMapper; import com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper; import com.pinecone.hydra.service.ibatis.ServicePathCacheMapper; import org.springframework.stereotype.Component; @Component public class ServiceMasterManipulatorImpl implements ServiceMasterManipulator { @Resource @Structure(type = ServiceTreeMapper.class) private ServiceTreeMapper trieTreeManipulator; @Resource @Structure(type = ServiceNodeMetaMapper.class) private NodeMetaManipulator nodeMetaManipulator; @Resource @Structure(type = ApplicationNodeMapper.class) private ApplicationNodeManipulator applicationNodeManipulator; @Resource @Structure(type = AppNodeMetaMapper.class) private ApplicationMetaManipulator applicationMetaManipulator; @Resource 
@Structure(type = ServiceNodeMapper.class) private ServiceNodeManipulator serviceNodeManipulator; @Resource @Structure(type = ServiceMetaMapper.class) private ServiceMetaManipulator serviceMetaManipulator; @Resource @Structure(type = ServiceNamespaceMapper.class) private ServiceNamespaceManipulator serviceNamespaceManipulator; @Resource @Structure(type = ServiceInstanceMapper.class) private ServiceInstanceManipulator serviceInstanceManipulator; @Resource @Structure(type = NamespaceRulesMapper.class) private NamespaceRulesManipulator namespaceRulesManipulator; @Resource @Structure(type = ServiceNodeOwnerMapper.class) private ServiceNodeOwnerMapper scopeOwnerManipulator; @Resource @Structure(type = ServicePathCacheMapper.class) private ServicePathCacheMapper scopePathManipulator; @Resource( type = ServiceMasterTreeManipulatorImpl.class ) KOISkeletonMasterManipulator skeletonMasterManipulator; @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public NodeMetaManipulator getNodeMetaManipulator() { return this.nodeMetaManipulator; } @Override public ApplicationNodeManipulator getApplicationNodeManipulator() { return this.applicationNodeManipulator; } @Override public ApplicationMetaManipulator getApplicationElementManipulator() { return this.applicationMetaManipulator; } @Override public ServiceNodeManipulator getServiceNodeManipulator() { return this.serviceNodeManipulator; } @Override public ServiceMetaManipulator getServiceMetaManipulator() { return this.serviceMetaManipulator; } @Override public ServiceNamespaceManipulator getNamespaceManipulator() { return this.serviceNamespaceManipulator; } @Override public NamespaceRulesManipulator getNamespaceRulesManipulator() { return this.namespaceRulesManipulator; } @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.scopeOwnerManipulator; } @Override public KOISkeletonMasterManipulator getSkeletonMasterManipulator() { return this.skeletonMasterManipulator; } @Override public ServiceInstanceManipulator getServiceInstanceManipulator() { return this.serviceInstanceManipulator; } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ServiceMasterTreeManipulatorImpl.java ================================================ package com.walnut.sparta.services.drivers; import com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator; import com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator; import com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator; import com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator; import com.pinecone.hydra.service.ibatis.ServiceTreeMapper; import com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper; import com.pinecone.hydra.service.ibatis.ServicePathCacheMapper; import org.springframework.stereotype.Component; import javax.annotation.Resource; @Component public class ServiceMasterTreeManipulatorImpl implements TreeMasterManipulator { @Resource ServicePathCacheMapper scopePathManipulator; @Resource ServiceTreeMapper trieTreeManipulator; @Resource ServiceNodeOwnerMapper scopeOwnerManipulator; @Override public TireOwnerManipulator getTireOwnerManipulator() { return this.scopeOwnerManipulator; } @Override public TrieTreeManipulator getTrieTreeManipulator() { return this.trieTreeManipulator; } @Override public TriePathCacheManipulator getTriePathCacheManipulator() { return this.scopePathManipulator; } } 
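All three `*TreeManipulatorImpl` components above expose the same `TreeMasterManipulator` surface over different MyBatis mappers, and the commented-out v2 controllers construct their instruments from the matching master manipulator. A hedged sketch of that consumption pattern for the service tree (the holder class is illustrative; the construction mirrors the commented-out `ServiceMetaController`):

package com.walnut.sparta.services;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import com.pinecone.hydra.service.kom.CentralServicesInstrument;
import com.pinecone.hydra.service.kom.ServiceInstrument;
import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;
import org.springframework.stereotype.Component;

@Component
public class ServiceTreeSketchHolder {
    @Resource
    private ServiceMasterManipulator serviceMasterManipulator;

    private ServiceInstrument servicesTree;

    @PostConstruct
    public void init() {
        // Mirrors the commented-out controller wiring:
        // new CentralServicesInstrument( null, serviceMasterManipulator ).
        this.servicesTree = new CentralServicesInstrument( null, this.serviceMasterManipulator );
    }

    public ServiceInstrument tree() {
        return this.servicesTree;
    }
}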
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/dto/updateObjectDto.java ================================================ package com.walnut.sparta.services.dto; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.summer.multiparts.MultipartFile; public class updateObjectDto implements Pinenut { private MultipartFile object; private String path; private String volumeGuid; public updateObjectDto() { } public updateObjectDto(MultipartFile object, String path, String volumeGuid) { this.object = object; this.path = path; this.volumeGuid = volumeGuid; } public MultipartFile getObject() { return object; } public void setObject(MultipartFile object) { this.object = object; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public String getVolumeGuid() { return volumeGuid; } public void setVolumeGuid(String volumeGuid) { this.volumeGuid = volumeGuid; } public String toString() { return "updateObjectDto{object = " + object + ", path = " + path + ", volumeGuid = " + volumeGuid + "}"; } } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/mapper/FakeNews.java ================================================ package com.walnut.sparta.services.mapper; public class FakeNews { } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/pojo/Dummy.java ================================================ package com.walnut.sparta.services.pojo; public class Dummy { } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/ServiceNodeService.java ================================================ package com.walnut.sparta.services.service; public interface ServiceNodeService { } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/ServiceTreeService.java ================================================ package com.walnut.sparta.services.service; import com.pinecone.framework.util.id.GUID; public interface ServiceTreeService { void addNodeToParent(GUID nodeGUID,GUID parentGUID); void removeNode(GUID nodeGUID); } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/serviceImpl/ServiceNodeServiceImpl.java ================================================ package com.walnut.sparta.services.service.serviceImpl; import com.walnut.sparta.services.service.ServiceNodeService; public class ServiceNodeServiceImpl implements ServiceNodeService { } ================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/serviceImpl/ServiceTreeServiceImpl.java ================================================ //package com.walnut.sparta.services.service.serviceImpl; // //import com.pinecone.framework.util.Debug; //import com.pinecone.framework.util.id.GUID; //import com.pinecone.framework.util.uoi.UOI; //import com.pinecone.hydra.service.kom.nodes.ServiceTreeNode; //import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator; //import com.pinecone.hydra.unit.udtt.GUIDDistributedTrieNode; //import com.pinecone.hydra.service.ibatis.ServiceTrieTreeMapper; //import 
com.walnut.sparta.services.service.ServiceTreeService; //import org.springframework.stereotype.Service; // //import javax.annotation.PostConstruct; //import javax.annotation.Resource; //import java.util.List; // //@Service // //public class ServiceTreeServiceImpl implements ServiceTreeService { // @Resource // private ServiceTrieTreeMapper trieTreeManipulator; // @Resource // private ServiceMasterManipulator serviceMasterManipulator; // // private MetaNodeOperatorProxy metaNodeOperatorProxy; // // @PostConstruct // public void init() { // this.metaNodeOperatorProxy = new MetaNodeOperatorProxy(this.serviceMasterManipulator); // } // // // @Override // public void addNodeToParent(GUID nodeGUID, GUID parentGUID) { // // attach the node at the given position // this.trieTreeManipulator.insertOwnedNode(nodeGUID,parentGUID); // // after attaching, the node paths must be refreshed: // // recursively visit every node whose path needs updating // upDateAllPath(nodeGUID); // } // // @Override // public void removeNode(GUID nodeGUID) { // // like a folder: deleting the parent removes its children as well // removeAllNode(nodeGUID); // } // // private void removeAllNode(GUID nodeGUID){ // List<GUIDDistributedTrieNode> childNodes = this.trieTreeManipulator.getChild(nodeGUID); // this.trieTreeManipulator.purge(nodeGUID); // this.trieTreeManipulator.removePath(nodeGUID); // if (childNodes==null) return; // for (GUIDDistributedTrieNode guidDistributedTrieNode :childNodes){ // removeNode(guidDistributedTrieNode.getGuid()); // } // } // // private void upDateAllPath(GUID guid){ // updatePath(guid); // List<GUIDDistributedTrieNode> childNodes = this.trieTreeManipulator.getChild(guid); // Debug.trace("Node "+guid+" has children "+childNodes.toString()); // for(GUIDDistributedTrieNode guidDistributedTrieNode :childNodes){ // if (guidDistributedTrieNode !=null){ // upDateAllPath(guidDistributedTrieNode.getGuid()); // } // } // } // private void updatePath(GUID guid){ // GUIDDistributedTrieNode node = this.trieTreeManipulator.getNode(guid); // String nodeName = getNodeName(node); // String pathString=""; // pathString=pathString+nodeName; // while (node.getParentGUIDs() != null){ // for (GUID parentGUID : node.getParentGUIDs()){ // node = this.trieTreeManipulator.getNode(parentGUID); // nodeName = getNodeName(node); // pathString = nodeName + "."
+ pathString; // } // } // this.trieTreeManipulator.updatePath(guid,pathString); // } // private String getNodeName(GUIDDistributedTrieNode node){ // UOI type = node.getType(); // ServiceTreeNode newInstance = (ServiceTreeNode)type.newInstance(); // MetaNodeOperator operator = metaNodeOperatorProxy.getOperator(newInstance.getMetaType()); // ServiceTreeNode serviceTreeNode = operator.get(node.getGuid()); // Debug.trace("Fetched node "+serviceTreeNode); // return serviceTreeNode.getName(); // } //}
================================================ FILE: Sparta/sparta-core-console/src/main/java/com/walnut/sparta/system/BasicResultResponse.java ================================================
package com.walnut.sparta.system; import java.io.Serializable; import org.springframework.http.HttpStatus; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; public class BasicResultResponse<T> implements Pinenut, Serializable { private Integer code = HttpStatus.OK.value(); private String msg; // error message private T data; // payload public static <T> BasicResultResponse<T> success() { BasicResultResponse<T> result = new BasicResultResponse<>(); result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> successMsg( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> success( T object ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.data = object; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> error( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.INTERNAL_SERVER_ERROR.value(); return result; } /** * Getter. * @return code */ public Integer getCode() { return this.code; } /** * Setter. * @param code */ public void setCode(Integer code) { this.code = code; } /** * Getter. * @return msg */ public String getMsg() { return this.msg; } /** * Setter. * @param msg */ public void setMsg(String msg) { this.msg = msg; } /** * Getter. * @return data */ public T getData() { return this.data; } /** * Setter. * @param data */ public void setData(T data) { this.data = data; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "code" , this.code ), new KeyValue<>( "msg" , this.msg ), new KeyValue<>( "data" , this.data ) } ); } @Override public String toString() { return this.toJSONString(); } }
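A tiny endpoint sketch showing the wrapper in use with an explicit type argument, now that the class declaration is generic again; the controller class and mapping are illustrative only:

package com.walnut.sparta.services.controller;

import com.walnut.sparta.system.BasicResultResponse;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class PingSketchController {
    @GetMapping( "/api/v2/ping" )
    public BasicResultResponse<String> ping() {
        // Renders as {"code": 200, "msg": null, "data": "pong"}.
        return BasicResultResponse.success( "pong" );
    }
}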
================================================ FILE: Sparta/sparta-core-console/src/main/resources/uid/default-uid-spring.xml ================================================  ================================================ FILE: Sparta/sparta-core-console/src/test/java/UniTrieMaptronTest.java ================================================
import com.pinecone.framework.unit.trie.GenericReparseNode; import com.pinecone.framework.unit.trie.UniTrieMaptron; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.util.Map; import java.util.Set; import static org.junit.jupiter.api.Assertions.*; import java.util.AbstractMap; public class UniTrieMaptronTest { private UniTrieMaptron<String> trieMap; @BeforeEach public void setUp() { trieMap = new UniTrieMaptron<>(); } @Test public void testPutAndGet() { trieMap.put("a/b/c", "value1"); trieMap.put("a/b/d", "value2"); assertEquals("value1", trieMap.get("a/b/c")); assertEquals("value2", trieMap.get("a/b/d")); //assertNull(trieMap.get("a/b")); } @Test public void testPutReference() { trieMap.put("a/b/c", "value1"); //trieMap.putReference("ref1", new GenericReparseNode<>("a/b/c",trieMap)); //assertEquals("value1", trieMap.get("ref1")); } @Test public void testContainsKey() { trieMap.put("a/b/c", "value1"); // assertTrue(trieMap.containsKey("a/b/c")); // assertFalse(trieMap.containsKey("a/b")); } @Test public void testContainsValue() { trieMap.put("a/b/c", "value1"); assertTrue(trieMap.containsValue("value1")); assertFalse(trieMap.containsValue("value2")); } @Test public void testRemove() { trieMap.put("a/b/c", "value1"); assertEquals("value1", trieMap.remove("a/b/c")); assertNull(trieMap.get("a/b/c")); assertNull(trieMap.remove("a/b/c")); // Key already removed } @Test public void testPutAll() { Map<String, String> map = Map.of( "a/b/c", "value1", "x/y/z", "value2" ); trieMap.putAll(map); assertEquals("value1", trieMap.get("a/b/c")); assertEquals("value2", trieMap.get("x/y/z")); } @Test public void testClear() { trieMap.put("a/b/c", "value1"); trieMap.put("x/y/z", "value2"); trieMap.clear(); // assertTrue(trieMap.isEmpty()); // assertNull(trieMap.get("a/b/c")); // assertNull(trieMap.get("x/y/z")); } @Test public void testKeySet() { trieMap.put("a/b/c", "value1"); trieMap.put("x/y/z", "value2"); Set<String> keys = trieMap.keySet(); assertTrue(keys.contains("a/b/c")); assertTrue(keys.contains("x/y/z")); } @Test public void testValues() { trieMap.put("a/b/c", "value1"); trieMap.put("x/y/z", "value2"); assertTrue(trieMap.values().contains("value1")); assertTrue(trieMap.values().contains("value2")); } @Test public void testEntrySet() { // seed the trie with test data trieMap.put("apple", "fruit"); trieMap.put("banana", "fruit"); trieMap.put("car", "vehicle"); trieMap.put("cat", "animal"); // fetch the entrySet Set<Map.Entry<String, String>> entrySet = trieMap.entrySet(); // the entrySet size must match the number of key-value pairs in the trie assertEquals(4, entrySet.size(), "EntrySet should contain 4 entries."); // verify the individual key-value pairs assertTrue(entrySet.contains(new AbstractMap.SimpleEntry<>("apple", "fruit")), "EntrySet should contain ('apple', 'fruit')."); assertTrue(entrySet.contains(new AbstractMap.SimpleEntry<>("banana", "fruit")), "EntrySet should contain ('banana', 'fruit')."); assertTrue(entrySet.contains(new AbstractMap.SimpleEntry<>("car", "vehicle")), "EntrySet should contain ('car', 'vehicle')."); assertTrue(entrySet.contains(new AbstractMap.SimpleEntry<>("cat", "animal")), "EntrySet should contain ('cat', 'animal')."); // remove one pair and make sure the entrySet reflects the change trieMap.remove("cat"); entrySet = trieMap.entrySet(); assertEquals(3, entrySet.size(), "EntrySet should contain 3 entries after removal."); assertFalse(entrySet.contains(new AbstractMap.SimpleEntry<>("cat", "animal")), "EntrySet should not contain ('cat', 'animal') after removal."); } @Test public void testEntrySetIsEmpty() { // the entrySet of a fresh trie must be empty Set<Map.Entry<String, String>> entrySet = trieMap.entrySet(); assertTrue(entrySet.isEmpty(), "EntrySet should be empty for a new Trie."); } }
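A JUnit-free usage sketch distilled from UniTrieMaptronTest above: slash-delimited keys behave like hierarchical map entries. The single value-type parameter UniTrieMaptron<String> is an assumption inferred from the tests (the extraction stripped the generics), so treat the signatures as approximate.

import com.pinecone.framework.unit.trie.UniTrieMaptron;

final class UniTrieMaptronSketch {
    public static void main( String[] args ) {
        // Keys are slash-delimited paths, as exercised by the tests above.
        UniTrieMaptron<String> conf = new UniTrieMaptron<>();
        conf.put( "db/mysql/host", "localhost" );
        conf.put( "db/mysql/port", "3306" );
        System.out.println( conf.get( "db/mysql/host" ) );    // localhost
        System.out.println( conf.remove( "db/mysql/port" ) ); // 3306; a second remove yields null
        System.out.println( conf.containsValue( "3306" ) );   // false once removed
    }
}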
================================================ FILE: Sparta/sparta-core-console/src/test/java/com/ender/TestEnderHydra.java ================================================
package com.ender; import java.util.Map; import java.util.UUID; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.deploy.ibatis.hydranium.DeployMappingDriver; import com.pinecone.hydra.deploy.kom.UniformDeployInstrument; import com.pinecone.hydra.proc.LocalHostedProcess; import com.pinecone.hydra.proc.LocalUProcess; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.event.ProcessEventHandler; import com.pinecone.hydra.proc.image.ArchEntryPointRunnable; import com.pinecone.hydra.proc.image.EntryPointRunnable; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.image.GenericClassImage; import com.pinecone.hydra.proc.image.LocalHostedClassImage; import com.pinecone.hydra.registry.GenericKOMRegistry; import com.pinecone.hydra.registry.KOMRegistry; import com.pinecone.hydra.registry.ibatis.hydranium.RegistryMappingDriver; import com.pinecone.hydra.reign.UnixInstitutionalizedMetaImperiumPrivy; import com.pinecone.hydra.storage.mfs.MappingFileSystem; import com.pinecone.hydra.storage.file.external.ExternalFolder; import com.pinecone.hydra.storage.mfs.NativeMFile; import com.pinecone.hydra.storage.mfs.NativeMappingFileSystem; import com.pinecone.hydra.system.imperium.KernelObjectRootMountPoint; import com.pinecone.hydra.system.imperium.KernelRootMountPoint; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.system.ko.kom.ExpressInstrument; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.ulf.util.guid.i128.GUID128; import com.pinecone.ulf.util.guid.i128.GuidAllocator128V7; import com.walnut.archcraft.ender.EnderHydra; import com.walnut.odin.task.GenericRavenTaskConfig; import com.walnut.odin.task.RavenTaskInstrument; import com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver; class Floki extends EnderHydra { public Floki( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Floki( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { UnixInstitutionalizedMetaImperiumPrivy privy = new UnixInstitutionalizedMetaImperiumPrivy( this, null ); ExpressInstrument instrument = privy.getExpressInstrument(); this.prepareKOMTrees( instrument ); MappingFileSystem mappingFileSystem = new NativeMappingFileSystem( "E:/" ); //MappingFileSystem mappingFileSystem = new NativeMappingFileSystem( "/" ); instrument.directMount( KernelRootMountPoint.Mount.getMountPoint() + "/volE", mappingFileSystem); this.testSimple( instrument ); // this.testProcess( instrument ); } private void testProcess( ExpressInstrument instrument ) throws Exception{ ProcessManager manager =
this.processManager(); //instrument.mount( KernelRootMountPoint.Process.getMountPoint(), manager ); ProcessEventHandler eventHandler = new ProcessEventHandler() { @Override public void fired( EntryPointRunnable runnable, ProcessEvent event ) { Debug.bluef( runnable, event ); } }; ExecutionImage image = new LocalHostedClassImage( "gay", new ArchEntryPointRunnable( eventHandler ) { @Override public int main( Map args ) { Debug.greenfs( "Hello, hi, I am `" + this.ownedProcess().getName() + "`!" ); Debug.greenfs( this.ownedProcess().getPID() ); Debug.greenfs( this.ownedProcess().getLocalPID() ); Debug.greenfs( this.ownedProcess().getEnvironmentVariables() ); Debug.greenfs( this.ownedProcess().getStartupArguments() ); Debug.bluef( this.ownedProcess().getControllableLevel() ); Debug.bluef( this.ownedProcess().getOwnedProcessManager() ); Debug.greenfs( this.ownedProcess().parentProcess() ); return 0; } }, manager ); LocalUProcess process = manager.createLocalHostedProcess( image, null, Map.of( "fuck", new String[]{ "you", "she", "he", "it" } ) ); Debug.redfs( manager.fetchProcesses() ); this.getServgramOrchestrator().add(process); process.start(); this.getServgramOrchestrator().syncWaitingTerminated(); Debug.redfs( manager.fetchProcesses() ); } private void prepareKOMTrees( ExpressInstrument instrument ) { OdinUniformTaskMappingDriver categoryMappingDriver = new OdinUniformTaskMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); RavenTaskInstrument ravenTaskInstrument = new RavenTaskInstrument( categoryMappingDriver, new GenericRavenTaskConfig() ); KOIMappingDriver koiMappingDriver = new RegistryMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); KOMRegistry registry = new GenericKOMRegistry( koiMappingDriver ); DeployMappingDriver deployMappingDriver = new DeployMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); UniformDeployInstrument deployInstrument = new UniformDeployInstrument( deployMappingDriver ); instrument.mount( KernelObjectRootMountPoint.TaskMeta.getMountPoint(), ravenTaskInstrument ); instrument.mount( KernelObjectRootMountPoint.Registry.getMountPoint(), registry ); instrument.mount( KernelObjectRootMountPoint.DeployMeta.getMountPoint(), deployInstrument ); } private void testSimple( ExpressInstrument instrument ) { EntityNode entityNode = instrument.queryNode( "meta/task/test/job/task" ); //Debug.fmp( 2, entityNode ); Debug.fmp( 2, instrument.querySystemKernelObjectPath( entityNode.getGuid() ) ); Debug.fmp( 2, instrument.getMountedInstrument( "meta/task" ) ); Debug.greenfs( instrument.fetchOwnMappingPath() ); Debug.fmp( 2, instrument.queryNode( "conf/registry/game3a/witcher/people/s4/urge" ) ); Debug.fmp( 2, instrument.queryNode( "conf/registry/game3a/witcher/people/s4/urge" ) ); Debug.fmp( 2, instrument.queryNode( "/dev/deploy/root/test/cluster/vm1" ) ); //EntityNode myf = instrument.queryNode( "mnt/volE/Users" ); EntityNode myf = instrument.queryNode( "/mnt/volE/MyFiles" ); Debug.fmp( 2, myf ); NativeMFile myff = (NativeMFile) myf; Debug.fmp( 2, myff.listFiles() ); } } public class TestEnderHydra { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Floki loki = (Floki) Pinecone.sys().getTaskManager().add( new Floki( args, Pinecone.sys() ) 
); loki.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/rpc/TestGrpcService.java ================================================ package com.rpc; import com.acorn.redqueen.service.conduct.RedCollectiveServiceRegiment; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.grpc.client.GrpcAppointClient; import com.pinecone.hydra.grpc.client.GrpcClientConfig; import com.pinecone.hydra.grpc.server.GrpcAppointServer; import com.pinecone.hydra.grpc.server.GrpcServerConfig; import com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver; import com.pinecone.hydra.service.kom.UniformServiceInstrument; import com.pinecone.hydra.service.registry.grpc.client.GrpcServiceClient; import com.pinecone.hydra.service.registry.grpc.server.GrpcServiceAppointServer; import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface; import com.pinecone.hydra.service.registry.server.UniformServiceManager; import com.pinecone.hydra.service.registry.client.HuskyServiceClient; import com.pinecone.hydra.service.registry.dto.ServiceMetaDTO; import com.pinecone.hydra.service.registry.ulf.HuskyServiceAppointServer; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.uma.HuskyDuplexExpress; import com.pinecone.hydra.uma.wolf.WolvesAppointServer; import com.pinecone.hydra.umc.wolf.client.UlfClient; import com.pinecone.hydra.umc.wolf.client.WolfMCClient; import com.pinecone.hydra.umc.wolf.server.WolfMCServer; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.ulf.util.guid.i64.GuidAllocator72V2; import java.util.List; class Brian extends Tritium { public Brian( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Brian( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } public void vitalize () throws Exception { KOIMappingDriver koiMappingDriver = new ServiceMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver ); UniformServiceManager serviceManager = new UniformServiceManager( servicesTree ); GrpcServiceAppointServer grpcServer = new GrpcServiceAppointServer( new GrpcAppointServer( new GrpcServerConfig( new JSONMaptron( "{ port: 5888 }" ) )) ); serviceManager.hookAppointServer(grpcServer); RedCollectiveServiceRegiment serviceRegiment = new RedCollectiveServiceRegiment(this, servicesTree, serviceManager); serviceRegiment.startServiceManage(); GrpcServiceClient client = new GrpcServiceClient( new GrpcAppointClient( new GuidAllocator72V2().nextGUIDi64(), new GrpcClientConfig( new JSONMaptron( "{ host: 'localhost', port: 5888 }" ) ) ), servicesTree.getGuidAllocator() ); client.startService(); testUniformServiceRegister_Proactive(client, serviceManager); } public static void testUniformServiceRegister_Proactive( GrpcServiceClient client, UniformServiceManager serviceManager ) throws Exception { ServiceMetaDTO meta = client.getMetaManipulation().queryServiceMetaByPath("root/test/app/ser"); Debug.bluef("Meta: " + meta); String guid = 
client.getMetaManipulation().evalCreationStatement( "{ root: { test: { app: { metaType: ApplicationElement } } } }"); Debug.bluef("Creation GUID: " + guid); meta = client.getMetaManipulation().queryServiceMetaByPath("root/test/app/test1"); Debug.greenfs( meta ); client.registerService( client.getGuidAllocator().parse(meta.getGuid()), null ); List<ServiceMetaDTO> serviceMetaDTOS = client.getMetaManipulation().fetchServiceInsMetaByServiceId(meta.getGuid()); Debug.bluefs( serviceMetaDTOS ); client.getAppointNodus().close(); } public void vitalize1 () throws Exception { KOIMappingDriver koiMappingDriver = new ServiceMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver ); WolfMCServer wolfKing = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); UniformServiceManager serviceManager = new UniformServiceManager( servicesTree ); serviceManager.hookAppointServer( new HuskyServiceAppointServer( new WolvesAppointServer( wolfKing, HuskyDuplexExpress.class ) )); RedCollectiveServiceRegiment serviceRegiment = new RedCollectiveServiceRegiment(this, servicesTree, serviceManager); serviceRegiment.startServiceManage(); UlfClient ulfClient = new WolfMCClient( new GuidAllocator72V2().nextGUIDi64(), "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ); HuskyServiceClient managerClient = new HuskyServiceClient( ulfClient, servicesTree.getGuidAllocator() ); managerClient.startService(); this.testUniformServiceRegister_Proactive( managerClient ); //this.oldTest( servicesTree ); } public void testUniformServiceRegister_Proactive( HuskyServiceClient managerClient ) throws Exception { DuplexAppointClient client = managerClient.getAppointNodus(); ServiceMetaManipulationIface metaIface = client.getIface(ServiceMetaManipulationIface.class); ServiceMetaDTO meta = metaIface.queryServiceMetaByPath( "root/test/app/ser" ); Debug.greenfs( meta ); String guid = metaIface.evalCreationStatement( "{ root: { test: { app: { metaType: ApplicationElement, alias:as, services: { test1: { metaType: ServiceElement, type: Microservice } } } } } }" ); ServiceMetaDTO meta1 = metaIface.queryServiceMetaByPath( "root/test/app/test1" ); Debug.greenfs( meta1 ); managerClient.registerService( managerClient.getGuidAllocator().parse(meta1.getGuid()), null ); List<ServiceMetaDTO> serviceMetaDTOS = metaIface.fetchServiceInsMetaByServiceId( meta1.getGuid() ); Debug.bluefs( serviceMetaDTOS ); //managerClient.deregister(); client.close(); //Debug.trace(iface.hasOwnedServiceByServiceId( "181e9e6-000395-0000-94" )); } } public class TestGrpcService { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Brian brian = (Brian) Pinecone.sys().getTaskManager().add( new Brian( args, Pinecone.sys() ) ); brian.vitalize(); return 0; }, (Object[]) args ); } }
================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestAccount.java ================================================
package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.account.UniformAccountManager; import com.pinecone.hydra.account.entity.GenericAccount; import
com.pinecone.hydra.account.entity.GenericDomain; import com.pinecone.hydra.account.entity.GenericGroup; import com.pinecone.hydra.account.ibatis.hydranium.UserMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.tritium.Tritium; class Geralt extends Tritium { public Geralt(String[] args, CascadeSystem parent) { this(args, null, parent); } public Geralt(String[] args, String szName, CascadeSystem parent) { super(args, szName, parent); } @Override public void vitalize() throws Exception { KOIMappingDriver koiMappingDriver = new UserMappingDriver( this, (IbatisClient) this.getMiddlewareDirector().getRDBManager().getRDBClientByName("MySQLKingHydranium"), this.getDispenserCenter() ); UniformAccountManager uniformAccountManager = new UniformAccountManager( koiMappingDriver ); //this.testInsert( uniformAccountManager ); this.testQuery( uniformAccountManager ); } public void testInsert( UniformAccountManager uniformAccountManager ){ GenericDomain genericDomain = new GenericDomain(); genericDomain.setName("用户域"); GenericGroup genericGroup = new GenericGroup(); genericGroup.setName("用户组"); GenericAccount genericAccount = new GenericAccount(); genericAccount.setName("用户"); uniformAccountManager.put( genericAccount ); uniformAccountManager.put( genericGroup ); uniformAccountManager.put( genericDomain ); uniformAccountManager.addChildren( genericDomain.getGuid(), genericGroup.getGuid() ); uniformAccountManager.addChildren( genericGroup.getGuid(), genericAccount.getGuid() ); } public void testQuery( UniformAccountManager uniformAccountManager ){ Debug.trace(uniformAccountManager.get(uniformAccountManager.queryGUIDByPath( "用户域/用户组/用户" ))); } } public class TestAccount { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Geralt Geralt = (Geralt) Pinecone.sys().getTaskManager().add( new Geralt( args, Pinecone.sys() ) ); Geralt.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestBucket.java ================================================ package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver; import com.pinecone.hydra.storage.bucket.TitanBucketInstrument; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.tritium.Tritium; class Ken extends Tritium { public Ken( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Ken( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { KOIMappingDriver koiMappingDriver = new BucketMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); TitanBucketInstrument bucketInstrument = new TitanBucketInstrument( koiMappingDriver ); } } public class TestBucket { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Ken Ken = (Ken) Pinecone.sys().getTaskManager().add( new Ken( args, Pinecone.sys() ) ); Ken.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestDeployTree.java 
================================================ package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.id.GuidAllocator; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.deploy.PhysicalHost; import com.pinecone.hydra.deploy.entity.GenericPhysicalHost; import com.pinecone.hydra.deploy.ibatis.hydranium.DeployMappingDriver; import com.pinecone.hydra.deploy.kom.UniformDeployInstrument; import com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement; import com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement; import com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement; import com.pinecone.hydra.deploy.kom.marshaling.DeployJSONDecoder; import com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.tritium.Tritium; import com.pinecone.ulf.util.guid.GUIDs; import com.pinecone.ulf.util.guid.i128.GuidAllocator128; import com.walnut.archcraft.ender.EnderHydra; class Randon extends EnderHydra { public Randon( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Randon( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { KOIMappingDriver koiMappingDriver = new DeployMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); UniformDeployInstrument deployInstrument = new UniformDeployInstrument( koiMappingDriver ); this.testGet( deployInstrument ); } private void testInsert(UniformDeployInstrument instrument) { /* String jsonConfig = "{" + "name: 'dataSyncJob', " + "description: 'Synchronize DB records between clusters', " + "extraInformation: 'retries=3; timeout=5000ms', " + "enable: false" + "}";*/ String jsonConfig = "{" + "name: 'Spark', " + "description: 'Track server health metrics in real-time', " + "extraInformation: 'interval=60s; alertThreshold=90%', " + "enable: false, " + "}"; /*Debug.trace(instrument.queryGUIDByPath("specialTask"));*/ } private void testGet( UniformDeployInstrument instrument ){ /* DeployJSONDecoder decoder = new DeployJSONDecoder( instrument ); decoder.decode( new JSONMaptron( "{ root: { test: { cluster: { metaType: ClusterElement, type:Physic, deployments: { vm1: { metaType: VirtualMachineElement, ipAddress: 192.168.1.1, status: 12222s } } } } } }" ) ); Debug.fmp( 2, instrument.queryElement( "root" ).toJSONObject() ); Debug.greenfs( instrument.queryElement( "root/test/cluster/vm1" ) );*/ PhysicalHostElement physicalHost = new GenericPhysicalHostElement(); physicalHost.setName("testPhysicalHost"); physicalHost.setIpAddress("127.0.0.1"); physicalHost.setHardwareSpecs("Intel i7-7700HQ"); physicalHost.setLocalDomain("localhost"); physicalHost.setWideDomain("wideDomain"); physicalHost.setStatus("OK"); physicalHost.setEnable(true); instrument.put(physicalHost); //Debug.trace(deployInstrument.getPath( GUIDs.GUID72("181e9e4-000395-0000-d4") )); } private void testUpdate( UniformDeployInstrument instrument ) { GenericVirtualMachineElement virtualMachine = new GenericVirtualMachineElement(); virtualMachine.setName("testVirtualMachine08"); virtualMachine.setIpAddress("127.0.0.9"); virtualMachine.setStatus("OK"); 
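// testUpdate targets an existing element: the GUID128 literals set below select the persisted row (guid), its meta record (metaGuid) and the owning host (affiliateHostGuid) before instrument.update( ... ) applies the changed fields in place.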
virtualMachine.setEnable(true); virtualMachine.setDescription("testVirtualMachine009"); virtualMachine.setMetaGuid(GUIDs.GUID128("2261a1a-000377-0000-78")); virtualMachine.setGuid(GUIDs.GUID128("24e2fc4-00016c-0000-dc")); virtualMachine.setAffiliateHostGuid(GUIDs.GUID128("2261a1a-000377-0000-75")); instrument.update( virtualMachine ); } private void testInsertPhysicalHost(UniformDeployInstrument instrument) { /* GenericPhysicalHostElement physicalHost = new GenericPhysicalHostElement(); physicalHost.setName("testPhysicalHost"); physicalHost.setIpAddress("127.0.0.1"); physicalHost.setHardwareSpecs("Intel i7-7700HQ"); physicalHost.setStatus("OK"); physicalHost.setLocalDomain("testDomain"); //physicalHost.setGuid( GUIDs.GUID72("1b05246-0002cc-0001-f1")); *//* instrument.newPhysicalHost(physicalHost);*//* instrument.put( physicalHost ); Debug.info( "physicalHost: " + physicalHost); */ /* GenericQuickElement quickElement = new GenericQuickElement(); quickElement.setTypeName("testQuickElement009"); Debug.trace(instrument.put(quickElement)) ;*/ // test quick element /* Debug.trace(instrument.get(GUIDs.GUID72("2508594-0002eb-0000-c0"))) ; // test virtualElement Debug.trace(instrument.get(GUIDs.GUID72("24e2fc4-00016c-0000-dc"))) ; // test physicalHost Debug.trace(instrument.get(GUIDs.GUID72("2511a12-0003bb-0001-d0"))) ;*/ /* GenericPhysicalHostElement physicalHost = new GenericPhysicalHostElement( instrument); physicalHost.setName("testPhysicalHost"); physicalHost.setIpAddress("127.0.0.1"); physicalHost.setHardwareSpecs("Intel i7-7700HQ"); physicalHost.setStatus("OK"); physicalHost.setLocalDomain("testDomain"); instrument.put( physicalHost );*/ /* Debug.trace(instrument.get(GUIDs.GUID72("2508b12-000080-0000-58"))) ; GenericNamespace namespace = new GenericNamespace(); namespace.setName("testNamespace"); namespace.setDescription("testNamespace"); namespace.setExtraInformation("testNamespace"); instrument.put( namespace );*/ /* TreeNode roodNode = instrument.queryElement("testNamespace" ); Debug.greenfs("Root node info: " + roodNode.evinceTreeNode().toJSONString()); GenericQuickElement taskElement = new GenericQuickElement( new JSONMaptron("{ name: '特殊服务9', parentGuid: '" + roodNode.getGuid() + "' }") ); instrument.put(taskElement);*/ /* Debug.trace( instrument.( GUIDs.GUID72("250e136-0002ab-0001-bc")) ); */ /* TreeNode root = instrument.queryElement("testNamespace"); Debug.trace(instrument.getChildren(root.getGuid())) ;*/ /*Debug.fmp(2, "Full tree structure:\n" + root.toJSONString());*/ /* Debug.trace(instrument.remove();) */ /* GenericQuickElement quickElement = new GenericQuickElement(); quickElement.setName("testQuickElement"); quickElement.setTypeName("testQuickElement01"); quickElement.setDescription("testQuickElement02"); instrument.put( quickElement );*/ /* Debug.trace( instrument.affirmQuick("testQuickElement") ); ElementNode quickElement = instrument.queryElement("testQuickElement"); Debug.trace(quickElement);*/ } private void testInsertVirtualMachine( UniformDeployInstrument instrument ) { GenericVirtualMachineElement virtualMachine = new GenericVirtualMachineElement(); virtualMachine.setName("testVirtualMachine01"); virtualMachine.setIpAddress("127.0.0.5"); virtualMachine.setStatus("OK"); virtualMachine.setEnable(true); virtualMachine.setDescription("testVirtualMachine"); virtualMachine.setMetaGuid(GUIDs.GUID128("2261a1a-000377-0000-78")); virtualMachine.setGuid(GUIDs.GUID128("2261a1a-000377-0000-76")); virtualMachine.setAffiliateHostGuid(GUIDs.GUID128("2261a1a-000377-0000-75")); instrument.put( virtualMachine ); /* 
GenericVirtualMachineElement virtualMachine = new GenericVirtualMachineElement(); virtualMachine = (GenericVirtualMachineElement)instrument.get( GUIDs.GUID72("24e2fc4-00016c-0000-dc")); Debug.trace(virtualMachine);*/ instrument.get(GUIDs.GUID128("24b1e50-000044-0000-50")); /* GenericVirtualMachine virtualMachine = new GenericVirtualMachine(); virtualMachine.setName("VirtualMachine1"); virtualMachine.setIpAddress("192.168.1.1"); virtualMachine.setStatus("OK"); //virtualMachine.setGuid( GUIDs.GUID72("1b05246-0002cc-0001-f2")); //virtualMachine.setAffiliateHostGuid(GUIDs.GUID72("1b05246-0002cc-0001-f3")); instrument.newVirtualMachine(virtualMachine); Debug.info( "virtualMachine: " + virtualMachine); */ /* GenericVirtualMachineElement virtualMachine = new GenericVirtualMachineElement(); virtualMachine.setName("VirtualMachine1"); virtualMachine.setIpAddress("192.168.1.1"); virtualMachine.setStatus("OK"); virtualMachine.setEnabled(true); virtualMachine.setExtraInformation("extraInformation"); virtualMachine.setDescription("description"); virtualMachine.setGuid(GUIDs.GUID72( "1b05246-0002cc-0001-f3")); virtualMachine.setMetaGuid(GUIDs.GUID72("1b05246-0002cc-0001-f4")); instrument.put(virtualMachine); Debug.trace(virtualMachine.toJSONString());*/ } } public class TestDeployTree { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Randon Jesse = (Randon) Pinecone.sys().getTaskManager().add( new Randon( args, Pinecone.sys() ) ); Jesse.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestInnerTree.java ================================================ package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.scenario.ibatis.hydranium.ScenarioMappingDriver; import com.pinecone.hydra.scenario.tree.DistributedScenarioMetaTree; import com.pinecone.hydra.scenario.tree.GenericDistributedScenarioMetaTree; import com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver; import com.pinecone.hydra.service.kom.UniformServiceInstrument; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.ulf.util.guid.GUIDs; import com.pinecone.tritium.Tritium; class LadyGaga extends Tritium { public LadyGaga( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public LadyGaga( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { // Sparta sparta = new Sparta( "Sparta", this ); // sparta.execute(); // // Thread shutdowner = new Thread(()->{ // Debug.sleep( 5000 ); // sparta.terminate(); // }); // //shutdowner.start(); // // this.getTaskManager().add( sparta ); // this.getTaskManager().syncWaitingTerminated(); } private void testService(){ KOIMappingDriver koiMappingDriver = new ServiceMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); UniformServiceInstrument distributedScopeServiceTree = new UniformServiceInstrument(koiMappingDriver); //Debug.trace(distributedScopeServiceTree.getNode(GUIDs.GUID72("f83ccfc-0002f9-0000-b4")).toString()); Debug.trace(distributedScopeServiceTree.getPath(GUIDs.GUID128("f83ccfc-0002f9-0000-b4"))); } private void testScenario(){ KOIMappingDriver 
koiMappingDriver = new ScenarioMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); DistributedScenarioMetaTree distributedScenarioMetaTree = new GenericDistributedScenarioMetaTree(koiMappingDriver); // GenericNamespaceNode genericNamespaceNode = new GenericNamespaceNode(); // genericNamespaceNode.setName("瘟疫公司"); // genericNamespaceNode.setNamespaceNodeMeta(new GenericNamespaceNodeMeta()); // genericNamespaceNode.setScenarioCommonData(new GenericScenarioCommonData()); // distributedScenarioMetaTree.insert(genericNamespaceNode); distributedScenarioMetaTree.get(GUIDs.GUID128("1f5bced8-000315-0002-70")); } } public class TestInnerTree { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ LadyGaga ladyGaga = (LadyGaga) Pinecone.sys().getTaskManager().add( new LadyGaga( args, Pinecone.sys() ) ); ladyGaga.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestKOMKing.java ================================================ package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.storage.mfs.MappingFileSystem; import com.pinecone.hydra.storage.mfs.NativeMappingFileSystem; import com.pinecone.hydra.system.ko.runtime.KernelExpressInstrument; import com.pinecone.hydra.system.ko.runtime.GenericRuntimeInstrumentConfig; import com.pinecone.hydra.unit.imperium.entity.EntityNode; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.walnut.odin.task.GenericRavenTaskConfig; import com.walnut.odin.task.RavenTaskInstrument; import com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver; class Loki extends Tritium { public Loki( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Loki( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { OdinUniformTaskMappingDriver categoryMappingDriver = new OdinUniformTaskMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); RavenTaskInstrument ravenTaskInstrument = new RavenTaskInstrument( categoryMappingDriver, new GenericRavenTaskConfig() ); KernelExpressInstrument kernelExpressInstrument = new KernelExpressInstrument( this, "", new GenericRuntimeInstrumentConfig()); kernelExpressInstrument.setTargetingName("task1"); kernelExpressInstrument.mount( "task1/afc", ravenTaskInstrument ); MappingFileSystem mappingFileSystem = new NativeMappingFileSystem( "E:/" ); kernelExpressInstrument.directMount( "direct/test", mappingFileSystem); this.testSimple( kernelExpressInstrument ); } private void testSimple( KernelExpressInstrument instrument ) { EntityNode entityNode = instrument.queryNode( "direct/test/MyFiles" ); Debug.trace( entityNode ); //Debug.fmp( 2, entityNode ); // Debug.fmp( 2, instrument.querySystemKernelObjectPath( entityNode.getGuid() ) ); // // Debug.fmp( 2, instrument.getMountedInstrument( "task1/afc" ) ); } } public class TestKOMKing { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Loki loki = (Loki) Pinecone.sys().getTaskManager().add( new Loki( args, Pinecone.sys() ) ); loki.vitalize(); return 0; }, 
(Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestLayer.java ================================================ package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.layer.ibatis.hydranium.LayerMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.vgraph.layer.AtlasLayer; import com.pinecone.hydra.unit.vgraph.layer.AtlasLayerNamespace; import com.pinecone.hydra.unit.vgraph.layer.VLayerInstrument; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.ulf.util.guid.GUIDs; import java.util.ArrayList; class Louis extends Tritium { public Louis( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Louis( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { KOIMappingDriver koiMappingDriver = new LayerMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); VLayerInstrument vLayerManager = new VLayerInstrument(koiMappingDriver); this.testQuery(vLayerManager); //this.testInsert( vLayerManager ); } public void testInsert(VLayerInstrument vLayerManager) { // AtlasLayer atlasLayer = new AtlasLayer(); // atlasLayer.setName("图层1"); // ArrayList sourceGuids = new ArrayList<>(); // sourceGuids.add( GUIDs.GUID128("01972f7e-1347-7ef4-bdbb-efdc52b7ddf4") ); // sourceGuids.add( GUIDs.GUID128("01972f7e-15c0-72b8-ad49-41fa8027ca32") ); // sourceGuids.add( GUIDs.GUID128("01972f7e-15cd-7dcf-8884-88a587ec2c4e") ); // sourceGuids.add( GUIDs.GUID128("01972f7e-15d9-7565-8ca0-040644fd4493") ); // sourceGuids.add( GUIDs.GUID128("01972f7e-15e4-73be-863f-02fee71bdc6b") ); // sourceGuids.add( GUIDs.GUID128("01972f7e-15ee-71a3-8110-7b61c273e7c7") ); // atlasLayer.setSourceGuids( sourceGuids ); // atlasLayer.setSourceGuids( sourceGuids ); // // ArrayList sinkGuids = new ArrayList<>(); // sinkGuids.add( GUIDs.GUID128("01972f7e-164e-7f80-8e67-a22060a3afd7") ); // atlasLayer.setSinkGuids( sinkGuids ); // // AtlasLayer atlasLayer = new AtlasLayer(); // atlasLayer.setName("图层11"); // // ArrayList sourceGuids = new ArrayList<>(); // sourceGuids.add(GUIDs.GUID128("01972f7e-1347-7ef4-bdbb-efdc52b7ddf4")); // sourceGuids.add(GUIDs.GUID128("01972f7e-15c0-72b8-ad49-41fa8027ca32")); // // atlasLayer.setSourceGuids( sourceGuids ); // // ArrayList sinkGuids = new ArrayList<>(); // sinkGuids.add(GUIDs.GUID128("01972f7e-164e-7f80-8e67-a22060a3afd7")); // atlasLayer.setSinkGuids(sinkGuids); AtlasLayer atlasLayer = new AtlasLayer(); atlasLayer.setName("图层12"); ArrayList sourceGuids = new ArrayList<>(); sourceGuids.add(GUIDs.GUID128("01972f7e-15cd-7dcf-8884-88a587ec2c4e")); sourceGuids.add(GUIDs.GUID128("01972f7e-15d9-7565-8ca0-040644fd4493")); sourceGuids.add(GUIDs.GUID128("01972f7e-15e4-73be-863f-02fee71bdc6b")); sourceGuids.add(GUIDs.GUID128("01972f7e-15ee-71a3-8110-7b61c273e7c7")); atlasLayer.setSourceGuids( sourceGuids ); ArrayList sinkGuids = new ArrayList<>(); sinkGuids.add(GUIDs.GUID128("01972f7e-164e-7f80-8e67-a22060a3afd7")); atlasLayer.setSinkGuids( sinkGuids ); vLayerManager.put(atlasLayer); // AtlasLayerNamespace atlasLayerNamespace = new AtlasLayerNamespace(); // 
atlasLayerNamespace.setName("这是测试命名空间"); // vLayerManager.put( atlasLayerNamespace ); //vLayerManager.addChild( GUIDs.GUID128("2261a1a-000377-0000-78"), GUIDs.GUID128("2261524-000394-0001-fc") ); } public void testQuery( VLayerInstrument vLayerManager ) { // todo: in this case the GUID cannot be resolved by path //Debug.trace(vLayerManager.queryGUIDByPath( "图层1/图层11" )); } } public class TestLayer { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Louis louis = (Louis) Pinecone.sys().getTaskManager().add( new Louis( args, Pinecone.sys() ) ); louis.vitalize(); return 0; }, (Object[]) args ); } }
================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestQueue.java ================================================
package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.queue.ibatis.hydranium.QueueMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue; import com.pinecone.hydra.unit.iqueue.ArchQueueTableMeta; import com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta; import com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.ulf.util.guid.GUIDs; class Chris extends Tritium { public Chris( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Chris( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { KOIMappingDriver koiMappingDriver = new QueueMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); MegaDeflectPriorityQueueMeta queueTableMeta = new ConfigurableMegaDeflectPriorityQueueMeta(); queueTableMeta.setQueueTableName( "hydra_queue_nodes" ); MagnitudeDPQueue dpQueue = new MagnitudeDPQueue(koiMappingDriver, 6L, "segment_name", "测试队列", queueTableMeta); this.testInsert( dpQueue ); //this.testQuery( dpQueue ); } public void testInsert( MagnitudeDPQueue dpQueue ) { GenericQueueElement element = new GenericQueueElement(); element.setObjectGuid(GUIDs.GUID128("22989c2-000225-0000-4c")); element.setPriority(2); dpQueue.pushBack( element ); } public void testQuery( MagnitudeDPQueue dpQueue ) { Debug.trace("Is the queue currently empty: " + dpQueue.isEmpty()); for( int i = 0; i < 3; i++ ) { Debug.trace( "Popped queue head: " + dpQueue.popFront() + ", current position: " + dpQueue.currentPosition() ); Debug.trace( "Current queue size: " + dpQueue.size() ); } } } public class TestQueue { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Chris chris = (Chris) Pinecone.sys().getTaskManager().add( new Chris( args, Pinecone.sys() ) ); chris.vitalize(); return 0; }, (Object[]) args ); } }
================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestRegistry.java ================================================
package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.registry.KOMRegistry; import
com.pinecone.hydra.registry.GenericKOMRegistry; import com.pinecone.hydra.registry.entity.ElementNode; import com.pinecone.hydra.registry.entity.Properties; import com.pinecone.hydra.registry.ibatis.hydranium.RegistryMappingDriver; import com.pinecone.hydra.registry.marshaling.RegistryJSONDecoder; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; class StanMarsh extends Tritium { public StanMarsh( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public StanMarsh( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { KOIMappingDriver koiMappingDriver = new RegistryMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); KOMRegistry registry = new GenericKOMRegistry( koiMappingDriver ); //this.testBasicInsert( registry ); //this.testDeletion( registry ); //this.testDataExtends( registry ); //this.testHardLink( registry ); //this.testCopy( registry ); //this.testMove( registry ); //this.testMisc( registry ); this.testSelector( registry ); //this.testAttributes( registry ); //this.testMarshaling( registry ); } private void testBasicInsert( KOMRegistry registry ) { registry.putProperties( "game/minecraft/wizard1", new JSONMaptron( "{ name:ken, age:22, species:human, job:wizard }" ) ); registry.putProperties( "game/minecraft/sorcerer1", new JSONMaptron( "{ name:dragonking, hp:666, species:dragon, job:sorcerer }" ) ); registry.putProperties( "game/terraria/mob1", new JSONMaptron( "{ name:lural, age:666, species:cthulhu, job:mob }" ) ); registry.putProperties( "game/witcher/mob1", new JSONMaptron( "{ name:witcher_mob1, age:-789, species:'undefined', job:mob }" ) ); registry.putProperties( "game/witcher/mob2", new JSONMaptron( "{ name:wxsdw, age:666, species:cthulhu, job:mob }" ) ); registry.putProperties( "game/witcher/mob3", new JSONMaptron( "{ name:mob3, age:661, species:cthulhu2, job:mob2 }" ) ); registry.putProperties( "game/witcher/people/xxx", new JSONMaptron( "{ name:xxxx, age:999, species:elf, job:warrior }" ) ); registry.putProperties( "game/witcher/people/xx2", new JSONMaptron( "{ name:xxx2, age:992, species:elf, job:warrior }" ) ); registry.putProperties( "game3a/witcher/people/s4/urge", new JSONMaptron( "{ name:darkurge, age:996, species:dragon, job:warrior }" ) ); registry.putTextValue( "game/witcher/jesus", "JSONObject", "{k:p}" ); } private void testDeletion( KOMRegistry registry ) { registry.remove( "game" ); registry.remove("game3a"); // registry.remove( "game/witcher" ); // registry.remove( "game/minecraft" ); // registry.remove("game/terraria"); // registry.remove("game/witcher"); //Debug.fmp( 2, registry.getProperties( registry.queryGUIDByFN( "game.witcher.mob3" ) ).getValue( "name" ) ); //Debug.fmp( 2, registry.get( registry.queryGUIDByFN( "game3a" ) ) ); //registry.remove( "game" ); // registry.affirmProperties( "泰拉瑞亚.灾厄.至尊灾厄" ); // registry.remove( "泰拉瑞亚.灾厄.至尊灾厄" ); // Debug.fmp( 4, registry.getProperties( "泰拉瑞亚.灾厄.至尊灾厄" ).toJSONObject() ); } private void testMove( KOMRegistry registry ) { //registry.move( "game/terraria/mob1", "game/minecraft/mob1" ); //registry.move( "game/minecraft/", "game/terraria/more" ); //registry.move( "game/minecraft/sorcerer1 ", "game/terraria/." 
); //Debug.trace( registry.queryElement( "game/terraria/sorcerer1" ), registry.queryElement( "game/minecraft/sorcerer1" ) ); } private void testCopy( KOMRegistry registry ) { //this.testBasicInsert( registry ); //registry.queryElement("game/minecraft/sorcerer1").evinceProperties().copyTo(registry.queryGUIDByPath("game/minecraft/wizard1")); //registry.getProperties( "game/terraria/mob1" ).copyTo( "game/moregame/mmob4" ); //Debug.trace( registry.getProperties( "game/moregame/mmob4" ) ); //registry.getNamespace( "game3a/witcher/" ).copyTo( registry.affirmNamespace( "game/owo" ).getGuid() ); //registry.getNamespace( "game3a/witcher" ).copyTo( registry.affirmNamespace( "game/owo" ).getGuid() ); //Debug.trace( registry.getNamespace( "game/owo" ).getChildren() ); //registry.copy( "game/minecraft/sorcerer1 ", "game/terraria/." ); //Debug.trace( registry.queryElement( "game/terraria/sorcerer1" ), registry.queryElement( "game/minecraft/sorcerer1" ) ); //registry.copy( "game/minecraft", "game/terraria" ); //Debug.trace( registry.queryElement( "game/terraria" ).evinceNamespace().listItem(), registry.queryElement( "game/minecraft" ).evinceNamespace().listItem() ); //registry.copy( "game/minecraft", "game/terraria/" ); //Debug.trace( registry.queryElement( "game/terraria" ).evinceNamespace().listItem(), registry.queryElement( "game/minecraft" ).evinceNamespace().listItem() ); //registry.copy( "game/minecraft", "game/terraria/new" ); //Debug.trace( registry.queryElement( "game/terraria/new" ).evinceNamespace().listItem(), registry.queryElement( "game/minecraft" ).evinceNamespace().listItem() ); } private void testDataExtends( KOMRegistry registry ) { // Debug.trace(registry.fetchRoot()); // registry.setAffinity(new GUID72("1f7c33d6-000309-0000-f8"),new GUID72("1f7c33d6-0003c1-0000-b0")); // registry.setInheritance(); //Debug.trace(registry.queryGUIDByPath("game/terraria/mob1")); //registry.newLinkTag("game/terraria/mob1","game/minecraft","mob1"); GUID guid = registry.queryGUIDByPath("game/terraria/mob1"); Debug.trace(guid); //registry.putProperties( "game/fiction/character/dragonKing", new JSONMaptron( "{ name:DragonKing, age:666, species:dragon, job:sorcerer, hp:999999 }" ) ); //registry.putProperties( "game/3a/character/red-prince", new JSONMaptron( "{ name: RedPrince, species:lizard, job:warrior, force:777777 }" ) ); //registry.setDataAffinity( "game/3a/character/red-prince", "game/fiction/character/dragonKing" ); GUID kingId = registry.queryGUIDByPath( "game/fiction/character/dragonKing" ); GUID princeId = registry.queryGUIDByPath( "game/3a/character/red-prince" ); Debug.fmp( 2, registry.getProperties( "game/fiction/character/dragonKing" ) ); Debug.fmp( 2, registry.getProperties( princeId ) ); //Debug.fmp( 2, registry.getProperties( "game/fiction/character/dragonKing" ).toJSONObject() ); //Debug.fmp( 2, registry.getProperties( princeId ).toJSONObject() ); Properties princePro = registry.getProperties( princeId ); Debug.trace( princePro.getValue( "hp" ) ); Debug.trace( princePro.getValue( "name" ) ); Debug.trace( princePro.containsKey( "hp" ) ); Debug.trace( princePro.hasOwnProperty( "hp" ) ); // Property property = princePro.get( "name" ); // property.setValue( "RedPrince" ); // princePro.update( property ); // princePro.set( "name", "RedPrince" ); // Debug.trace( princePro.getValue( "name" ) ); //princePro.put( "hpc", 999999 ); //Debug.trace( princePro.getValue( "hp" ) ); //princePro.remove( "hpc" ); //princePro.remove( "hp" ); // registry.newHardLink( "game3a/mix/wizard1", "game/witcher" ); 
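// The checks above contrast containsKey("hp") with hasOwnProperty("hp"): after setDataAffinity( "game/3a/character/red-prince", "game/fiction/character/dragonKing" ), red-prince appears to resolve "hp" through its affinity target rather than as an own property.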
//Debug.fmp( 2, registry.getProperties( "game/terraria/more/sorcerer1" ).toJSONObject() ); //registry.move(); // Debug.fmp( 2, registry.getProperties( "game/terraria/mob1" ).toJSONObject() ); //Debug.fmp( 2, registry.getProperties( "game/minecraft/mob1" ).toJSONObject() ); // // registry.putProperties( "movie/terraria/mob1", new JSONMaptron( "{ name:lural, age:666, species:cthulhu, job:mob }" ) ); // Debug.trace(registry.fetchRoot()); } private void testMisc( KOMRegistry registry ) { //registry.putProperties( "game/fiction/character/dragonKing", new JSONMaptron( "{ name:DragonKing, age:666, species:dragon, job:sorcerer, hp:999999 }" ) ); //registry.putProperties( "game/3a/character/red-prince", new JSONMaptron( "{ name: RedPrince, species:lizard, job:warrior, force:777777 }" ) ); //registry.rename( "game/3a/character/red-prince", "red-prince2" ); Debug.trace( registry.getProperties( "game/3a/character/red-prince" ) ); } private void testHardLink( KOMRegistry registry ) { //this.testBasicInsert( registry ); //Debug.trace( registry.queryElement( "game/minecraft" ) ); //registry.newLinkTag( "game/witcher", "game/minecraft", "mount" ); //Debug.trace( registry.getMasterTrieTree().queryAllLinkedCount( registry.queryGUIDByPath( "game/witcher" ) ) ); //Debug.trace( registry.getMasterTrieTree().queryStrongLinkedCount( registry.queryGUIDByPath( "game/witcher" ) ) ); //Debug.fmp( 2, registry.queryElement( "game/minecraft/mount/" ) ); //Debug.fmp( 2, registry.queryElement( "game/minecraft/mount/mob2" ) ); //Debug.fmp( 2, registry.queryElement( "mount" ) ); //Debug.fmp( 2, registry.queryElement( "game/witcher/jesus/" ) ); //registry.remove( "game/minecraft/mount" ); Debug.fmp( 2, registry.queryElement( "game3a" ).evinceNamespace().getEnumId() ); //var children = registry.queryElement( "game" ).evinceNamespace().getChildren(); //var mc = children.get("minecraft").evinceNamespace().getChildren(); // var children = registry.queryElement( "game3a" ).evinceNamespace().getChildren(); // var mc = children.get("witcher").evinceNamespace().getChildren(); // Debug.trace( 2, mc ); //Debug.trace( registry.get ) //Debug.fmp( 2, registry.queryElement( "game/minecraft/" ).evinceNamespace().listItem() ); } private void testSelector( KOMRegistry registry ) { this.testBasicInsert( registry ); //Debug.trace( registry.querySelectorJ( "game.minecraft.wizard1.name" ) ); Debug.fmp( 2, registry.querySelectorJ( "game/witcher/jesus" ) ); } private void testAttributes( KOMRegistry registry ) throws Exception { ElementNode node = registry.queryElement( "game/minecraft/sorcerer1" ); //node.getAttributes().setAttribute( "title", "king" ); //node.getAttributes().clear(); Debug.fmp( 2, node.getAttributes().size() ); } private void testMarshaling( KOMRegistry registry ) { // RegistryJSONEncoder encoder = new RegistryJSONEncoder( registry ); // ElementNode node = registry.queryElement( "game/witcher/jesus" ); // Debug.trace( encoder.encode( node ) ); RegistryJSONDecoder decoder = new RegistryJSONDecoder( registry ); Debug.trace( decoder.decode( new JSONMaptron( "{ game: { character: { " + "Ifan: { name: Ifan, hp:90, species: Human }, RedPrince: { name:RedPrince, hp:100, species: Lizard } " + "}, attr: 1234, file: text_files } }" ), null ).evinceNamespace().toJSONObject() ); Debug.trace( registry.querySelectorJ( "game" ) ); // registry.queryElement( "game/character/Ifan" ).getAttributes().setAttribute( "state", "live" ); // RegistryEncoder encoder = new RegistryDOMEncoder( registry ); // ElementNode node = registry.queryElement( "game" 
); // Debug.echo( encoder.encode( node ).toString() ); } } public class TestRegistry { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ StanMarsh ladyGaga = (StanMarsh) Pinecone.sys().getTaskManager().add( new StanMarsh( args, Pinecone.sys() ) ); ladyGaga.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestRemoteProcess.java ================================================ package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.proc.ProcessManager; import com.pinecone.hydra.proc.UProcess; import com.pinecone.hydra.proc.UniformProcessManager; import com.pinecone.hydra.proc.event.ProcessEvent; import com.pinecone.hydra.proc.event.ProcessEventHandler; import com.pinecone.hydra.proc.image.ArchEntryPointRunnable; import com.pinecone.hydra.proc.image.EntryPointRunnable; import com.pinecone.hydra.proc.image.ExecutionImage; import com.pinecone.hydra.proc.image.LocalHostedClassImage; import com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument; import com.pinecone.hydra.proc.image.kom.VirtualMappingExeImageInstrument; import com.pinecone.hydra.umc.wolf.client.UlfClient; import com.pinecone.hydra.umc.wolf.client.WolfMCClient; import com.pinecone.hydra.umc.wolf.server.WolfMCServer; import com.walnut.archcraft.ender.EnderHydra; import com.walnut.odin.proc.client.RavenRemoteProcessManagerClient; import com.walnut.odin.proc.client.RemoteProcessManagerClient; import com.walnut.odin.proc.entity.RemoteVitalizationResponse; import com.walnut.odin.proc.entity.UProcessRuntimeMeta; import com.walnut.odin.proc.server.RavenRemoteProcessManagerServer; import com.walnut.odin.proc.server.RemoteProcessManagerServer; import java.net.URI; import java.util.Collection; import java.util.Map; class Dante extends EnderHydra { public Dante( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public Dante( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { WolfMCServer wolfKing = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); RemoteProcessManagerServer server = new RavenRemoteProcessManagerServer( this.processManager(), wolfKing ); server.startService(); ProcessManager clientPM = new UniformProcessManager( this, null, "Miao", "", null ); UlfClient ulfClient = new WolfMCClient( this.getSystemGuidAllocator72().nextGUIDi64(), "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ); RemoteProcessManagerClient client = new RavenRemoteProcessManagerClient( clientPM, ulfClient ); client.startService(); //this.testClientProactiveCreation( server, client ); this.testServerProactiveCreation( server, client ); //this.testImageInstrument(); } private void testImageInstrument() { VirtualExeImageInstrument imageInstrument = new VirtualMappingExeImageInstrument( this, "" ); ProcessManager manager = this.processManager(); ProcessEventHandler eventHandler = new ProcessEventHandler() { @Override public void fired(EntryPointRunnable runnable, ProcessEvent event ) { Debug.bluef( runnable, event ); } }; ExecutionImage image = new LocalHostedClassImage( 
"image1", new ArchEntryPointRunnable( eventHandler ) { @Override public int main( Map args ) { Debug.greenfs( "Hello, hi, I am `" + this.ownedProcess().getName() + "`!" ); return 0; } }, manager ); imageInstrument.mount( "hola/senorita", image ); Debug.greenfs( imageInstrument.queryImage( "hola/senorita/image1" ).getName() ); } private void testClientProactiveCreation( RemoteProcessManagerServer server, RemoteProcessManagerClient client ) throws Exception { ProcessManager manager = this.processManager(); ProcessEventHandler eventHandler = new ProcessEventHandler() { @Override public void fired( EntryPointRunnable runnable, ProcessEvent event ) { Debug.bluef( runnable, event ); } }; ExecutionImage image = new LocalHostedClassImage( "gay", new ArchEntryPointRunnable( eventHandler ) { @Override public int main( Map args ) { Debug.greenfs( "Hello, hi, I am `" + this.ownedProcess().getName() + "`!" ); Debug.greenfs( this.ownedProcess().getPID() ); Debug.greenfs( this.ownedProcess().getLocalPID() ); Debug.greenfs( this.ownedProcess().getEnvironmentVariables() ); Debug.greenfs( this.ownedProcess().getStartupArguments() ); Debug.bluef( this.ownedProcess().getControllableLevel() ); Debug.bluef( this.ownedProcess().getOwnedProcessManager() ); Debug.greenfs( this.ownedProcess().parentProcess() ); return 0; } }, manager ); //LocalUProcess process = manager.createLocalHostedProcess( image, null, Map.of( "fuck", new String[]{ "you", "she", "he", "it" } ) ); UProcess process = client.createLocalUProcess(image, null, Map.of("fuck", new String[]{"you", "she", "he", "it"}), null); server.startRemoteUProcess( process.getGuid() ); } private void testServerProactiveCreation( RemoteProcessManagerServer server, RemoteProcessManagerClient client ) throws Exception { ProcessManager manager = this.processManager(); ProcessEventHandler eventHandler = new ProcessEventHandler() { @Override public void fired(EntryPointRunnable runnable, ProcessEvent event ) { Debug.bluef( runnable, event ); } }; ExecutionImage image = new LocalHostedClassImage( "image_c", new ArchEntryPointRunnable( eventHandler ) { @Override public int main( Map args ) { Debug.greenfs( "Hello, hi, I am `" + this.ownedProcess().getName() + "`!" 
                Debug.sleep( 1000 );
                Debug.greenfs( "Miao~" );
                return 1984;
            }
        }, manager );

        client.registerLocalScopeExecutionImage( "hola/senorita", image );
        ExecutionImage ic = client.queryExecutionImage( "hola/senorita/image_c" );
        ExecutionImage ig = client.queryExecutionImage( "/sys/public/global/exe/images/hola/senorita/image_c" );
        Debug.redfs( ic, ig );
        ic = client.queryExecutionImage( new URI("uofs:///hola/senorita/image_c") );
        ig = client.queryExecutionImage( new URI("uofs:///sys/public/global/exe/images/hola/senorita/image_c") );
        Debug.redfs( ic, ig );

        RemoteVitalizationResponse response = server.vitalizeRemoteUProcess(
                client.getClientId(), new URI("uofs:///sys/public/global/exe/images/hola/senorita/image_c"), this.getPID(),
                Map.of("fuck", new String[]{"you", "she", "he", "it"}), Map.of("kill", new String[]{"you", "she", "he", "it"})
        );
        Collection ps = server.searchProcessesByName( "image_c" );
        UProcess proc = ps.iterator().next();
        Debug.greenfs( proc.getName() );
        UProcessRuntimeMeta meta = server.queryProcessRuntimeMeta( proc.getPID() );
        Debug.warn( meta.getName() );
    }
}

public class TestRemoteProcess {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object... cfg) -> {
            Dante dante = (Dante) Pinecone.sys().getTaskManager().add( new Dante( args, Pinecone.sys() ) );
            dante.vitalize();
            return 0;
        }, (Object[]) args );
    }
}


================================================
FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestRuntime.java
================================================
package com.sparta;

import java.util.Map;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.system.regime.arch.Lord;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.hydra.proc.ProcessManager;
import com.pinecone.hydra.proc.event.ProcessEvent;
import com.pinecone.hydra.proc.event.ProcessEventHandler;
import com.pinecone.hydra.proc.event.ProcessLifecycleHandler;
import com.pinecone.hydra.proc.image.ArchEntryPointRunnable;
import com.pinecone.hydra.proc.image.EntryPointRunnable;
import com.pinecone.hydra.proc.image.ExecutionImage;
import com.pinecone.hydra.proc.image.LocalHostedClassImage;
import com.pinecone.hydra.umc.wolf.client.UlfClient;
import com.pinecone.hydra.umc.wolf.client.WolfMCClient;
import com.pinecone.hydra.umc.wolf.server.WolfMCServer;
import com.walnut.odin.atlas.advance.GenericTapedBFSGraphAdvancer;
import com.walnut.odin.atlas.advance.strategy.AtlasPriorityProcessStrategy;
import com.walnut.odin.atlas.advance.strategy.MegaInDegreeFirstStrategy;
import com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;
import com.walnut.odin.atlas.graph.UniformRuntimeAtlas;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta;
import com.pinecone.hydra.unit.iqueue.ConfigurableMegaStratumQueueMeta;
import com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue;
import com.pinecone.hydra.unit.iqueue.MegaDPStratumQueue;
import com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta;
import com.pinecone.hydra.unit.iqueue.MegaStratumQueueMeta;
import com.pinecone.hydra.unit.vgraph.MagnitudeVectorDAG;
import com.pinecone.hydra.unit.vgraph.entity.GraphNode;
import com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;
import com.pinecone.ulf.util.guid.GUIDs;
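// Walnut-side orchestration stack: the Ender hydra base system, the Odin lord, and the collective-task legionary client.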
import com.walnut.archcraft.ender.EnderHydra;
import com.walnut.odin.conduct.CollectiveTaskLegionary;
import com.walnut.odin.conduct.RavenCollectiveTaskLegionary;
import com.walnut.odin.conduct.schedule.RavenTaskScheduler;
import com.walnut.odin.system.Odin;
import com.walnut.odin.task.CentralizedTaskInstrument;

class Rick extends EnderHydra {
    public Rick( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Rick( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
        WolfMCServer wolfKing = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" +
                "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") );
        this.getDispenserCenter().getInstanceDispenser().registerInstance( "TaskWolfKing", wolfKing );

        Lord lord = this.getLordFederation().instantiate( "KernelOdinLord", "./system/setup/lords/odin.json5" );
        Odin odin = (Odin) lord;
        odin.vitalize();

        LayerInstrument layerInstrument = odin.layerInstrument();
        CentralizedTaskInstrument uniformTaskInstrument = odin.taskRegiment().taskInstrument();
        RuntimeAtlasInstrument uniformRuntimeAtlas = odin.atlasInstrument();

        //this.testInsert(uniformRuntimeAtlas);
        //this.testQuery( uniformRuntimeAtlas );
        //this.testTape( uniformRuntimeAtlas, koiMappingDriver );
        //this.testAdvancer( uniformRuntimeAtlas, koiMappingDriver,layerInstrument );
        this.testOrchestrator( odin );
    }

    public void testInsert( UniformRuntimeAtlas uniformRuntimeAtlas ) {
        GuidAllocator guidAllocator = uniformRuntimeAtlas.getGuidAllocator();
//        for( int i = 1; i<=12; i++ ) {
//            TaskAtlasNode taskAtlasNode = new TaskAtlasNode();
//            taskAtlasNode.setName("测试图节点" + i);
//            uniformRuntimeAtlas.put( taskAtlasNode );
//        }
        //uniformRuntimeAtlas.put( GUIDs.GUID72("252386a-0000ca-0001-f0"),taskAtlasNode );
        uniformRuntimeAtlas.addChild(GUIDs.GUID128("01972f7e-1642-75c5-aa70-82a752fd5e05"), GUIDs.GUID128("01972f7e-164e-7f80-8e67-a22060a3afd7"));
        //uniformRuntimeAtlas.put(GUIDs.GUID72("20dc3d8-00007b-0000-50"), taskAtlasNode);
    }

    public void testQuery( UniformRuntimeAtlas uniformRuntimeAtlas ) {
//        GuidAllocator guidAllocator = uniformRuntimeAtlas.getGuidAllocator();
//        TaskGraphNode query = uniformRuntimeAtlas.query(GUIDs.GUID72("20dc3d8-00007b-0000-50"));
//        Debug.trace(query.toJSONString());
//        List path = uniformRuntimeAtlas.getPath(GUIDs.GUID72("210f43c-000017-0000-64"));
//        Debug.trace(path);
        GraphNode graphNode = uniformRuntimeAtlas.queryGraphNodeByTaskGuid(GUIDs.GUID128("21164d6-0003e5-000f-50"));
        Debug.trace(graphNode.toJSONString());
        TaskElement taskElement = uniformRuntimeAtlas.queryTaskElementByGuid(GUIDs.GUID128("233e952-000010-0000-c0"));
        Debug.trace(taskElement.toJSONObject());
    }

    public void testTape( UniformRuntimeAtlas uniformRuntimeAtlas, KOIMappingDriver driver ) {
//        MagnitudeVectorDAG magnitudeVectorDAG = new MagnitudeVectorDAG( GUIDs.GUID128("22610ea-00002d-0000-a0"),uniformRuntimeAtlas.getMasterManipulator().getVectorGraphMasterManipulator(), uniformRuntimeAtlas.getConfig() );
//        GraphStratumTape tapeded = uniformRuntimeAtlas.tapedGraphStratumAdvancer(magnitudeVectorDAG, driver);
//        //Debug.trace(tapeded.next().toJSONString());
//        Debug.trace(tapeded.fetchNodes(2,1));
    }

    public void testAdvancer( UniformRuntimeAtlas uniformRuntimeAtlas, KOIMappingDriver driver, LayerInstrument layerInstrument ) {
        MagnitudeVectorDAG magnitudeVectorDAG = (MagnitudeVectorDAG) uniformRuntimeAtlas.queryByPath( "l1/l11" );
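        // The taped BFS advancer is backed by two persistent queues, each mapped to its own table: a deflect-priority queue and a temporary stratum queue.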
        MegaDeflectPriorityQueueMeta meta1 = new ConfigurableMegaDeflectPriorityQueueMeta();
        meta1.setQueueTableName( "hydra_queue_nodes" );
        MegaStratumQueueMeta meta2 = new ConfigurableMegaStratumQueueMeta();
        meta2.setQueueTableName( "hydra_temporary_queue_nodes" );
        MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue( driver, 0, "segment_name", "测试队列", meta1 );
        MegaDPStratumQueue megaDPStratumQueue = new MegaDPStratumQueue( driver, "segment_name", "测试临时队列", meta2 );

        MegaInDegreeFirstStrategy strategyChain = new MegaInDegreeFirstStrategy( uniformRuntimeAtlas, magnitudeDPQueue, megaDPStratumQueue, layerInstrument );
        AtlasPriorityProcessStrategy atlasPriorityProcessStrategy = new AtlasPriorityProcessStrategy();
        atlasPriorityProcessStrategy.addStrategy( strategyChain );
        GenericTapedBFSGraphAdvancer advancer = new GenericTapedBFSGraphAdvancer( uniformRuntimeAtlas, magnitudeDPQueue, atlasPriorityProcessStrategy );
        advancer.traverse( magnitudeVectorDAG );
    }

    public void testOrchestrator( Odin odin ) throws Exception {
        odin.taskRegiment().startRemoteProcessServer();

        UlfClient ulfClient = new WolfMCClient(
                this.getSystemGuidAllocator72().nextGUIDi64(), "", this,
                this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" )
        );
        CollectiveTaskLegionary regimentClient = new RavenCollectiveTaskLegionary( "jesus", this, ulfClient );
        regimentClient.startService();
        regimentClient.joinRegiment();
        regimentClient.remoteProcessManagerClient().addProcessLifecycleHandler(new ProcessLifecycleHandler() {
            @Override
            public void fired( String imageAddress, EntryPointRunnable runnable, ProcessEvent event ) {
                Debug.greenfs( imageAddress, event );
            }
        });

        ProcessManager manager = regimentClient.processManager();
        ProcessEventHandler eventHandler = new ProcessEventHandler() {
            @Override
            public void fired( EntryPointRunnable runnable, ProcessEvent event ) {
                Debug.bluef( runnable, event );
            }
        };
        ExecutionImage image = new LocalHostedClassImage( "image_c", new ArchEntryPointRunnable( eventHandler ) {
            @Override
            public int main( Map args ) {
                Debug.greenfs( "Hello, hi, I am `" + this.ownedProcess().getName() + "`!" );
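                // Same one-second stub as in TestRemoteProcess, presumably a stand-in for a real task body.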
                Debug.sleep( 1000 );
                Debug.greenfs( "Miao~" );
                //throw new IrrationalProvokedException();
                return 1984;
            }
        }, manager );
        manager.getImageLoader().registerLocalScopeExecutionImage( "hola/senorita", image );

        RavenTaskScheduler scheduler = (RavenTaskScheduler) odin.taskScheduler();
        scheduler.fetch();
    }
}

public class TestRuntime {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object... cfg) -> {
            Rick rick = (Rick) Pinecone.sys().getTaskManager().add( new Rick( args, Pinecone.sys() ) );
            rick.vitalize();
            return 0;
        }, (Object[]) args );
    }
}


================================================
FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestSFM.java
================================================
package com.sparta;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;
import com.pinecone.hydra.umb.kafka.WolfMCKafkaClient;
import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;
import com.pinecone.hydra.umb.wolf.WolfMCBClient;
import com.pinecone.hydra.umct.WolfMCExpress;
import com.pinecone.tritium.Tritium;
import com.walnut.sailor.stream.fm.SFMConfig;
import com.walnut.sailor.stream.fm.SFMSessionValidatorController;
import com.walnut.sailor.stream.fm.SailorFMConfig;
import com.walnut.sailor.stream.fm.SailorFMDistributionService;
import com.walnut.sailor.stream.fm.SingleStreamFileMultiDistributionService;
import com.walnut.sailor.stream.fm.event.SFMEventSubscriber;

class Lois extends Tritium {
    public Lois( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Lois( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
        this.testSFM();
    }

    private void testSFM() throws Exception {
        UlfBroadcastControlNode controlNode = new WolfMCBClient( new WolfMCKafkaClient("b-serverkingpin:9092"), "", this, WolfMCExpress.class );
        SFMConfig config = new SailorFMConfig( new JSONMaptron("{\n" +
                " \"fileFrameSize\": 972800,\n" +
                " \"sessionExpiredTimeMillis\": 7200000,\n" +
                " \"fileCloudDistributeTransmitTopic\": \"ucdn-file-cloud-distribute-transmit-topic\",\n" +
                " \"fileServiceTransmitGroup\": \"UCDNFileServiceTransmitGroup\",\n" +
                " \"storageDirectory\": \"E:/fs/\",\n" +
                " }") );
        SingleStreamFileMultiDistributionService service = new SailorFMDistributionService( controlNode, config );
        service.registerDirectionRoute( "major", "E:/fs/" );
        service.registerFileTransmitCompleteEventSubscriber(new SFMEventSubscriber() {
            @Override
            public void afterEventTriggered( String path, String fileName, String directoryPath ) {
                Debug.greenfs( "MiaoMiao~", path, fileName, directoryPath );
            }
        });
        service.start();

        BroadcastControlConsumer consumer = controlNode.createBroadcastControlConsumer(
                config.getFileCloudDistributeTransmitTopic(), config.getFileServiceTransmitGroup()
        );
        consumer.registerController( new SFMSessionValidatorController() );
        service.distributeFile( "Ton Koopman - Toccata and Fugue in D minor, BWV 565 b.flac", "E:/", "major" );
    }
}

public class TestSFM {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object... cfg) -> {
            Lois lois = (Lois) Pinecone.sys().getTaskManager().add( new Lois( args, Pinecone.sys() ) );
            lois.vitalize();
            return 0;
        }, (Object[]) args );
    }
}

================================================
FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestServiceManager.java
================================================
package com.sparta;

import com.acorn.redqueen.service.conduct.RedCollectiveServiceRegiment;
import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;
import com.pinecone.hydra.service.kom.ServiceInstrument;
import com.pinecone.hydra.service.kom.UniformServiceInstrument;
import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;
import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;
import com.pinecone.hydra.service.registry.server.UniformServiceManager;
import com.pinecone.hydra.service.registry.client.HuskyServiceClient;
import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;
import com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;
import com.pinecone.hydra.service.registry.ulf.HuskyServiceAppointServer;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.uma.DuplexAppointClient;
import com.pinecone.hydra.uma.HuskyDuplexExpress;
import com.pinecone.hydra.uma.wolf.WolvesAppointClient;
import com.pinecone.hydra.uma.wolf.WolvesAppointServer;
import com.pinecone.hydra.umc.wolf.client.UlfClient;
import com.pinecone.hydra.umc.wolf.client.WolfMCClient;
import com.pinecone.hydra.umc.wolf.server.WolfMCServer;
import com.pinecone.tritium.Tritium;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.ulf.util.guid.i64.GuidAllocator72V2;

import java.util.List;

class Brian extends Tritium {
    public Brian( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Brian( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
        KOIMappingDriver koiMappingDriver = new ServiceMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver );

        WolfMCServer wolfKing = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" +
                "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") );
        UniformServiceManager serviceManager = new UniformServiceManager( servicesTree );
        serviceManager.hookAppointServer( new HuskyServiceAppointServer( new WolvesAppointServer( wolfKing, HuskyDuplexExpress.class ) ));
        RedCollectiveServiceRegiment serviceRegiment = new RedCollectiveServiceRegiment( this, servicesTree, serviceManager );
        serviceRegiment.startServiceManage();

        UlfClient ulfClient = new WolfMCClient(
                new GuidAllocator72V2().nextGUIDi64(), "", this,
                this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" )
        );
        HuskyServiceClient managerClient = new HuskyServiceClient( ulfClient, servicesTree.getGuidAllocator() );
        managerClient.startService();

        this.testUniformServiceRegister_Proactive( managerClient );
        //this.oldTest( servicesTree );
    }

    public void testUniformServiceRegister_Proactive( HuskyServiceClient managerClient ) throws Exception {
        DuplexAppointClient client = managerClient.getAppointNodus();
        ServiceMetaManipulationIface metaIface = client.getIface(ServiceMetaManipulationIface.class);
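        // Exercise the registry over the duplex appoint channel: query by path, create elements via the JSON creation statement, then register an instance.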
"root/test/app/ser" ); Debug.greenfs( meta ); String guid = metaIface.evalCreationStatement( "{ root: { test: { app: { metaType: ApplicationElement, alias:as, services: { test1: { metaType: ServiceElement, type: Microservice } } } } } }" ); ServiceMetaDTO meta1 = metaIface.queryServiceMetaByPath( "root/test/app/test1" ); Debug.greenfs( meta1 ); managerClient.registerService( managerClient.getGuidAllocator().parse(meta1.getGuid()), null ); List serviceMetaDTOS = metaIface.fetchServiceInsMetaByServiceId( meta1.getGuid() ); Debug.bluefs( serviceMetaDTOS ); //managerClient.deregister(); client.close(); //Debug.trace(iface.hasOwnedServiceByServiceId( "181e9e6-000395-0000-94" )); } private void oldTest(ServiceInstrument servicesTree) throws Exception { WolfMCServer wolfKing = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); WolvesAppointServer wolfServer = new WolvesAppointServer( wolfKing, HuskyDuplexExpress.class ); UniformServiceManager serviceManager = new UniformServiceManager( servicesTree ); serviceManager.hookAppointServer( new HuskyServiceAppointServer( wolfServer ) ); wolfKing.execute(); Debug.sleep( 500 ); DuplexAppointClient wolf = new WolvesAppointClient( new WolfMCClient( 2048, "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ) ); wolf.execute(); wolf.compile( ServiceLifecycleIface.class, false ); wolf.compile( ServiceMetaManipulationIface.class, false ); this.testServiceRegister( wolf ); } public void testServiceRegister( DuplexAppointClient client ) { ServiceLifecycleIface iface = client.getIface( ServiceLifecycleIface.class ); ServiceMetaManipulationIface metaIface = client.getIface(ServiceMetaManipulationIface.class); RegisterServiceDTO serviceDTO1 = new RegisterServiceDTO(); serviceDTO1.setServiceId( "1769872-0002d2-0003-cc" ); serviceDTO1.setClientId( 1234L ); RegisterServiceDTO serviceDTO2 = new RegisterServiceDTO(); serviceDTO2.setServiceId( "181e9e6-000395-0000-94" ); serviceDTO2.setClientId(1235L); iface.registerService( serviceDTO1 ); iface.registerService( serviceDTO2 ); List serviceMetaDTOS = metaIface.fetchServiceInsMetaByServiceId( "1769872-0002d2-0003-cc" ); Debug.trace( serviceMetaDTOS ); iface.deregisterServiceByInstanceId( "181e9e6-000395-0000-94" ); // Debug.trace(iface.hasOwnedServiceByServiceId( "181e9e6-000395-0000-94" )); } } public class TestServiceManager{ public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ Brian brian = (Brian) Pinecone.sys().getTaskManager().add( new Brian( args, Pinecone.sys() ) ); brian.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestServiceTree.java ================================================ package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.UniformServiceInstrument; import com.pinecone.hydra.service.kom.entity.GenericServiceElement; import com.pinecone.hydra.service.kom.marshaling.ServiceJSONDecoder; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.tritium.Tritium; 
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.ulf.util.guid.GUIDs;

class Jesse extends Tritium {
    public Jesse( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Jesse( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
        KOIMappingDriver koiMappingDriver = new ServiceMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver );
        //this.testInsert( servicesTree );
        this.testGet( servicesTree );
        //this.testDelete( servicesTree );
    }

    private void testInsert( ServiceInstrument serviceInstrument ){
//        GenericNamespace namespace = new GenericNamespace();
//        namespace.setName( "Test1" );
//        serviceInstrument.put( namespace );
        //Debug.trace( serviceInstrument.get( GUIDs.GUID72("03c2f90-000133-000 0-44") ) );
//        GenericApplicationElement applicationNode = new GenericApplicationElement(
//                new JSONMaptron( "{ name:specialApp, alias:jesus, deploymentMethod:Container, path:'/xxx/xxx/ggg', resourceType:human," +
//                        "type:Social, description: 'This is jesus', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king' }" )
//        );
//
//        applicationNode.apply( new JSONMaptron( "{ name:specialApp2, deploymentMethod:VM }" ) );
//        serviceInstrument.put( applicationNode );
        GenericServiceElement serviceNode = new GenericServiceElement(
                new JSONMaptron( "{ name:'特殊服务', alias:jesus, serviceType:System, path:'/xxx/xxx/ggg', resourceType:human," +
                        "type:Social, description: 'This is special', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king' }" )
        );
        serviceInstrument.put( serviceNode );
    }

    private void testGet( ServiceInstrument serviceInstrument ){
        //Debug.trace( serviceInstrument.queryGUIDByPath( "规则1/很好的服务/我的世界" ) );
        //Debug.trace( serviceInstrument.getPath(GUIDs.GUID72( "03c4a36-000381-0000-48" ) ) );
        //Debug.trace( serviceInstrument.get( GUIDs.GUID72("03e60e8-0000ae-0000-20") ) );
        //Debug.trace( serviceInstrument.get( GUIDs.GUID72("03e60e8-0000c5-0000-48") ) );
        //Debug.trace( serviceInstrument.get( GUIDs.GUID72("03e60e8-000117-0000-18") ) );
//        Debug.trace( servicesTree.get( GUIDs.GUID72( "02be396-0001e9-0000-e4" ) ) );
        //Debug.trace( serviceInstrument.affirmApplication( "Test1/App1" ) );
//        Debug.trace( serviceInstrument.affirmService( "root/特殊服务" ) );
//        Debug.trace( serviceInstrument.affirmApplication( "root/species/orc" ) );
//        Debug.trace( serviceInstrument.affirmNamespace("root/species") );
//        Debug.trace( serviceInstrument.affirmNamespace( "root" ).fetchChildren() );
//
//        serviceInstrument.affirmApplication( "root/species/orc" ).addChild( new GenericServiceElement( new JSONMaptron( "{ name: slaughter }" ) ) );
//
//        Debug.trace( serviceInstrument.affirmApplication( "root/species/orc" ).fetchChildren() );
//
//        Debug.trace( serviceInstrument.queryElement( "root/species/orc/slaughter" ).toJSONObject() );
//
//        serviceInstrument.affirmNamespace( "root" ).addChild( new GenericNamespace( new JSONMaptron( "{ name: weapon, scenario: s1, description: d1, level:L1, primaryImplLang:Java }" ) ) );
//
//        Debug.fmp( 2, serviceInstrument.queryElement( "root/weapon" ).evinceNamespace().toJSONDetails() );
        ServiceJSONDecoder decoder = new ServiceJSONDecoder( serviceInstrument );
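        // The nested JSON mirrors the target tree layout; metaType selects the element kind materialized at each level.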
        decoder.decode( new JSONMaptron( "{ root: { test: { app: { metaType: ApplicationElement, alias:as, services: { ser: { metaType: ServiceElement, type: Microservice } } } } } }" ) );
        Debug.fmp( 2, serviceInstrument.queryElement( "root" ).toJSONObject() );
        //Debug.trace(serviceInstrument.getPath( GUIDs.GUID72("181e9e4-000395-0000-d4") ));
    }

    private void testDelete( ServiceInstrument serviceInstrument ){
        serviceInstrument.remove( GUIDs.GUID128("181e9e4-000395-0000-d4") );
    }
}

public class TestServiceTree {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object... cfg) -> {
            Jesse jesse = (Jesse) Pinecone.sys().getTaskManager().add( new Jesse( args, Pinecone.sys() ) );
            jesse.vitalize();
            return 0;
        }, (Object[]) args );
    }
}


================================================
FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestSparta.java
================================================
package com.sparta;

import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.support.GenericApplicationContext;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.system.functions.Executor;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.config.JSONConfig;
import com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;
import com.pinecone.hydra.storage.file.FileSystemConfig;
import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.KernelFileSystemConfig;
import com.pinecone.hydra.storage.file.UniformObjectFileSystem;
import com.pinecone.hydra.storage.volume.KernelVolumeConfig;
import com.pinecone.hydra.storage.volume.UniformVolumeManager;
import com.pinecone.hydra.storage.volume.VolumeConfig;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.tritium.Tritium;
import com.walnut.sparta.Sparta;
import com.walnut.sparta.SpartaBoot;

class JesusChrist extends Tritium {
    public JesusChrist( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public JesusChrist( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
        Sparta sparta = new Sparta( "Sparta", this );
        Thread shutdowner = new Thread(()->{
            Debug.sleep( 5000 );
            sparta.terminate();
        });
        //shutdowner.start();

        KOIMappingDriver koiMappingDriver = new VolumeMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        KOIMappingDriver koiFileMappingDriver = new FileMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        JSONConfig selfConfig = new JSONConfig();
        FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( "service.PrimaryUniformFileSystem" ) );
        VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( "service.PrimaryUniformVolumeManager" ) );
        KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver, fileSystemConfig );
        UniformVolumeManager volumeTree = new UniformVolumeManager( koiMappingDriver, volumeConfig );

        sparta.setPrimarySources( SpartaBoot.class );
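        // Expose the hand-built file system and volume manager to the Spring context so SpartaBoot components can inject them as beans.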
        sparta.setInitializer(new Executor() {
            @Override
            public void execute() throws Exception {
                sparta.getSpringApplication().addInitializers(new ApplicationContextInitializer() {
                    @Override
                    public void initialize( ConfigurableApplicationContext applicationContext ) {
                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;
                        genericApplicationContext.registerBean("primaryFileSystem", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem);
                        genericApplicationContext.registerBean("primaryVolume", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree);
                    }
                });
            }
        });
        sparta.execute();
        this.getTaskManager().add( sparta );
        this.getTaskManager().syncWaitingTerminated();
    }
}

public class TestSparta {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object... cfg) -> {
            JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) );
            jesus.vitalize();
            return 0;
        }, (Object[]) args );
    }
}


================================================
FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestTaskTree.java
================================================
package com.sparta;

import java.util.Map;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.id.GUID;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.hydra.proc.ProcessManager;
import com.pinecone.hydra.proc.event.ProcessEvent;
import com.pinecone.hydra.proc.event.ProcessEventHandler;
import com.pinecone.hydra.proc.event.ProcessLifecycleHandler;
import com.pinecone.hydra.proc.image.ArchEntryPointRunnable;
import com.pinecone.hydra.proc.image.EntryPointRunnable;
import com.pinecone.hydra.proc.image.ExecutionImage;
import com.pinecone.hydra.proc.image.LocalHostedClassImage;
import com.pinecone.hydra.task.kom.TaskInstrument;
import com.pinecone.hydra.task.kom.entity.GenericTaskElement;
import com.pinecone.hydra.task.kom.entity.TaskElement;
import com.pinecone.hydra.task.kom.instance.InstanceEntry;
import com.pinecone.hydra.task.kom.instance.InstanceInstrument;
import com.pinecone.hydra.task.kom.marshaling.TaskJSONDecoder;
import com.pinecone.hydra.umc.wolf.client.UlfClient;
import com.pinecone.hydra.umc.wolf.client.WolfMCClient;
import com.pinecone.hydra.umc.wolf.server.WolfMCServer;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.ulf.util.guid.GUIDs;
import com.walnut.archcraft.ender.EnderHydra;
import com.walnut.odin.conduct.CollectiveTaskRegiment;
import com.walnut.odin.conduct.CollectiveTaskLegionary;
import com.walnut.odin.conduct.RavenCollectiveTaskRegiment;
import com.walnut.odin.conduct.RavenCollectiveTaskLegionary;
import com.walnut.odin.conduct.entity.LaunchedContext;
import com.walnut.odin.task.CentralizedTaskInstrument;
import com.walnut.odin.task.GenericRavenTaskConfig;
import com.walnut.odin.task.RavenTaskInstrument;
import com.walnut.odin.task.dto.CategoryTag;
import com.walnut.odin.task.dto.GenericCategoryTag;
import com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver;
import com.walnut.odin.task.service.CategoryService;
import com.walnut.odin.task.troll.LaunchFeature;

class Randy extends EnderHydra {
    public Randy( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Randy( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
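        // All task trees in these tests share the "MySQLKingHydranium" Hydranium data source.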
        OdinUniformTaskMappingDriver categoryMappingDriver = new OdinUniformTaskMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        RavenTaskInstrument ravenTaskInstrument = new RavenTaskInstrument( categoryMappingDriver, new GenericRavenTaskConfig() );
        //this.testCategory( ravenTaskInstrument );
        //this.testInsert( ravenTaskInstrument );
        //this.testGet( ravenTaskInstrument );
        //this.testDelete( instrument );
        //this.testInstance( ravenTaskInstrument );
        //CollectiveTaskRegiment taskRegiment = new RavenCollectiveTaskRegiment( this, ravenTaskInstrument );
        //this.testTaskRegimentBase( taskRegiment ,ravenTaskInstrument);
        this.testInstanceLaunch( ravenTaskInstrument );
    }

    private void testTaskRegimentBase( CollectiveTaskRegiment regiment, RavenTaskInstrument instrument ) {
        /*
        TaskElement taskElement = new GenericTaskElement();
        taskElement.setName("spartaTest00058");
        taskElement.setType("sparta");
        taskElement.setResourceType("spartaTest00058");
        taskElement.setImagePath("spartaTest0005");
        taskElement.setDeploymentMethod("spartaTest0017");
        taskElement.setPriority(1);
        taskElement.setActuallyPriority(1);
        taskElement.setDryRun(true);
        RavenTask task = regiment.createTask( taskElement, taskElement.getGuid());
        Debug.trace(task);
        RavenTaskInstance instance = task.createInstance();
        Debug.trace(instance);
        Debug.trace(instrument.queryElement("spartaTest00058"));
        */
        TaskElement taskElement = (TaskElement) instrument.queryElement("spartaTest00058");
        Debug.trace(taskElement);
        taskElement.setName("spartaTest00059855");
        Debug.trace(taskElement);
        Debug.trace(instrument.getPath( GUIDs.GUID128("01977911-62b1-70f3-bd4f-060e889c088e")));
        regiment.affirmTask("spartaTest00058", GUIDs.GUID128("01977911-62b1-70f3-bd4f-060e889c088e"), taskElement);
        //regiment.purgeTask(GUIDs.GUID128("019776f2-e80a-7675-ba4b-7d1d415d8088"));
    }

    private void testCategory( RavenTaskInstrument instrument ) {
        CategoryService categoryService = instrument.getCategoryService();
        CategoryTag tag = new GenericCategoryTag();
        tag.setCategoryName( "Data" );
        tag.setCategoryType( "System" );
        Debug.greenfs( categoryService.setCategoryTag( "root/test/job/task", tag ) );
    }

    private void testInsert( RavenTaskInstrument instrument ) {
//        GenericNamespace namespace = new GenericNamespace();
//        namespace.setName( "Test1" );
//        instrument.put( namespace );
        //Debug.trace( instrument.get( GUIDs.GUID72("03c2f90-000133-000 0-44") ) );
//        GenericApplicationElement applicationNode = new GenericApplicationElement(
//                new JSONMaptron( "{ name:specialApp, alias:jesus, deploymentMethod:Container, path:'/xxx/xxx/ggg', resourceType:human," +
//                        "type:Social, description: 'This is jesus', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king' }" )
//        );
//
//        applicationNode.apply( new JSONMaptron( "{ name:specialApp2, deploymentMethod:VM }" ) );
//        instrument.put( applicationNode );
        for( int i = 1; i <= 12; i++ ) {
            GenericTaskElement taskElement = new GenericTaskElement(
                    new JSONMaptron( "{ name:'测试服务"+i+"', alias:jesus, serviceType:System, resourceType:human," +
                            "type:Social, description: 'This is special', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king' }" )
            );
            instrument.put( taskElement );
        }
        //instrument.affirmOwnedNode( GUIDs.GUID128("01972f5a-7edc-79bd-9655-ea50ae5b0887"),GUIDs.GUID128("01972f5b-4d56-7d1c-811c-b855bfdb5dcb") );
GUIDs.GUID128("01972f59-4049-77fc-827f-a9976425c01c") ); } private void testGet( TaskInstrument instrument ){ //Debug.trace( instrument.queryGUIDByPath( "规则1/很好的服务/我的世界" ) ); //Debug.trace( instrument.getPath(GUIDs.GUID72( "03c4a36-000381-0000-48" ) ) ); //Debug.trace( instrument.get( GUIDs.GUID72("03e60e8-0000ae-0000-20") ) ); //Debug.trace( instrument.get( GUIDs.GUID72("03e60e8-0000c5-0000-48") ) ); //Debug.trace( instrument.get( GUIDs.GUID72("03e60e8-000117-0000-18") ) ); // Debug.trace( instrument.get( GUIDs.GUID72( "02be396-0001e9-0000-e4" ) ) ); //Debug.trace( instrument.affirmApplication( "Test1/App1" ) ); // Debug.trace( instrument.affirmService( "root/特殊服务" ) ); // Debug.trace( instrument.affirmApplication( "root/species/orc" ) ); // Debug.trace( instrument.affirmNamespace("root/species") ); // Debug.trace( instrument.affirmNamespace( "root" ).fetchChildren() ); // // instrument.affirmApplication( "root/species/orc" ).addChild( new GenericServiceElement( new JSONMaptron( "{ name: slaughter }" ) ) ); // // Debug.trace( instrument.affirmApplication( "root/species/orc" ).fetchChildren() ); // // Debug.trace( instrument.queryElement( "root/species/orc/slaughter" ).toJSONObject() ); // // instrument.affirmNamespace( "root" ).addChild( new GenericNamespace( new JSONMaptron( "{ name: weapon, scenario: s1, description: d1, level:L1, primaryImplLang:Java }" ) ) ); // // Debug.fmp( 2, instrument.queryElement( "root/weapon" ).evinceNamespace().toJSONDetails() ); TaskJSONDecoder decoder = new TaskJSONDecoder( instrument ); decoder.decode( new JSONMaptron( "{ root: { test: { job: { metaType: AppElement, type:SysJob, tasks: { task: { metaType: TaskElement, type: SparkTask } } } } } }" ) ); Debug.fmp( 2, instrument.queryElement( "root" ).toJSONObject() ); // GUID128 guid128 = GUIDs.GUID128("019714af-e0ec-7f2a-94a3-cd740efccb6c"); // Debug.trace(instrument.getPath( GUIDs.GUID128("019714af-e0ec-7f2a-94a3-cd740efccb6c") )); } private void testDelete( TaskInstrument instrument ) { instrument.remove( GUIDs.GUID128("181e9e4-000395-0000-d4") ); } private void testInstance( TaskInstrument instrument ) { InstanceInstrument instanceInstrument = instrument.getInstanceInstrument(); GUID taskGuid = instrument.queryGUIDByPath( "root/test/job/task" ); //InstanceEntry instanceEntry = new GenericInstanceEntry( new JSONMaptron( "{priority:456, taskType: Spark, instanceName: test123}" ) ); //instanceInstrument.addInstance( taskGuid, instanceEntry ); //Debug.fmp( 2, instanceEntry ); // TaskElement taskElement = (TaskElement) instrument.queryElement( "root/test/job/task" ); // taskElement.setImagePath( "uofs:///sys/public/global/exe/images/hola/senorita/image_c" ); // instrument.update( taskElement ); InstanceEntry instanceEntry = instanceInstrument.makeInstanceEntry( taskGuid ); Debug.fmp( 2, instanceEntry ); } private void testInstanceLaunch( TaskInstrument instrument ) throws Exception { WolfMCServer wolfKing = new WolfMCServer( "", this, new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); CollectiveTaskRegiment regiment = new RavenCollectiveTaskRegiment( this, (CentralizedTaskInstrument) instrument, wolfKing ); regiment.startRemoteProcessServer(); UlfClient ulfClient = new WolfMCClient( this.getSystemGuidAllocator72().nextGUIDi64(), "", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ); CollectiveTaskLegionary regimentClient = new RavenCollectiveTaskLegionary( "jesus", 
        CollectiveTaskLegionary regimentClient = new RavenCollectiveTaskLegionary( "jesus", this, ulfClient );
        regimentClient.startService();
        regimentClient.joinRegiment();
        regimentClient.remoteProcessManagerClient().addProcessLifecycleHandler(new ProcessLifecycleHandler() {
            @Override
            public void fired( String imageAddress, EntryPointRunnable runnable, ProcessEvent event ) {
                Debug.greenfs( imageAddress, event );
            }
        });

//        TaskExecutionLauncher launcher = regiment.taskExecutionLauncher();
//        GUID taskGuid = instrument.queryGUIDByPath( "root/test/job/task" );
//        RavenTask task = regiment.getTaskByGuid( taskGuid );
//        RavenTaskInstance instance = task.createInstance();

        //ProcessManager manager = this.processManager();
        ProcessManager manager = regimentClient.processManager();
        ProcessEventHandler eventHandler = new ProcessEventHandler() {
            @Override
            public void fired( EntryPointRunnable runnable, ProcessEvent event ) {
                Debug.bluef( runnable, event );
            }
        };
        ExecutionImage image = new LocalHostedClassImage( "image_c", new ArchEntryPointRunnable( eventHandler ) {
            @Override
            public int main( Map args ) {
                Debug.greenfs( "Hello, hi, I am `" + this.ownedProcess().getName() + "`!" );
                Debug.sleep( 1000 );
                Debug.greenfs( "Miao~" );
                //throw new IrrationalProvokedException();
                return 1984;
            }
        }, manager );
        manager.getImageLoader().registerLocalScopeExecutionImage( "hola/senorita", image );

        LaunchFeature feature = new LaunchFeature();
        //UProcess uProcess = launcher.createLocally( instance, feature );
        //UProcess uProcess = launcher.createRemotely( instance, client.getClientId(), feature );
        //uProcess.start();
        //launcher.launchRemotely( instance, client.getClientId(), feature );
        //launcher.launchLocally( instance, feature );

//        // Test processor
//        GenericTaskProcessorEntity processorEntity = new GenericTaskProcessorEntity(
//                new JSONMaptron("{name:r1, clusterPath:'/r1', clusterName: 'r1', local: false, priority: 100, queueMeta: {" +
//                        "name: r1_q, maxCapacity: 100, minCapacity: 100, runtimeInstanceCapacity: 50}}" +
//                        "}}")
//        );
//        processorEntity.setControlClientId( client.getClientId() );
//
//        TaskExecutionProcessor processor = new RavenTaskExecutionProcessor(processorEntity, launcher);
//        processor.pipeLaunch( List.of( TaskLaunchContext.of( feature, instance ) ) );

        //TaskDispatcher taskDispatcher = regiment.taskDispatcher();
        //UProcess process = taskDispatcher.create( instance, feature );
        LaunchedContext context = regiment.create( "root/test/job/task", feature );
        Debug.greenfs( context.getProcess().getPID() );
    }
}

public class TestTaskTree {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object... cfg) -> {
            Randy randy = (Randy) Pinecone.sys().getTaskManager().add( new Randy( args, Pinecone.sys() ) );
            randy.vitalize();
            return 0;
        }, (Object[]) args );
    }
}


================================================
FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestUOFS.java
================================================
package com.sparta;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.util.Debug;
import com.pinecone.framework.util.json.JSONMaptron;
import com.pinecone.framework.util.json.JSONObject;
import com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;
import com.pinecone.hydra.storage.file.FileSystemConfig;
import com.pinecone.hydra.storage.file.KernelFileSystemConfig;
import com.pinecone.hydra.storage.file.UniformObjectFileSystem;
import com.pinecone.hydra.storage.file.external.GenericNativeExternalFolder;
import com.pinecone.hydra.storage.file.external.KenExternalFileSystemInstrument;
import com.pinecone.hydra.storage.file.entity.ClusterPage;
import com.pinecone.hydra.storage.file.entity.ElementNode;
import com.pinecone.hydra.storage.io.TitanFileChannelChanface;
import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.builder.ComponentUOFSBuilder;
import com.pinecone.hydra.storage.file.builder.UOFSBuilder;
import com.pinecone.hydra.storage.file.entity.FSNodeAllotment;
import com.pinecone.hydra.storage.file.entity.FileNode;
import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;
import com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;
import com.pinecone.hydra.storage.volume.KernelVolumeConfig;
import com.pinecone.hydra.storage.volume.UniformVolumeManager;
import com.pinecone.hydra.storage.volume.VolumeConfig;
import com.pinecone.hydra.storage.volume.VolumeManager;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.framework.util.id.GuidAllocator;
import com.pinecone.tritium.Tritium;
import com.pinecone.ulf.util.guid.GUIDs;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128V1;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128V2;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128V3;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128V4;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128V5;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128V6;
import com.pinecone.ulf.util.guid.i128.GuidAllocator128V7;
import com.pinecone.ulf.util.guid.i128.GUID128;

import java.io.File;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;

class Steve extends Tritium {
    public Steve( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Steve( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
        KOIMappingDriver koiMappingDriver = new FileMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        KOIMappingDriver koiVolumeMappingDriver = new VolumeMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        JSONObject jo = new JSONMaptron( "{ DefaultVolumeGuid:'1788a74-000136-0000-f8', DefaultTempFilePath: 'D:/文件系统/temp/' }" );
        FileSystemConfig config = new KernelFileSystemConfig( jo );
        VolumeConfig volumeConfig = new KernelVolumeConfig( jo );
        UOFSBuilder builder = new ComponentUOFSBuilder( koiMappingDriver, config );
        KOMFileSystem fileSystem = new UniformObjectFileSystem( koiMappingDriver, config );
//        FileSystemCacheConfig cacheConfig = new MappedFileSystemCacheConfig(new JSONMaptron("{redisHost: \"47.115.216.203\",redisPort: 6379, redisTimeOut: 2000, redisPassword: 1234abcd, redisDatabase: 0}"));
//        KOMFileSystem fileSystem = builder.registerComponentor( new UOFSCacheComponentor(cacheConfig) ).buildByRegistered();
        UniformVolumeManager volumeManager = new UniformVolumeManager( koiVolumeMappingDriver, volumeConfig );
        GuidAllocator guidAllocator = fileSystem.getGuidAllocator();
        //Debug.trace( fileSystem.get( GUIDs.GUID72( "020c8b0-000006-0002-54" ) ) );
        this.testInsert( fileSystem );
        //this.testUpload(fileSystem);
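        // One scenario is enabled per run; the remaining calls are kept as a menu of manual smoke tests.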
        //this.testDelete( fileSystem );
        //this.testChannelReceive( fileSystem, volumeManager );
        //this.testChannelExport( fileSystem, volumeManager );
        //this.testQuery( fileSystem );
        //this.testExternal( fileSystem );
        //this.testCopy( fileSystem,volumeManager );
        //this.testClusterPage( fileSystem );

//        GuidAllocator128 guidAllocator128 = new GuidAllocator128V1();
//        Debug.trace("Guid128V1:" + guidAllocator128.nextGUID() );
//        guidAllocator128 = new GuidAllocator128V2();
//        Debug.trace("Guid128V2:" + guidAllocator128.nextGUID() );
//        guidAllocator128 = new GuidAllocator128V3();
//        Debug.trace("Guid128V3:" + guidAllocator128.nextGUID() );
//        guidAllocator128 = new GuidAllocator128V4();
//        Debug.trace("Guid128V4:" + guidAllocator128.nextGUID() );
//        guidAllocator128 = new GuidAllocator128V5();
//        Debug.trace("Guid128V5:" + guidAllocator128.nextGUID() );
//        guidAllocator128 = new GuidAllocator128V6();
//        Debug.trace("Guid128V6:" + guidAllocator128.nextGUID() );
//        guidAllocator128 = new GuidAllocator128V7();
//        GUID128 g = (GUID128) guidAllocator128.nextGUID();
//        Debug.trace("Guid128V7:" + g, g.toUUID() );
//        Debug.trace( guidAllocator128.parse("00000000-0000-0000-0000-000000000000") );
    }

    private void testQuery( KOMFileSystem fileSystem ){
        Debug.trace( fileSystem.queryGUIDByPath("我的文件/总2127.mp4") );
    }

    private void testInsert( KOMFileSystem fileSystem ){
        fileSystem.affirmFolder("game/我的世界");
        fileSystem.affirmFileNode("game/我的世界/村民");
        fileSystem.affirmFileNode("game/我的世界/暮色森林/暮色惡魂");
        fileSystem.affirmFileNode("game/泰拉瑞亚/腐化之地/世界吞噬者");
        fileSystem.affirmFileNode("movie/生还危机/浣熊市");
    }

    private void testCopy( KOMFileSystem fileSystem, VolumeManager volumeManager ) {
//        fileSystem.copy("我的文件/图片","我的文件/我的文件",volumeManager);
        FileNode fileNode = fileSystem.getFileNode(GUIDs.GUID128("14bc124-00012c-0004-f8"));
        Debug.trace( fileNode.getPath() );
    }

    private void testExternal( KOMFileSystem fileSystem ){
        KenExternalFileSystemInstrument directFileSystemAccess = new KenExternalFileSystemInstrument(fileSystem);
//        GenericExternalSymbolic externalSymbolic = new GenericExternalSymbolic();
//        externalSymbolic.setName("xxx");
//        externalSymbolic.setGuid( fileSystem.getGuidAllocator().nextGUID() );
//        directFileSystemAccess.insertExternalSymbolic( externalSymbolic );
        ElementNode e = fileSystem.queryElement( "red" );
        //e.evinceFolder().createExternalSymbolic( "external" );
//        ExternalFile externalFile = (GenericExternalFile)directFileSystemAccess.queryElement("我的文件/external/《智育》概要设计.docx");
//        Debug.trace(externalFile.getPath());
        GenericNativeExternalFolder externalFolder = new GenericNativeExternalFolder(new File("D:/文件"));
        Debug.trace(externalFolder.getName());
        Debug.trace(externalFolder.toJSONString());
    }

    private void testDelete( KOMFileSystem fileSystem ){
        fileSystem.remove( "game" );
        fileSystem.remove( "movie" );
    }

    private void testChannelReceive( KOMFileSystem fileSystem, UniformVolumeManager volumeManager ) throws IOException {
        //LogicVolume volume = volumeManager.get(GUIDs.GUID72( "09d62c0-00037e-0006-c8" ));
        FSNodeAllotment fsNodeAllotment = fileSystem.getFSNodeAllotment();
        File file = new File("D:/井盖视频块/我的视频.mp4");
        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);
        TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );
        FileNode fileNode = fsNodeAllotment.newFileNode();
        fileNode.setDefinitionSize( file.length() );
        fileNode.setName( file.getName() );
        String destDirPath = "D:/井盖视频块/我的视频.mp4";
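        // Pump the source channel into the KOM file system through a 64-bit file receive entity.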
        TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( fileSystem, destDirPath, fileNode, titanFileChannelKChannel, volumeManager );
        fileSystem.receive( receiveEntity );
    }

    private void testChannelExport( KOMFileSystem fileSystem, UniformVolumeManager volumeManager ) throws IOException {
        FileNode fileNode = (FileNode) fileSystem.get(fileSystem.queryGUIDByPath("D:/井盖视频块/我的视频.mp4"));
        File file = new File("D:\\文件系统\\大文件\\我的视频.mp4");
        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);
        TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );
        TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( fileSystem, volumeManager, fileNode, kChannel );
        fileSystem.export( exportEntity );
    }

    private void testClusterPage( KOMFileSystem fileSystem ){
        ClusterPage clusterPage = fileSystem.fetchClustersByFileGuid( GUIDs.GUID128( "1632d6e-0001de-0003-e4" ) );
        long sum = clusterPage.getClusters();
        for ( long i = 0; i < sum; ++i ) {
            Debug.trace( clusterPage.getLocalCluster( i ) );
        }
    }
}

public class TestUOFS {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object... cfg) -> {
            Steve steve = (Steve) Pinecone.sys().getTaskManager().add( new Steve( args, Pinecone.sys() ) );
            steve.vitalize();
            return 0;
        }, (Object[]) args );
    }
}


================================================
FILE: Sparta/sparta-core-console/src/test/java/com/sparta/TestVolume.java
================================================
package com.sparta;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.util.Debug;
import com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;
import com.pinecone.hydra.storage.io.TitanFileChannelChanface;
import com.pinecone.hydra.storage.io.TitanInputStreamChanface;
import com.pinecone.hydra.storage.io.TitanOutputStreamChanface;
import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.UniformObjectFileSystem;
import com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;
import com.pinecone.hydra.storage.volume.UniformVolumeManager;
import com.pinecone.hydra.storage.volume.entity.ExporterEntity;
import com.pinecone.hydra.storage.volume.entity.LogicVolume;
import com.pinecone.hydra.storage.volume.entity.MountPoint;
import com.pinecone.hydra.storage.volume.entity.ReceiveEntity;
import com.pinecone.hydra.storage.volume.entity.SimpleVolume;
import com.pinecone.hydra.storage.volume.entity.SpannedVolume;
import com.pinecone.hydra.storage.TitanStorageExportIORequest;
import com.pinecone.hydra.storage.TitanStorageReceiveIORequest;
import com.pinecone.hydra.storage.volume.entity.VolumeAllotment;
import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;
import com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume;
import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;
import com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;
import com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;
import com.pinecone.hydra.storage.volume.entity.local.simple.export.TitanSimpleExportEntity64;
import com.pinecone.hydra.storage.volume.entity.local.simple.recevice.TitanSimpleReceiveEntity64;
import com.pinecone.hydra.storage.volume.entity.local.spanned.export.TitanSpannedExportEntity64;
import com.pinecone.hydra.storage.volume.entity.local.spanned.receive.SpannedReceiveEntity64;
import com.pinecone.hydra.storage.volume.entity.local.spanned.receive.TitanSpannedReceiveEntity64;
import com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;
import com.pinecone.hydra.system.ko.driver.KOIMappingDriver;
import com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;
import com.pinecone.tritium.Tritium;
import com.pinecone.slime.jelly.source.ibatis.IbatisClient;
import com.pinecone.ulf.util.guid.GUIDs;
import com.pinecone.framework.util.id.GuidAllocator;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;
import java.sql.SQLException;

class Alice extends Tritium {
    public Alice( String[] args, CascadeSystem parent ) {
        this( args, null, parent );
    }

    public Alice( String[] args, String szName, CascadeSystem parent ){
        super( args, szName, parent );
    }

    @Override
    public void vitalize () throws Exception {
        KOIMappingDriver koiMappingDriver = new VolumeMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        KOIMappingDriver koiFileMappingDriver = new FileMappingDriver(
                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter()
        );
        //KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver, null );
        UniformVolumeManager volumeTree = new UniformVolumeManager( koiMappingDriver, null );
        VolumeAllotment volumeAllotment = volumeTree.getVolumeAllotment();

        //this.testSimpleThread();
        //this.testDirectReceive( volumeTree );
        //this.testDirectExport( volumeTree );
        //Debug.trace( volumeTree.queryGUIDByPath( "逻辑卷三/逻辑卷一" ) );
        //volumeTree.get( GUIDs.GUID72( "05e44c4-00022b-0006-20" ) ).build();
        this.testStripedInsert( volumeTree );
        //this.testSpannedInsert( volumeTree );
        //this.testStripedReceive( volumeTree );
        //this.testStripedExport( volumeTree );
        //this.testHash( volumeTree );
        //this.testSpannedReceive( volumeTree );
        //this.testSpannedExport( volumeTree );
        //this.testSimpleReceive( volumeTree );
        //this.testSimpleExport( volumeTree );
        //this.testConsumer( volumeTree );
    }

    private void testStripedInsert( UniformVolumeManager volumeManager ) throws SQLException {
        VolumeAllotment volumeAllotment = volumeManager.getVolumeAllotment();
        VolumeCapacity64 volumeCapacity1 = volumeAllotment.newVolumeCapacity();
        volumeCapacity1.setDefinitionCapacity( 100*1024*1024 );
        VolumeCapacity64 volumeCapacity2 = volumeAllotment.newVolumeCapacity();
        volumeCapacity2.setDefinitionCapacity( 200*1024*1024 );

        LocalPhysicalVolume physicalVolume1 = volumeAllotment.newLocalPhysicalVolume();
        physicalVolume1.setType("PhysicalVolume");
        physicalVolume1.setVolumeCapacity( volumeCapacity1 );
        physicalVolume1.setName( "C" );
        MountPoint mountPoint1 = volumeAllotment.newMountPoint();
        mountPoint1.setMountPoint("D:/文件系统/簇1");
        physicalVolume1.setMountPoint( mountPoint1 );

        LocalPhysicalVolume physicalVolume2 = volumeAllotment.newLocalPhysicalVolume();
        physicalVolume2.setType("PhysicalVolume");
        physicalVolume2.setVolumeCapacity( volumeCapacity2 );
        physicalVolume2.setName( "D" );
        MountPoint mountPoint2 = volumeAllotment.newMountPoint();
        mountPoint2.setMountPoint( "D:/文件系统/簇2" );
        physicalVolume2.setMountPoint( mountPoint2 );

        VolumeCapacity64 logicVolumeCapacity1 = volumeAllotment.newVolumeCapacity();
        logicVolumeCapacity1.setDefinitionCapacity( 100*1024*1024 );
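        // Logical layout: two simple volumes (100 MiB and 200 MiB) striped into a 300 MiB striped volume.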
        VolumeCapacity64 logicVolumeCapacity2 = volumeAllotment.newVolumeCapacity();
        logicVolumeCapacity2.setDefinitionCapacity( 200*1024*1024 );
        VolumeCapacity64 logicVolumeCapacity3 = volumeAllotment.newVolumeCapacity();
        logicVolumeCapacity3.setDefinitionCapacity( 300*1024*1024 );

        volumeManager.insertPhysicalVolume( physicalVolume1 );
        volumeManager.insertPhysicalVolume( physicalVolume2 );

        LocalSimpleVolume simpleVolume1 = volumeAllotment.newLocalSimpleVolume();
        simpleVolume1.setName( "简单卷一" );
        simpleVolume1.setType( "SimpleVolume" );
        simpleVolume1.setVolumeCapacity( logicVolumeCapacity1 );
        LocalSimpleVolume simpleVolume2 = volumeAllotment.newLocalSimpleVolume();
        simpleVolume2.setName( "简单卷二" );
        simpleVolume2.setVolumeCapacity( logicVolumeCapacity2 );
        simpleVolume2.setType( "SimpleVolume" );
        LocalStripedVolume stripedVolume = volumeAllotment.newLocalStripedVolume();
        stripedVolume.setName( "条带卷" );
        stripedVolume.setVolumeCapacity( logicVolumeCapacity3 );
        stripedVolume.setType( "StripedVolume" );

        simpleVolume1.build();
        simpleVolume2.build();
        stripedVolume.build();
        simpleVolume1.extendLogicalVolume( physicalVolume1.getGuid() );
        simpleVolume2.extendLogicalVolume( physicalVolume2.getGuid() );
        stripedVolume.storageExpansion( simpleVolume1.getGuid() );
        stripedVolume.storageExpansion( simpleVolume2.getGuid() );
        //stripedVolume.storageExpansion( GUIDs.GUID72("0a21870-000251-0006-f0") );
    }

    private void testSpannedInsert( UniformVolumeManager volumeManager ) throws SQLException {
        VolumeAllotment volumeAllotment = volumeManager.getVolumeAllotment();
        VolumeCapacity64 volumeCapacity1 = volumeAllotment.newVolumeCapacity();
        volumeCapacity1.setDefinitionCapacity( 300*1024*1024 );
        VolumeCapacity64 volumeCapacity2 = volumeAllotment.newVolumeCapacity();
        volumeCapacity2.setDefinitionCapacity( 400*1024*1024 );

        LocalPhysicalVolume physicalVolume1 = volumeAllotment.newLocalPhysicalVolume();
        physicalVolume1.setType("PhysicalVolume");
        physicalVolume1.setVolumeCapacity( volumeCapacity1 );
        physicalVolume1.setName( "E" );
        MountPoint mountPoint1 = volumeAllotment.newMountPoint();
        mountPoint1.setMountPoint("D:/文件系统/簇4");
        physicalVolume1.setMountPoint( mountPoint1 );

        LocalPhysicalVolume physicalVolume2 = volumeAllotment.newLocalPhysicalVolume();
        physicalVolume2.setType("PhysicalVolume");
        physicalVolume2.setVolumeCapacity( volumeCapacity2 );
        physicalVolume2.setName( "F" );
        MountPoint mountPoint2 = volumeAllotment.newMountPoint();
        mountPoint2.setMountPoint( "D:/文件系统/簇5" );
        physicalVolume2.setMountPoint( mountPoint2 );

        VolumeCapacity64 logicVolumeCapacity1 = volumeAllotment.newVolumeCapacity();
        logicVolumeCapacity1.setDefinitionCapacity( 300*1024*1024 );
        VolumeCapacity64 logicVolumeCapacity2 = volumeAllotment.newVolumeCapacity();
        logicVolumeCapacity2.setDefinitionCapacity( 400*1024*1024 );
        VolumeCapacity64 logicVolumeCapacity3 = volumeAllotment.newVolumeCapacity();
        logicVolumeCapacity3.setDefinitionCapacity( 700*1024*1024 );

        LocalSimpleVolume simpleVolume1 = volumeAllotment.newLocalSimpleVolume();
        simpleVolume1.setName( "简单卷四" );
        simpleVolume1.setType( "SimpleVolume" );
        simpleVolume1.setVolumeCapacity( logicVolumeCapacity1 );
        LocalSimpleVolume simpleVolume2 = volumeAllotment.newLocalSimpleVolume();
        simpleVolume2.setName( "简单卷五" );
        simpleVolume2.setVolumeCapacity( logicVolumeCapacity2 );
        simpleVolume2.setType( "SimpleVolume" );
        LocalSpannedVolume spannedVolume = volumeAllotment.newLocalSpannedVolume();
        spannedVolume.setName( "跨区卷" );
        spannedVolume.setVolumeCapacity( logicVolumeCapacity3 );
        spannedVolume.setType( "spannedVolume" );
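        // Persist the physical volumes first; the logical volumes are then built and expanded on top of them.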
physicalVolume1 ); volumeManager.insertPhysicalVolume( physicalVolume2 ); simpleVolume1.build(); simpleVolume2.build(); simpleVolume1.extendLogicalVolume( physicalVolume1.getGuid() ); simpleVolume2.extendLogicalVolume( physicalVolume2.getGuid() ); spannedVolume.storageExpansion( simpleVolume1.getGuid() ); spannedVolume.storageExpansion( simpleVolume2.getGuid() ); spannedVolume.build(); } void testStripedReceive( UniformVolumeManager volumeManager ) throws IOException { GuidAllocator guidAllocator = volumeManager.getGuidAllocator(); LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath("条带卷")); TitanStorageReceiveIORequest titanReceiveStorageObject = new TitanStorageReceiveIORequest(); File file = new File("D:/井盖视频块/我的视频.mp4"); titanReceiveStorageObject.setName( "我的视频" ); titanReceiveStorageObject.setSize( file.length() ); titanReceiveStorageObject.setStorageObjectGuid( guidAllocator.nextGUID() ); FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ); TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel ); // FileInputStream stream = new FileInputStream( file ); // TitanInputStreamChanface kChannel = new TitanInputStreamChanface(stream); UnifiedTransmitConstructor unifiedTransmitConstructor = new UnifiedTransmitConstructor(); ReceiveEntity entity = unifiedTransmitConstructor.getReceiveEntity(volume.getClass(), volumeManager, titanReceiveStorageObject, kChannel, volume); //TitanStripedReceiveEntity64 receiveEntity = new TitanStripedReceiveEntity64( volumeManager, titanReceiveStorageObject, kChannel, (StripedVolume) volume); volume.receive( entity ); //StorageIOResponse storageIOResponse = volume.channelReceive(titanReceiveStorageObject, titanKChannel); } void testSpannedReceive( UniformVolumeManager volumeManager ) throws IOException { GuidAllocator guidAllocator = volumeManager.getGuidAllocator(); LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath("跨区卷")); TitanStorageReceiveIORequest titanReceiveStorageObject = new TitanStorageReceiveIORequest(); File file = new File("D:/井盖视频块/我的视频.mp4"); titanReceiveStorageObject.setName( "视频" ); titanReceiveStorageObject.setSize( file.length() ); titanReceiveStorageObject.setStorageObjectGuid( guidAllocator.nextGUID() ); FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ); TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel ); SpannedReceiveEntity64 receiveEntity = new TitanSpannedReceiveEntity64( volumeManager, titanReceiveStorageObject, kChannel, (SpannedVolume) volume); volume.receive( receiveEntity ); } void testSimpleReceive( UniformVolumeManager volumeManager ) throws IOException { GuidAllocator guidAllocator = volumeManager.getGuidAllocator(); LogicVolume volume = volumeManager.get(GUIDs.GUID128("12146c0-0000ca-0000-8c")); TitanStorageReceiveIORequest titanReceiveStorageObject = new TitanStorageReceiveIORequest(); File file = new File("C:/Users/29796/OneDrive/图片/R-C.jpg"); titanReceiveStorageObject.setName( "视频" ); titanReceiveStorageObject.setSize( file.length() ); titanReceiveStorageObject.setStorageObjectGuid( guidAllocator.nextGUID() ); // FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ); // TitanFileChannelKChannel kChannel = new TitanFileChannelKChannel(channel); FileInputStream fileInputStream = new FileInputStream( file ); TitanInputStreamChanface kChannel = new TitanInputStreamChanface( fileInputStream ); TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( 
volumeManager, titanReceiveStorageObject, kChannel, (SimpleVolume) volume); volume.randomReceive( receiveEntity,0,file.length() ); } void testStripedExport( UniformVolumeManager volumeManager ) throws Exception { File file = new File("D:\\文件系统\\大文件\\我的视频.mp4"); File originalFile = new File( "D:/井盖视频块/我的视频.mp4" ); FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND); TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel ); // FileOutputStream stream = new FileOutputStream( file ); // TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(stream); LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath("条带卷")); TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest(); Debug.trace(originalFile.length()); titanExportStorageObject.setSize( originalFile.length() ); titanExportStorageObject.setStorageObjectGuid( GUIDs.GUID128("0d96fa2-000013-0001-f0") ); //titanExportStorageObject.setSourceName("D:/文件系统/簇1/文件夹/视频_0662cf6-0000cd-0001-10.storage"); //volume.channelExport( titanExportStorageObject, titanFileChannelKChannel); UnifiedTransmitConstructor unifiedTransmitConstructor = new UnifiedTransmitConstructor(); ExporterEntity entity = unifiedTransmitConstructor.getExportEntity(volume.getClass(), volumeManager, titanExportStorageObject, kChannel, volume); //TitanStripedExportEntity64 exportEntity = new TitanStripedExportEntity64( volumeManager, titanExportStorageObject, kChannel, (StripedVolume) volume); volume.export( entity ); } void testSpannedExport( UniformVolumeManager volumeManager ) throws IOException { File file = new File("D:\\文件系统\\大文件\\我的视频.mp4"); File originalFile = new File( "D:/井盖视频块/我的视频.mp4" ); FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND); TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel ); LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath("跨区卷")); TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest(); titanExportStorageObject.setSize( originalFile.length() ); titanExportStorageObject.setStorageObjectGuid( GUIDs.GUID128("0dc08ee-000129-0001-d0") ); //titanExportStorageObject.setSourceName("D:\\文件系统\\簇4\\视频_09ab8ac-0003d7-0001-04.storage"); TitanSpannedExportEntity64 exportEntity = new TitanSpannedExportEntity64( volumeManager, titanExportStorageObject, kChannel, (SpannedVolume) volume); volume.export( exportEntity ); } void testSimpleExport( UniformVolumeManager volumeManager ) throws IOException { File file = new File("D:\\文件系统\\大文件\\我的图片2.jpg"); File originalFile = new File( "C:/Users/29796/OneDrive/图片/R-C.jpg" ); // FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND); // TitanFileChannelKChannel kChannel = new TitanFileChannelKChannel(channel); LogicVolume volume = volumeManager.get(GUIDs.GUID128("12146c0-0000ca-0000-8c")); TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest(); titanExportStorageObject.setSize( originalFile.length() - 1024 * 200 ); titanExportStorageObject.setStorageObjectGuid( GUIDs.GUID128("1567f8c-000038-0006-ac") ); titanExportStorageObject.setSourceName( "D:/文件系统/簇1/R-C.jpg_1567f8c-000038-0006-ac.storage" ); FileOutputStream fileOutputStream = new FileOutputStream( file ); TitanOutputStreamChanface kChannel = new 
TitanOutputStreamChanface( fileOutputStream );
        TitanSimpleExportEntity64 exportEntity = new TitanSimpleExportEntity64( volumeManager, titanExportStorageObject, kChannel, (SimpleVolume) volume );
        //volume.export( exportEntity,0,originalFile.length() - 1024 * 200 );
        volume.export( exportEntity, originalFile.length() - 1024 * 200, 1024 * 200 );
    }

    void testHash( UniformVolumeManager volumeManager ) {
        KenVolumeFileSystem kenVolumeFileSystem = new KenVolumeFileSystem(volumeManager);
        // for( int i = 0; i < 1000000; i++ ){
        //     GUID128 guid72 = GUIDs.Dummy128();
        //     int hash = kenVolumeFileSystem.hashStorageObjectID(guid72, 2);
        //     if( hash != 0 && hash != 1 ){
        //         Debug.trace( guid72 );
        //     }
        // }
        Debug.trace( kenVolumeFileSystem.hashStorageObjectID( GUIDs.GUID128( "0860ff4-0003ac-0000-cc" ), 2 ) );
    }

    void testConsumer( UniformVolumeManager volumeManager ) {
        LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath("条带卷"));
        UnifiedTransmitConstructor unifiedTransmitConstructor = new UnifiedTransmitConstructor();
        Debug.trace( volume.getClass() );
        Debug.trace( unifiedTransmitConstructor.getReceiveEntity( volume.getClass() ) );
    }
}

public class TestVolume {
    public static void main( String[] args ) throws Exception {
        Pinecone.init( (Object...cfg )->{
            Alice alice = (Alice) Pinecone.sys().getTaskManager().add( new Alice( args, Pinecone.sys() ) );
            alice.vitalize();
            return 0;
        }, (Object[]) args );
    }
}

================================================
FILE: Sparta/sparta-core-console/src/test/java/test.java
================================================
import org.junit.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ContextConfiguration;

@SpringBootTest
@ContextConfiguration(locations = { "classpath:uid/default-uid-spring.xml" })
public class test {
    @Test
    public void test(){ }
}

================================================
FILE: Sparta/sparta-uac-console/pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>sparta</artifactId>
        <groupId>com.walnuts.sparta</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.walnut.sparta.account.console</groupId>
    <artifactId>sparta-uac-console</artifactId>
    <version>2.1.0</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency><groupId>junit</groupId><artifactId>junit</artifactId><version>3.8.1</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone</groupId><artifactId>pinecone</artifactId><version>2.5.1</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kom.driver.default</groupId><artifactId>hydra-kom-default-driver</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.walnut.sparta.api.uac</groupId><artifactId>sparta-api-uac</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.tritium</groupId><artifactId>hydra-system-tritium</artifactId><version>2.1.0</version><scope>test</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-runtime</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.kernel</groupId><artifactId>hydra-framework-config</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.summer.springram</groupId><artifactId>springram</artifactId><version>2.1.0</version><scope>compile</scope></dependency>
        <dependency><groupId>com.pinecone.hydra.sdk.thrift</groupId><artifactId>hydra-lib-thrift-sdk</artifactId><version>1.2.1</version><scope>compile</scope></dependency>
        <dependency><groupId>org.springframework.boot</groupId><artifactId>spring-boot-test</artifactId><scope>test</scope></dependency>
        <dependency><groupId>org.testng</groupId><artifactId>testng</artifactId><version>RELEASE</version><scope>test</scope></dependency>
        <dependency><groupId>org.apache.thrift</groupId><artifactId>libthrift</artifactId><version>0.18.0</version><scope>compile</scope></dependency>
        <dependency><groupId>io.jsonwebtoken</groupId><artifactId>jjwt-impl</artifactId><version>0.11.2</version><scope>runtime</scope></dependency>
        <dependency><groupId>io.jsonwebtoken</groupId><artifactId>jjwt-jackson</artifactId><version>0.11.2</version><scope>runtime</scope></dependency>
        <dependency><groupId>io.jsonwebtoken</groupId><artifactId>jjwt-api</artifactId><version>0.11.2</version></dependency>
    </dependencies>
</project>

================================================
FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/SpartaAccountService.java
================================================
package com.walnut.sparta.account;

import com.pinecone.framework.system.executum.Processum;
import com.pinecone.hydra.servgram.Servgram;
import com.pinecone.hydra.system.component.Slf4jTraceable;
import com.pinecone.summer.spring.Springron;
import java.io.IOException;
import java.nio.file.Path;

public class SpartaAccountService extends Springron implements Slf4jTraceable {
    public SpartaAccountService( String szName, Processum parent, String[] springbootArgs ) {
        super( szName,
parent, springbootArgs ); this.mSpringKernel.setPrimarySources( SpartaBoot.class ); } public SpartaAccountService( String szName, Processum parent ) { this( szName, parent, new String[0] ); } @Override protected void loadConfig() { this.mServgramList = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey ); Object dyServgramConf = this.mServgramList.get( this.gramName() ); if( dyServgramConf instanceof String ) { try{ this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) ); } catch ( IOException ignore ) { this.getLogger().info( "[Notice] Spring will use the default config `application.yaml`." ); } } else { this.mServgramConf = this.mServgramList.getChild( this.gramName() ); } } } ================================================ FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/SpartaBoot.java ================================================ package com.walnut.sparta.account; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class SpartaBoot { } ================================================ FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/api/controller/v2/AccountController.java ================================================ package com.walnut.sparta.account.api.controller.v2; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.JSON; import com.pinecone.hydra.account.AccountManager; import com.pinecone.hydra.account.entity.ACNodeAllotment; import com.pinecone.hydra.account.entity.Account; import com.pinecone.hydra.account.entity.Domain; import com.pinecone.hydra.account.entity.GenericAccount; import com.pinecone.hydra.account.entity.GenericAuthorization; import com.pinecone.hydra.account.entity.GenericCredential; import com.pinecone.hydra.account.entity.GenericDomain; import com.pinecone.hydra.account.entity.GenericGroup; import com.pinecone.hydra.account.entity.GenericPrivilege; import com.pinecone.hydra.account.entity.GenericRole; import com.pinecone.hydra.account.entity.Group; import com.pinecone.hydra.account.entity.Privilege; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.sparta.account.domian.vo.UserLoginVO; import com.walnut.sparta.account.interceptor.RequiresAuthentication; import com.walnut.sparta.account.properties.JwtProperties; import com.walnut.sparta.account.util.JwtUtil; import com.walnut.sparta.account.api.response.BasicResultResponse; import com.walnut.sparta.account.domian.vo.AccountLoginVO; import org.springframework.beans.BeanUtils; import org.springframework.web.bind.annotation.*; import javax.annotation.Resource; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @RestController @RequestMapping( "/api/v2/account" ) @CrossOrigin public class AccountController { @Resource private AccountManager primaryAccount; private JwtProperties jwtProperties; public AccountController(JwtProperties jwtProperties) { this.jwtProperties = jwtProperties; } @PutMapping("/create/domain") @RequiresAuthentication public BasicResultResponse createDomain( @RequestParam("doMainName") String doMainName ){ ACNodeAllotment allotment = this.primaryAccount.getAllotment(); Domain domain = allotment.newDomain(); domain.setName( doMainName ); this.primaryAccount.put(domain); return 
BasicResultResponse.success(); } @PutMapping("/update/domain") @RequiresAuthentication public BasicResultResponse updateDomain( @RequestParam("guid") String guid, @RequestParam("name") String name) { // 查询域是否存在 GenericDomain domain = new GenericDomain(); domain.setGuid(GUIDs.GUID128(guid)); domain.setName(name); // 更新域名称 this.primaryAccount.updateDomain(domain); // 保存更新 return BasicResultResponse.success(true); } @DeleteMapping("remove/domain") @RequiresAuthentication public BasicResultResponse removeDomain( @RequestParam("domainGuid") String domainGuid ){ Collection children = this.primaryAccount.getChildren( GUIDs.GUID128(domainGuid)); for (TreeNode treeNode : children) { this.primaryAccount.remove(treeNode.getGuid()); } this.primaryAccount.remove( GUIDs.GUID128(domainGuid) ); return BasicResultResponse.success(); } @PutMapping("/create/group") @RequiresAuthentication public BasicResultResponse createGroup( @RequestParam("parentGuid") String parentGuid, @RequestParam("groupName") String groupName ){ GenericGroup genericGroup = new GenericGroup(); genericGroup.setName(groupName); this.primaryAccount.put( genericGroup ); this.primaryAccount.addChildren(GUIDs.GUID128(parentGuid), genericGroup.getGuid() ); return BasicResultResponse.success(); } @DeleteMapping("/remove/group") @RequiresAuthentication public BasicResultResponse removeGroup( @RequestParam("groupGuid") String groupGuid ){ Collection children = this.primaryAccount.getChildren(GUIDs.GUID128(groupGuid)); System.out.println(children.isEmpty()); System.out.println(groupGuid); if (children.isEmpty()) { this.primaryAccount.remove(GUIDs.GUID128(groupGuid)); return BasicResultResponse.success("删除成功"); } return BasicResultResponse.error("Group is not empty"); } @PutMapping("/update/group") @RequiresAuthentication public BasicResultResponse updateGroup( @RequestParam("groupGuid") String groupGuid, @RequestParam("groupName") String groupName) { // 查询组是否存在 Group group = this.primaryAccount.queryGroupByGroupGuid(GUIDs.GUID128(groupGuid)); if (group != null) { group.setName(groupName); // 更新组名称 this.primaryAccount.updateGroup(group); // 保存更新 return BasicResultResponse.success(true); } else { return BasicResultResponse.error("Group not found"); } } @GetMapping("/query/users/byGroup") @RequiresAuthentication public String queryUsersByGroup(@RequestParam("groupGuid") String groupGuid) { List accounts = new ArrayList<>(); Collection guids = this.primaryAccount.fetchChildrenGuids(GUIDs.GUID128(groupGuid)); for (GUID guid : guids) { accounts.add((GenericAccount) this.primaryAccount.queryAccountByUserGuid(guid)); } return BasicResultResponse.success(accounts).toJSONString(); } @GetMapping("/query/path") @RequiresAuthentication public String queryNodeByPath( @RequestParam("path") String path ){ GUID guid = this.primaryAccount.queryGUIDByPath(path); return BasicResultResponse.success(this.primaryAccount.get(guid)).toJSONString(); } @PutMapping("/create/account") @RequiresAuthentication public String createAccount( @RequestParam("userName") String userName, @RequestParam("nickName") String nickName, @RequestParam("kernelCredential") String kernelCredential, @RequestParam("kernelGroupType") String kernelGroupType, @RequestParam("role") String role, @RequestParam("parentGuid") String parentGuid) { LocalDateTime now = LocalDateTime.now(); DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); String formattedTime = now.format(formatter); System.out.println("Account created at: " + formattedTime); GenericAccount account = new 
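// NOTE: account creation below is a multi-step sequence: a uniqueness check on the
// user name, insertion of a GenericCredential, then one GenericAuthorization record
// per privilege GUID taken from the selected role's comma-separated privilegeGuids
// string, and finally addChildren() attaches the new account under the parent node.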
GenericAccount(this.primaryAccount); account.setName(userName); System.out.println(account.getName()); if (this.primaryAccount.queryAccountGuidByName(account.getName()).isEmpty()) { account.setNickName(nickName); GenericCredential credential = new GenericCredential( this.primaryAccount.getGuidAllocator().nextGUID(), userName, kernelCredential, now, now, "TextPassword" ); this.primaryAccount.insertCredential(credential); account.setCredentialGuid(credential.getGuid()); account.setKernelCredential(kernelCredential); account.setKernelGroupType(kernelGroupType); account.setCreateTime(now); account.setUpdateTime(now); account.setRole(role); this.primaryAccount.put(account); List list = this.primaryAccount.queryAllRoles(); for (GenericRole roles : list) { if (roles.getName().equals(role)) { String[] privilegeGuids = roles.getPrivilegeGuids().split(","); for (String privilegeGuid : privilegeGuids) { GenericAuthorization authorization = new GenericAuthorization( account.getGuid(), account.getName(), credential.getGuid(), kernelGroupType, now, now ); authorization.setPrivilegeGuid(GUIDs.GUID128(privilegeGuid.trim())); // 去除可能的空格 authorization.setGuid(this.primaryAccount.getGuidAllocator().nextGUID()); this.primaryAccount.insertAuthorization(authorization); } break; } } this.primaryAccount.addChildren(GUIDs.GUID128(parentGuid), account.getGuid()); return BasicResultResponse.success(account).toJSONString(); } return BasicResultResponse.error("Account already exists").toJSONString(); } @PutMapping("/update/account") @RequiresAuthentication public BasicResultResponse updateAccount( @RequestParam("userGuid") String userGuid, @RequestParam("nickName") String nickName, @RequestParam("kernelCredential") String kernelCredential, @RequestParam("kernelGroupType") String kernelGroupType, @RequestParam("role") String role, @RequestParam("newUsername") String newUsername ) { LocalDateTime now = LocalDateTime.now(); Account account = this.primaryAccount.queryAccountByUserGuid(GUIDs.GUID128(userGuid)); if (account != null) { account.setNickName(nickName); account.setKernelCredential(kernelCredential); account.setKernelGroupType(kernelGroupType); account.setCreateTime(now); account.setUpdateTime(now); account.setRole(role); account.setName(newUsername); // 允许修改用户名 account.setGuid(GUIDs.GUID128(userGuid)); System.out.println(account); this.primaryAccount.updateAccount(account); } return BasicResultResponse.success(); } @DeleteMapping("/remove/account") @RequiresAuthentication public BasicResultResponse removeAccount( @RequestParam("userGuid") String userGuid ) { Account account=this.primaryAccount.queryAccountByUserGuid(GUIDs.GUID128(userGuid)); List authorizations = this.primaryAccount.queryAuthorizationByUserGuid(account.getGuid()); for (GenericAuthorization authorization : authorizations) { this.primaryAccount.remove(authorization.getGuid()); } this.primaryAccount.remove(account.getGuid()); return BasicResultResponse.success(true); } @PutMapping("/login") public String login( @RequestParam("userName") String userName, @RequestParam("kernelCredential") String kernelCredential ) { // 查询用户 GUID List userGuidList = this.primaryAccount.queryAccountGuidByName(userName); if (userGuidList == null || userGuidList.isEmpty()) { return BasicResultResponse.error("Account not found").toJSONString(); } GUID userGuid = userGuidList.get(0); // 用户名是唯一的 boolean isLogin = this.primaryAccount.queryAccountByGuid(userGuid, kernelCredential); if (!isLogin) { return BasicResultResponse.error("Account or kernelCredential 
error").toJSONString();
        }
        // Login succeeded; issue a JWT token.
        Map<String, Object> claims = new HashMap<>();
        claims.put("userId", userGuid.toString()); // userGuid is assumed to uniquely identify the user
        // JWT settings; these should come from the configuration file (JwtProperties) but are hardcoded here
        String userSecretKey = "1212121hsodhsdhasdhsaldhsalhdlsahdlsad"; // replace with the actual secret key
        long userTtl = 3600000; // replace with the actual time-to-live
        System.out.println(this.jwtProperties.getUserSecretKey());
        System.out.println("User Secret Key: " + userSecretKey);
        System.out.println("User TTL: " + userTtl);
        String token = JwtUtil.createJWT(userSecretKey, userTtl, claims);
        UserLoginVO userLoginVo = new UserLoginVO();
        userLoginVo.setUserid(userGuid.toString());
        userLoginVo.setUserName(userName);
        userLoginVo.setUserToken(token);
        System.out.println(userLoginVo);
        return BasicResultResponse.success(userLoginVo).toJSONString();
    }

    @GetMapping("/query/allAccount") @RequiresAuthentication
    public BasicResultResponse queryAllAccount() {
        List accounts = this.primaryAccount.queryAllAccount();
        return BasicResultResponse.success(accounts.toString());
    }

    @PutMapping("/query/Authorization/ByUserName") @RequiresAuthentication
    public String queryAuthorizationByUserName( @RequestParam("userName") String userName ) {
        List<GUID> userGuidList = this.primaryAccount.queryAccountGuidByName(userName);
        if (userGuidList.isEmpty()) {
            return BasicResultResponse.error("Account not found").toJSONString();
        }
        GUID userGuid = userGuidList.get(0); // assuming user names are unique
        List<GenericAuthorization> authorizations = this.primaryAccount.queryAuthorizationByUserGuid(userGuid);
        return BasicResultResponse.success(authorizations).toJSONString();
    }

    @GetMapping("/query/domain") @RequiresAuthentication
    public BasicResultResponse queryDomain() {
        List domains = this.primaryAccount.queryAllDomain();
        return BasicResultResponse.success(domains.toString());
    }

    @GetMapping("/query/account") @RequiresAuthentication
    public BasicResultResponse queryAccount( @RequestParam("userName") String userName ) {
        List<GUID> userGuidList = this.primaryAccount.queryAccountGuidByName(userName);
        System.out.println(userGuidList);
        if (userGuidList.isEmpty()) {
            return BasicResultResponse.error("Account not found");
        }
        GUID userGuid = userGuidList.get(0); // assuming user names are unique
        GenericAccount account = (GenericAccount) this.primaryAccount.get(userGuid);
        AccountLoginVO accountLoginVo = new AccountLoginVO();
        BeanUtils.copyProperties(account, accountLoginVo);
        return BasicResultResponse.success(accountLoginVo.toJSONString());
    }

    @GetMapping("/query/domain/groups") @RequiresAuthentication
    public BasicResultResponse queryDomainGroups( @RequestParam("domainGuid") String domainGuid ) {
        try {
            Collection<TreeNode> children = this.primaryAccount.getChildren(GUIDs.GUID128(domainGuid));
            List<Map<String, String>> groups = new ArrayList<>();
            for (TreeNode child : children) {
                if (child instanceof Group) {
                    Group group = this.primaryAccount.queryGroupByGroupGuid(child.getGuid());
                    Map<String, String> groupInfo = new HashMap<>();
                    groupInfo.put("domainName", this.primaryAccount.queryDomainNameByGuid(GUIDs.GUID128(domainGuid)));
                    System.out.println(this.primaryAccount.queryDomainNameByGuid(GUIDs.GUID128(domainGuid)));
                    groupInfo.put("groupName", group.getName());
                    groupInfo.put("groupGuid", group.getGuid().toString());
                    groups.add(groupInfo);
                }
            }
            return BasicResultResponse.success(JSON.stringify(groups));
        } catch (Exception e) {
            return BasicResultResponse.error("Failed to query groups: " + e.getMessage());
        }
    }

    @GetMapping("/query/group") @RequiresAuthentication
    public BasicResultResponse queryDomainGroup( @RequestParam("domainGuid") String domainGuid ) {
        Collection<TreeNode> children = this.primaryAccount.getChildren(GUIDs.GUID128(domainGuid));
        List<Group> groups = new ArrayList<>();
        for
(TreeNode child : children) { if (child instanceof Group) { groups.add( this.primaryAccount.queryGroupByGroupGuid(child.getGuid())); } } return BasicResultResponse.success(groups.toString()); } @PutMapping("/create/privilege") @RequiresAuthentication public BasicResultResponse createPrivilege( @RequestParam("token") String token, @RequestParam("name") String name, @RequestParam("privilegeCode") String privilegeCode, @RequestParam("type") String type, @RequestParam(value = "parentPrivGuid", required = false) String parentPrivGuid) { System.out.println(token); GenericPrivilege privilege = new GenericPrivilege( this.primaryAccount.getGuidAllocator().nextGUID(), token, name, privilegeCode, LocalDateTime.now(), LocalDateTime.now(), type ); // 检查parentPrivGuid是否为空或空字符串 if (parentPrivGuid != null && !parentPrivGuid.isEmpty()) { privilege.setParentPrivGuid(GUIDs.GUID128(parentPrivGuid)); } else { privilege.setParentPrivGuid(null); } System.out.println(privilege.getParentPrivGuid()); this.primaryAccount.insertPrivilege(privilege); return BasicResultResponse.success(); } @PutMapping("/update/privilege") @RequiresAuthentication public String updatePrivilege( @RequestParam("guid") String guid, @RequestParam("name") String name, @RequestParam("token") String token, @RequestParam("type") String type, @RequestParam("privilegeCode") String privilegeCode) { Privilege privilege = this.primaryAccount.queryPrivilegeByGuid(GUIDs.GUID128(guid)); if (privilege != null) { privilege.setName(name); privilege.setToken(token); privilege.setType(type); privilege.setPrivilegeCode(privilegeCode); this.primaryAccount.updatePrivilege(privilege); return BasicResultResponse.success(privilege).toJSONString(); } return BasicResultResponse.error("权限不存在").toJSONString(); } @DeleteMapping("/remove/privilege") @RequiresAuthentication public BasicResultResponse removePrivilege( @RequestParam("privilegeGuid") String privilegeGuid) { this.primaryAccount.removePrivilege(GUIDs.GUID128(privilegeGuid)); return BasicResultResponse.success(); } @GetMapping("/List/privilege") @RequiresAuthentication public String listPrivilege( ) { List privileges = this.primaryAccount.queryAllPrivileges(); return BasicResultResponse.success(privileges).toJSONString(); } @PutMapping("/create/role") @RequiresAuthentication public BasicResultResponse createRole( @RequestParam("roleName") String roleName, @RequestParam("roleType") String roleType, @RequestParam("privilegeGuids") String privilegeGuids) { GenericRole role = new GenericRole( roleName, privilegeGuids, LocalDateTime.now(), LocalDateTime.now(), roleType ); this.primaryAccount.insertRole(role); return BasicResultResponse.success(); } @PutMapping("/update/role") @RequiresAuthentication public BasicResultResponse updateRole( @RequestParam("roleName") String roleName, @RequestParam("roleType") String roleType, @RequestParam("privilegeGuids") String privilegeGuids) { GenericRole role = new GenericRole( roleName, privilegeGuids, LocalDateTime.now(), LocalDateTime.now(), roleType ); this.primaryAccount.updateRole(role); return BasicResultResponse.success(); } @GetMapping("/query/all/role") @RequiresAuthentication public String queryAllRole() { List roles = this.primaryAccount.queryAllRoles(); System.out.println(roles); GenericRole role = roles.get(0); System.out.println(role.getPrivilegeGuids()); return BasicResultResponse.success(roles).toJSONString(); } @PutMapping("/create/Authorization") @RequiresAuthentication public BasicResultResponse createAuthorization( @RequestParam("userName") String userName, 
@RequestParam("privilegeToken") String privilegeToken,
            @RequestParam("privilegeGuid") String privilegeGuids ) {
        List<GUID> userGuidList = this.primaryAccount.queryAccountGuidByName(userName);
        // Check for existence before dereferencing the first element
        if (userGuidList.isEmpty()) {
            return BasicResultResponse.error("Account not found");
        }
        GUID userGuid = userGuidList.get(0);
        Account account = this.primaryAccount.queryAccountByUserGuid(userGuid);
        GUID credentialGuid = account.getCredentialGuid();
        System.out.println(userGuid);
        GenericAuthorization authorization = new GenericAuthorization( userGuid, userName, credentialGuid, privilegeToken, LocalDateTime.now(), LocalDateTime.now() );
        authorization.setGuid(this.primaryAccount.getGuidAllocator().nextGUID());
        authorization.setPrivilegeGuid(GUIDs.GUID128(privilegeGuids));
        this.primaryAccount.insertAuthorization(authorization);
        return BasicResultResponse.success();
    }

    @DeleteMapping("/delete/Authorization") @RequiresAuthentication
    public BasicResultResponse deleteAuthorization( @RequestParam("authorizationGuid") String guid ) {
        this.primaryAccount.removeAuthorizationByGuid(GUIDs.GUID128(guid));
        return BasicResultResponse.success();
    }

    @DeleteMapping("/remove/role") @RequiresAuthentication
    public BasicResultResponse removeRole( @RequestParam("id") int id ) {
        this.primaryAccount.removeRole(id);
        return BasicResultResponse.success();
    }

    @GetMapping("/query/Authorization") @RequiresAuthentication
    public String queryAuthorization() {
        List authorizations = this.primaryAccount.queryAllAuthorization();
        return BasicResultResponse.success(authorizations).toJSONString();
    }

    @PutMapping("/update/authorization") @RequiresAuthentication
    public BasicResultResponse updateAuthorization( @RequestParam("guid") String guid ) {
        try {
            // Logic for updating the authorization record
            this.primaryAccount.updateAuthorization(GUIDs.GUID128(guid));
            return BasicResultResponse.success();
        } catch (Exception e) {
            return BasicResultResponse.error("更新授权失败: " + e.getMessage());
        }
    }
}

================================================
FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/api/controller/xxx.java
================================================
package com.walnut.sparta.account.api.controller;

public class xxx {
}

================================================
FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/api/response/BasicResultResponse.java
================================================
package com.walnut.sparta.account.api.response;

import com.pinecone.framework.system.prototype.Pinenut;
import com.pinecone.framework.unit.KeyValue;
import com.pinecone.framework.util.json.JSONEncoder;
import org.springframework.http.HttpStatus;
import java.io.Serializable;

public class BasicResultResponse<T> implements Pinenut, Serializable {
    private Integer code = HttpStatus.OK.value();
    private String msg;  // error message
    private T data;      // payload data

    public static <T> BasicResultResponse<T> success() {
        BasicResultResponse<T> result = new BasicResultResponse<>();
        result.code = HttpStatus.OK.value();
        return result;
    }

    public static <T> BasicResultResponse<T> successMsg( String msg ) {
        BasicResultResponse<T> result = new BasicResultResponse<>();
        result.msg = msg;
        result.code = HttpStatus.OK.value();
        return result;
    }

    public static <T> BasicResultResponse<T> success( T object ) {
        BasicResultResponse<T> result = new BasicResultResponse<>();
        result.data = object;
        result.code = HttpStatus.OK.value();
        return result;
    }

    public static <T> BasicResultResponse<T> error( String msg ) {
        BasicResultResponse<T> result = new BasicResultResponse<>();
        result.msg = msg;
        result.code = HttpStatus.INTERNAL_SERVER_ERROR.value();
return result; } /** * 获取 * @return code */ public Integer getCode() { return this.code; } /** * 设置 * @param code */ public void setCode(Integer code) { this.code = code; } /** * 获取 * @return msg */ public String getMsg() { return this.msg; } /** * 设置 * @param msg */ public void setMsg(String msg) { this.msg = msg; } /** * 获取 * @return data */ public T getData() { return this.data; } /** * 设置 * @param data */ public void setData(T data) { this.data = data; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "code" , this.code ), new KeyValue<>( "msg" , this.msg ), new KeyValue<>( "data" , this.data ) } ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/config/WebConfig.java ================================================ package com.walnut.sparta.account.config; import com.walnut.sparta.account.interceptor.AuthenticationInterceptor; import org.springframework.context.annotation.Configuration; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; @Configuration public class WebConfig implements WebMvcConfigurer { @Override public void addInterceptors(InterceptorRegistry registry) { registry.addInterceptor(new AuthenticationInterceptor()) .addPathPatterns("/**") // 拦截所有路径 .excludePathPatterns("/api/v2/account/login"); // 排除登录接口 } } ================================================ FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/domian/vo/AccountLoginVO.java ================================================ package com.walnut.sparta.account.domian.vo; import com.pinecone.framework.system.prototype.Pinenut; public class AccountLoginVO implements Pinenut { private String userName; private String nickName; public AccountLoginVO(String userName, String nickName) { this.userName = userName; this.nickName = nickName; } public String getUserName() { return userName; } public void setUserName(String userName) { this.userName = userName; } public String getNickName() { return nickName; } public void setNickName(String nickName) { this.nickName = nickName; } public AccountLoginVO() { } } ================================================ FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/domian/vo/QueryallUserVO.java ================================================ package com.walnut.sparta.account.domian.vo; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.account.AccountManager; import com.pinecone.hydra.account.entity.Account; import com.pinecone.hydra.account.entity.ArchElementNode; import java.time.LocalDateTime; public class QueryallUserVO extends ArchElementNode implements Account{ protected long enumId; protected String name; protected GUID guid; protected String nickName; protected String kernelGroupType; protected String role; protected LocalDateTime createTime; protected LocalDateTime updateTime; public QueryallUserVO( String name, GUID guid, String nickName, String kernelGroupType, String role, LocalDateTime createTime, LocalDateTime updateTime ) { this.name = name; this.guid = guid; this.nickName = nickName; this.kernelGroupType = kernelGroupType; this.role = role; this.createTime = createTime; this.updateTime = updateTime; } public QueryallUserVO(){ super(); } 
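// NOTE: this VO deliberately masks credential data: getKernelCredential() and
// getCredentialGuid() below return null and their setters are no-ops, so a
// serialized QueryallUserVO never exposes the stored credential.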
public QueryallUserVO(AccountManager accountManager){ super(accountManager); } @Override public String getRole() { return this.role; } @Override public void setRole(String role) { this.role = role; } @Override public String getNickName() { return this.nickName; } @Override public void setNickName(String nickName) { this.nickName = nickName; } @Override public String getKernelCredential() { return null; } @Override public void setKernelCredential(String kernelCredential) { } @Override public GUID getCredentialGuid() { return null; } @Override public void setCredentialGuid(GUID credentialGuid) { } @Override public String getKernelGroupType() { return this.kernelGroupType; } @Override public void setKernelGroupType(String kernelGroupType) { this.kernelGroupType = kernelGroupType; } @Override public LocalDateTime getCreateTime() { return this.createTime; } @Override public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Override public LocalDateTime getUpdateTime() { return this.updateTime; } @Override public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/domian/vo/UserLoginVO.java ================================================ package com.walnut.sparta.account.domian.vo; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; public class UserLoginVO { private String userid; private String userName; private String UserToken; public String getUserid() { return userid; } public void setUserid(String userid) { this.userid = userid; } public String getUserName() { return userName; } public void setUserName(String userName) { this.userName = userName; } public String getUserToken() { return UserToken; } public void setUserToken(String userToken) { UserToken = userToken; } public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } public String toString() { return this.toJSONString(); } } ================================================ FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/interceptor/AuthenticationInterceptor.java ================================================ package com.walnut.sparta.account.interceptor; import com.walnut.sparta.account.util.JwtUtil; import io.jsonwebtoken.Claims; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.method.HandlerMethod; import org.springframework.web.servlet.HandlerInterceptor; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.lang.reflect.Method; public class AuthenticationInterceptor implements HandlerInterceptor { private final Logger log = LoggerFactory.getLogger(AuthenticationInterceptor.class); private final String userSecretKey = "1212121hsodhsdhasdhsaldhsalhdlsahdlsad"; // 应与生成Token的密钥一致 @Override public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception { // 检查是否需要认证 if (!(handler instanceof HandlerMethod)) { return true; } HandlerMethod handlerMethod = (HandlerMethod) handler; Method method = handlerMethod.getMethod(); // 判断方法或类是否有@RequiresAuthentication注解 RequiresAuthentication classAnnotation = handlerMethod.getBeanType().getAnnotation(RequiresAuthentication.class); RequiresAuthentication 
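// NOTE: authentication is opt-in per handler. The interceptor looks for
// @RequiresAuthentication on the controller class (looked up above) and on the
// handler method (looked up below); when neither is annotated, the request is
// let through without a token. As a sketch, a client is expected to send:
//   Authorization: Bearer <token issued by PUT /api/v2/account/login>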
methodAnnotation = method.getAnnotation(RequiresAuthentication.class);
        if (classAnnotation == null && methodAnnotation == null) {
            return true; // no authentication required
        }
        // Extract the token
        String authHeader = request.getHeader("Authorization");
        if (authHeader == null || !authHeader.startsWith("Bearer ")) {
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "未提供认证Token");
            log.warn("Unauthorized access attempt: Missing or invalid Authorization header");
            return false;
        }
        String token = authHeader.substring(7);
        // Log the token
        log.info("Received Token: {}", token);
        // Verify the token
        try {
            boolean isValid = JwtUtil.verifyToken(token, userSecretKey);
            if (!isValid) {
                response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "无效的Token");
                log.warn("Unauthorized access attempt: Invalid token");
                return false;
            }
            System.out.println("Token验证" + isValid);
            Claims claims = JwtUtil.parseJWT(userSecretKey, token);
            request.setAttribute("userId", claims.get("userId"));
        } catch (Exception e) {
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Token验证失败:" + e.getMessage());
            log.error("Unauthorized access attempt: Token verification failed", e);
            return false;
        }
        return true;
    }
}

================================================
FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/interceptor/RequiresAuthentication.java
================================================
package com.walnut.sparta.account.interceptor;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface RequiresAuthentication {
}

================================================
FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/properties/JwtProperties.java
================================================
package com.walnut.sparta.account.properties;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@Component
@ConfigurationProperties(prefix = "jwt")
public class JwtProperties {
    private long userTtl;
    private String userTokenName;
    private String userSecretKey;

    public JwtProperties() {
    }

    /**
     * Settings for generating JWT tokens.
     */
    public JwtProperties(String userSecretKey, long userTtl, String userTokenName) {
        this.userSecretKey = userSecretKey;
        this.userTtl = userTtl;
        this.userTokenName = userTokenName;
    }

    public String getUserSecretKey() { return userSecretKey; }
    public void setUserSecretKey(String userSecretKey) { this.userSecretKey = userSecretKey; }
    public long getUserTtl() { return userTtl; }
    public void setUserTtl(long userTtl) { this.userTtl = userTtl; }
    public String getUserTokenName() { return userTokenName; }
    public void setUserTokenName(String userTokenName) { this.userTokenName = userTokenName; }
}

================================================
FILE: Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/util/JwtUtil.java
================================================
package com.walnut.sparta.account.util;

import io.jsonwebtoken.*;
import io.jsonwebtoken.security.Keys;
import io.jsonwebtoken.security.SignatureException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.crypto.SecretKey;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.Map;

public class JwtUtil {
    /**
     * Generate a JWT.
     * Uses the HS256 algorithm with a fixed secret key.
     *
     * @param secretKey the JWT secret key
     * @param ttlMillis the token time-to-live in milliseconds
     * @param claims    the claims to embed
     * @return the compact JWT string
     */
    public static String createJWT(String secretKey, long ttlMillis, Map<String, Object> claims) {
        // The signature algorithm to sign with, i.e. the header part
        SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.HS256;
        // Expiration time of the JWT
        long expMillis = System.currentTimeMillis() + ttlMillis;
        Date exp = new Date(expMillis);
        // Assemble the JWT body
        JwtBuilder builder = Jwts.builder()
                // Private claims must be set first: setClaims overwrites any standard claims assigned before it
                .setClaims(claims)
                // Set the signing algorithm and the signing key
                .signWith(signatureAlgorithm, secretKey.getBytes(StandardCharsets.UTF_8))
                // Set the expiration time
                .setExpiration(exp);
        return builder.compact();
    }

    /* public static String createJWT(String secretKey, long ttlMillis, Map claims) {
        // Build a SecretKey via Keys.hmacShaKeyFor
        SecretKey key = Keys.hmacShaKeyFor(secretKey.getBytes(StandardCharsets.UTF_8));
        // Set the JWT expiration time
        long expMillis = System.currentTimeMillis() + ttlMillis;
        Date exp = new Date(expMillis);
        // Build the JWT
        JwtBuilder builder = Jwts.builder()
                .setClaims(claims)
                // Note: pass the SecretKey together with the algorithm here
                .signWith(key, SignatureAlgorithm.HS256)
                .setExpiration(exp);
        return builder.compact();
    }*/

    /**
     * Token decryption.
     *
     * @param secretKey the JWT secret key; it must stay on the server side and never be exposed,
     *                  otherwise signatures can be forged; with multiple clients, consider one key per client
     * @param token the encoded token
     */
    private static final Logger log = LoggerFactory.getLogger(JwtUtil.class);

    /**
     * Parse a JWT token.
     * @param secretKey the secret key
     * @param token     the JWT token
     * @return the parsed Claims
     */
    public static Claims parseJWT(String secretKey, String token) {
        System.out.println("Token: " + token);
        if (token == null || token.isEmpty() || token.split("\\.").length != 3) {
            log.error("JWT格式错误: Token为空或格式不正确");
            throw new IllegalArgumentException("无效的JWT令牌");
        }
        try {
            // Use the same key as at generation time
            byte[] keyBytes = secretKey.getBytes(StandardCharsets.UTF_8);
            SecretKey key = Keys.hmacShaKeyFor(keyBytes);
            return Jwts.parserBuilder()
                    .setSigningKey(key)
                    .build()
                    .parseClaimsJws(token)
                    .getBody();
        } catch (JwtException e) {
            log.error("JWT解析失败: {}", e.getMessage());
            throw new IllegalArgumentException("无效的JWT令牌");
        }
    }

    /**
     * Verify the validity of a JWT.
     *
     * @param token         the encoded token
     * @param userSecretKey the user's secret key
     * @return true if the token is valid, false otherwise
     */
    public static boolean verifyToken(String token, String userSecretKey) {
        try {
            // Parse the JWT with the user's secret key
            SecretKey secretKey = Keys.hmacShaKeyFor(userSecretKey.getBytes(StandardCharsets.UTF_8));
            Jwts.parserBuilder()
                    .setSigningKey(secretKey)
                    .build()
                    .parseClaimsJws(token);
            // Parsing succeeded, so the token is valid
            return true;
        } catch (ExpiredJwtException e) {
            // The token has expired
            System.out.println("Token has expired: " + e.getMessage());
        } catch (UnsupportedJwtException e) {
            // Unsupported JWT format
            System.out.println("Unsupported JWT: " + e.getMessage());
        } catch (MalformedJwtException e) {
            // Malformed JWT
            System.out.println("Invalid JWT string: " + e.getMessage());
        } catch (SignatureException e) {
            // Signature verification failed
            System.out.println("Invalid JWT signature: " + e.getMessage());
        } catch (IllegalArgumentException e) {
            // The JWT string is null, empty, or whitespace only
            System.out.println("JWT string is null or empty or only whitespace: " + e.getMessage());
        } catch (Exception e) {
            // Any other error
            System.out.println("Other error: " + e.getMessage());
        }
        // An exception was caught, so the token is invalid
        return false;
    }
}

================================================
FILE: Sparta/sparta-uac-console/src/main/resources/Account.thrift
================================================
service AccountIface {
    string queryNodeByPath(1: string path);
}

================================================
FILE: Sparta/sparta-uac-console/src/test/java/com/walnut/sparta/account/TestSpartaAccount.java
================================================
package com.walnut.sparta.account;

import com.pinecone.Pinecone;
import com.pinecone.framework.system.CascadeSystem;
import com.pinecone.framework.system.functions.Executor;
import com.pinecone.framework.util.Debug;
import com.pinecone.hydra.account.AccountManager;
import
com.pinecone.hydra.account.UniformAccountManager; import com.pinecone.hydra.account.ibatis.hydranium.UserMappingDriver; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.GenericApplicationContext; class JesusChrist extends Tritium { public JesusChrist( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public JesusChrist( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { SpartaAccountService sparta = new SpartaAccountService( "SpartaAccountService", this ); Thread shutdowner = new Thread(()->{ Debug.sleep( 5000 ); sparta.terminate(); }); //shutdowner.start(); sparta.setPrimarySources( SpartaBoot.class ); KOIMappingDriver koiAccountMappingDriver = new UserMappingDriver( sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); AccountManager accountManager = new UniformAccountManager( koiAccountMappingDriver ); sparta.setInitializer(new Executor() { @Override public void execute() throws Exception { sparta.getSpringApplication().addInitializers(new ApplicationContextInitializer() { @Override public void initialize( ConfigurableApplicationContext applicationContext ) { GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext; genericApplicationContext.registerBean("primaryAccount", AccountManager.class, () -> (AccountManager) accountManager); } }); } }); sparta.execute(); this.getTaskManager().add( sparta ); this.getTaskManager().syncWaitingTerminated(); } } public class TestSpartaAccount { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) ); jesus.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-ucdn-console/pom.xml ================================================ 4.0.0 sparta com.walnuts.sparta 2.5.1 org.springframework.boot spring-boot-maven-plugin package repackage true com.walnut.sparta.ucdn.console.UCDNBoot org.apache.maven.plugins maven-compiler-plugin 11 11 com.walnut.sparta.ucdn.console sparta-ucdn-console 2.1.0 junit junit 3.8.1 test com.pinecone.summer.springram springram 2.1.0 compile com.pinecone.hydra.kom.driver.default hydra-kom-default-driver 2.1.0 compile org.springframework.boot spring-boot-starter-websocket 3.4.3 com.pinecone.tritium hydra-system-tritium 2.1.0 compile com.walnut.sparta.api.uofs sparta-api-uofs 2.1.0 compile com.walnut.redstone.kernel redstone-architecture 2.1.0 com.walnut.redstone.stones redstone-message-stones 2.1.0 org.springframework.boot spring-boot-starter-test test org.mybatis.spring.boot mybatis-spring-boot-starter 2.2.2 com.auth0 java-jwt 4.4.0 com.pinecone.hydra.kernel hydra-service-control 2.1.0 compile ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/SpartaBoot.java ================================================ package com.walnut.sparta.ucdn.console; import org.springframework.boot.autoconfigure.SpringBootApplication; 
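// NOTE: this SpartaBoot only marks the Spring component-scan root for the UCDN
// console. The runnable entry point is UCDNBoot (next file), which presumably
// registers this class as the primary source and starts the service through
// Pinecone.init() and the Pinecone task manager rather than SpringApplication.run().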
@SpringBootApplication public class SpartaBoot { } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/UCDNBoot.java ================================================ package com.walnut.sparta.ucdn.console; import com.pinecone.Pinecone; import com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery; public class UCDNBoot { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ UCDNContentDelivery ucdn = (UCDNContentDelivery) Pinecone.sys().getTaskManager().add( new UCDNContentDelivery( args, Pinecone.sys() ) ); ucdn.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/CDNFileController.java ================================================ package com.walnut.sparta.ucdn.console.api.controller.v2; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.archcraft.redstone.response.GenericResultResponse; import com.walnut.sparta.ucdn.console.domain.service.FileSystemService; import com.walnut.sparta.ucdn.console.infrastructure.dto.RenameDTO; import com.walnut.sparta.ucdn.console.infrastructure.dto.UpdateFileNameDTO; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; @RestController @RequestMapping( "/api/v2/ucdn/file" ) @CrossOrigin public class CDNFileController { @Resource private KOMFileSystem primaryFileSystem; @Resource private FileSystemService fileSystemService; @Resource private VersionManage primaryVersion; /** * 创建文件 * @param filePath 文件路径 * @return 返回操作状态 */ @GetMapping("/create") public GenericResultResponse createFile(@RequestParam String filePath ){ this.primaryFileSystem.affirmFileNode( filePath ); return GenericResultResponse.success(); } /** * 获取文件或文件夹属性 * @param nodeGuid 文件或文件夹guid * @return 返回属性信息 */ @GetMapping("/attribute") public GenericResultResponse attribute(@RequestParam("nodeGuid") String nodeGuid ){ FileTreeNode fileTreeNode = this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid)); return GenericResultResponse.success( fileTreeNode ); } /** * 移除文件夹或者文件 * @param fileGuid 文件夹或者文件guid * @return 返回操作结果 */ @DeleteMapping("/remove") public GenericResultResponse removeFile(String fileGuid ){ this.fileSystemService.remove( GUIDs.GUID128( fileGuid ) ); this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) ); return GenericResultResponse.success(); } /** * 重命名文件或文件夹 * @param dto 信息 * @return 返回操作信息 */ @PostMapping("/rename") public GenericResultResponse renameFile(@RequestBody RenameDTO dto){ this.primaryFileSystem.renameFile( dto.getPath(), dto.getNewName() ); return GenericResultResponse.success(); } /** * 重命名接口 * @param dto 重命名数据 * @return */ @PostMapping("/updateFileName") public GenericResultResponse updateFileName(@RequestBody UpdateFileNameDTO dto){ 
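// NOTE: updateFileName duplicates the /rename endpoint above: both delegate to
// KOMFileSystem.renameFile, differing only in the DTO field names
// (filePath/newFileName here versus path/newName in RenameDTO).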
this.primaryFileSystem.renameFile( dto.getFilePath(), dto.getNewFileName() ); return GenericResultResponse.success(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/CDNFolderController.java ================================================ package com.walnut.sparta.ucdn.console.api.controller.v2; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.archcraft.redstone.response.GenericResultResponse; import com.walnut.sparta.ucdn.console.infrastructure.UCDNConstants; import com.walnut.sparta.ucdn.console.mapper.ClusterFileSyncMapper; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; import java.util.List; @RestController @RequestMapping( "/api/v2/ucdn/folder" ) @CrossOrigin public class CDNFolderController { @Resource private KOMFileSystem primaryFileSystem; @Resource private VersionManage versionManage; @Resource private ClusterFileSyncMapper fileSyncMapper; @Resource private BucketInstrument bucketInstrument; /** * 获取文件夹下所有内容 * @param folderGuid 文件夹guid * @returnS */ @GetMapping("/folder/listItem") public String listItem(@RequestParam String folderGuid ){ Folder folder = this.primaryFileSystem.getFolder(GUIDs.GUID128(folderGuid)); List fileTreeNodes = folder.listItem(); for ( FileTreeNode fileTreeNode : fileTreeNodes ) { if ( this.versionManage.queryIsManage(fileTreeNode.getGuid()) ){ List versions = versionManage.fetchVersions(fileTreeNode.getGuid()); GUID firstVersion = versions.get(0); FileTreeNode firstVersionFileTreeNode = this.primaryFileSystem.get(firstVersion); String fileName = firstVersionFileTreeNode.getName(); String fileExtension = ""; if (fileName.contains(UCDNConstants.period)) { fileExtension = fileName.substring(fileName.lastIndexOf(UCDNConstants.period) + 1); } fileTreeNode.setName(fileTreeNode.getName()+UCDNConstants.period+fileExtension); Integer syncState = this.fileSyncMapper.queryState(fileTreeNode.getGuid()); if( syncState == null ){ fileTreeNode.evinceFolder().setSyncState( 0 ); } else { fileTreeNode.evinceFolder().setSyncState( 1 ); } } } return GenericResultResponse.success(fileTreeNodes).toJSONString() ; } /** * 创建文件夹 * @param destDirPath 文件夹路径 * @return 返回操作状态 */ @GetMapping("/create") public GenericResultResponse createFolder(@RequestParam("destDirPath") String destDirPath ){ this.primaryFileSystem.affirmFolder( destDirPath ); return GenericResultResponse.success(); } /** * 获取文件或文件夹属性 * @param nodeGuid 文件或文件夹guid * @return 返回属性信息 */ @GetMapping("/attribute") public GenericResultResponse< FileTreeNode > attribute(@RequestParam("nodeGuid") String nodeGuid ){ FileTreeNode fileTreeNode = this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid)); return GenericResultResponse.success( fileTreeNode ); } /** * 获取所有根文件夹 * @return 返回根信息 */ @GetMapping("/list/root") public String listRoot(){ List roots = 
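// NOTE: fetchRoot() enumerates the top-level folders of the KOM file system; the
// result is wrapped in the GenericResultResponse envelope and serialized to JSON.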
this.primaryFileSystem.fetchRoot(); return GenericResultResponse.success( roots ).toJSONString(); } /** * Remove a folder or file. * @param fileGuid the folder or file GUID * @return the operation result */ @DeleteMapping("/remove/file") public GenericResultResponse removeFile(String fileGuid ){ this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) ); return GenericResultResponse.success(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/ClientController.java ================================================ package com.walnut.sparta.ucdn.console.api.controller.v2; import com.walnut.archcraft.redstone.response.GenericResultResponse; import com.walnut.sparta.ucdn.console.domain.service.NodeFileDistributionService; import org.springframework.beans.factory.annotation.Value; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import javax.annotation.Resource; import java.io.File; import java.io.IOException; import java.util.UUID; @RestController @CrossOrigin @RequestMapping( "/api/v2/ucdn/client" ) public class ClientController { @Resource protected NodeFileDistributionService service; @Value("${service.LocalUploadTemporaryWorkingDirectory}") private String majorTemporaryClusterFileDirectory; @Value("${service.TemporaryFileExtends}") private String temporaryFileExtends; /** * Upload a file and hand it to the distribution service. * @param filePath the destination path for the file * @param file the file body * @param topic the distribution topic * @return the operation result */ @PostMapping("/upload") public GenericResultResponse upload(@RequestParam("filePath") String filePath, @RequestParam("file") MultipartFile file, @RequestParam("topic") String topic ) throws IOException, InterruptedException { File tempFile = new File(majorTemporaryClusterFileDirectory+ UUID.randomUUID()+temporaryFileExtends); if( !tempFile.createNewFile() ){ throw new IOException( "Failed to create temporary file: " + tempFile.toPath() ); } file.transferTo(tempFile); this.service.upload( filePath,tempFile,topic ); if( !tempFile.delete() ){ throw new IOException( "Failed to delete temporary file: " + tempFile.toPath() ); } return GenericResultResponse.success(); } @GetMapping("/testDistribution") public void testDistribution( @RequestParam("path") String path, @RequestParam("topic") String topic ) throws IOException, InterruptedException { this.service.testDistribution( path,topic ); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/SiteController.java ================================================ package com.walnut.sparta.ucdn.console.api.controller.v2; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.bucket.entity.GenericSite; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.Folder; import com.walnut.archcraft.redstone.response.GenericResultResponse; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping;
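/* A minimal client-side sketch for the "/api/v2/ucdn/client/upload" endpoint above, kept as a comment so it does not intrude on the source. Illustrative only, not project code: host, port, file name and topic are hypothetical, and the multipart body is assembled by hand so the example needs only java.net.http from the JDK.

   import java.net.URI;
   import java.net.http.HttpClient;
   import java.net.http.HttpRequest;
   import java.net.http.HttpResponse;
   import java.nio.charset.StandardCharsets;
   import java.nio.file.Files;
   import java.nio.file.Path;

   static String uploadSketch() throws Exception {
       String boundary = "----ucdn-upload-sketch";
       byte[] payload  = Files.readAllBytes( Path.of( "bundle.bin" ) );      // hypothetical local file
       String preamble =
             "--" + boundary + "\r\nContent-Disposition: form-data; name=\"filePath\"\r\n\r\n/demo/bundle\r\n"
           + "--" + boundary + "\r\nContent-Disposition: form-data; name=\"topic\"\r\n\r\nucdn-distribute\r\n"
           + "--" + boundary + "\r\nContent-Disposition: form-data; name=\"file\"; filename=\"bundle.bin\"\r\n"
           + "Content-Type: application/octet-stream\r\n\r\n";
       String epilogue = "\r\n--" + boundary + "--\r\n";

       // Concatenate header bytes, raw file bytes and the closing boundary into one body.
       byte[] head = preamble.getBytes( StandardCharsets.UTF_8 );
       byte[] tail = epilogue.getBytes( StandardCharsets.UTF_8 );
       byte[] body = new byte[ head.length + payload.length + tail.length ];
       System.arraycopy( head, 0, body, 0, head.length );
       System.arraycopy( payload, 0, body, head.length, payload.length );
       System.arraycopy( tail, 0, body, head.length + payload.length, tail.length );

       HttpRequest request = HttpRequest.newBuilder( URI.create( "http://localhost:8080/api/v2/ucdn/client/upload" ) )
               .header( "Content-Type", "multipart/form-data; boundary=" + boundary )
               .POST( HttpRequest.BodyPublishers.ofByteArray( body ) )
               .build();
       // The endpoint answers with a GenericResultResponse serialized as JSON.
       return HttpClient.newHttpClient()
               .send( request, HttpResponse.BodyHandlers.ofString() )
               .body();
   }
*/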
import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; import java.util.List; @RestController @RequestMapping( "/api/v2/ucdn/site" ) @CrossOrigin public class SiteController implements Pinenut { @Resource private BucketInstrument bucketInstrument; @Resource private KOMFileSystem primaryFileSystem; /** * Create a site. * @param siteName the site name * @return the operation result */ @PutMapping("/create") public GenericResultResponse createSite(@RequestParam("siteName") String siteName){ Folder folder = this.primaryFileSystem.affirmFolder( siteName ); GenericSite site = new GenericSite(); site.setSiteName( siteName ); site.setMountPointGuid(folder.getGuid()); this.bucketInstrument.createSite( site ); return GenericResultResponse.success(); } /** * Remove a site. * @param siteName the site name * @return the operation result */ @DeleteMapping("/delete") public GenericResultResponse removeSite(@RequestParam("siteName") String siteName ){ this.bucketInstrument.removeSite(siteName); return GenericResultResponse.success(); } /** * List all sites. * @return all sites */ @GetMapping("/list") public String listSite(){ List sites = this.bucketInstrument.listSite(); return GenericResultResponse.success(sites).toJSONString(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/SiteNodeController.java ================================================ package com.walnut.sparta.ucdn.console.api.controller.v2; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.entity.ServiceElement; import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface; import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.bucket.entity.GenericSiteNode; import com.pinecone.hydra.storage.bucket.entity.SiteNode; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.archcraft.redstone.response.GenericResultResponse; import com.walnut.sparta.ucdn.console.infrastructure.dto.SiteNodeDTO; import com.walnut.sparta.ucdn.console.infrastructure.vo.SiteNodeVO; import com.walnut.sparta.ucdn.console.infrastructure.service.UCDNServiceManager; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; import java.util.ArrayList; import java.util.List; @RestController @RequestMapping( "/api/v2/ucdn/siteNode" ) @CrossOrigin public class SiteNodeController { @Resource private BucketInstrument bucketInstrument; @Resource private UCDNServiceManager UCDNServiceManager; @Resource private ServiceInstrument primaryService; @GetMapping("/query/siteGuid") public String querySiteNodeBySiteGuid(@RequestParam("siteGuid") String siteGuid){ ServiceLifecycleIface lifecycleIface =
this.UCDNServiceManager.getLifecycleIface(); ArrayList siteNodeVOS = new ArrayList<>(); List siteNodes = this.bucketInstrument.querySiteNodeBySiteGuid(GUIDs.GUID128( siteGuid )); for( SiteNode siteNode : siteNodes ){ if( lifecycleIface.hasOwnedServiceByServiceId( siteNode.getRelatedService().toString() ) ){ siteNode.setState( 1 ); }else { siteNode.setState( 0 ); } SiteNodeVO siteNodeVO = new SiteNodeVO(siteNode); siteNodeVO.setRelatedServicePath( this.primaryService.getPath( siteNode.getRelatedService() ) ); siteNodeVOS.add( siteNodeVO ); } return GenericResultResponse.success(siteNodeVOS).toJSONString(); } @DeleteMapping("/remove/siteNodeGuid") public GenericResultResponse removeSiteNode(@RequestParam("siteNodeGuid") String siteNodeGuid ){ ServiceLifecycleIface lifecycleIface = this.UCDNServiceManager.getLifecycleIface(); SiteNode siteNode = this.bucketInstrument.querySiteNode(GUIDs.GUID128(siteNodeGuid)); lifecycleIface.deregisterServiceByInstanceId( siteNode.getRelatedService().toString() ); this.bucketInstrument.removeSiteNode( GUIDs.GUID128( siteNodeGuid ) ); return GenericResultResponse.success(); } @PostMapping("/create") public GenericResultResponse createSiteNode(@RequestBody SiteNodeDTO dto){ GenericSiteNode siteNode = new GenericSiteNode(); siteNode.setSiteGuid( GUIDs.GUID128( dto.getSiteGuid() ) ); siteNode.setNodeName( dto.getNodeName() ); siteNode.setRelatedService( GUIDs.GUID128( dto.getRelatedService() ) ); GUID guid = this.bucketInstrument.createSiteNode(siteNode); return GenericResultResponse.success(guid.toString()); } @PostMapping("/update") public GenericResultResponse updateSiteNode(@RequestBody SiteNodeDTO dto ){ GenericSiteNode siteNode = new GenericSiteNode(); siteNode.setNodeName( dto.getNodeName() ); siteNode.setNodeGuid( GUIDs.GUID128( dto.getNodeGuid() ) ); siteNode.setState( dto.getState() ); siteNode.setIsEnabled( dto.getIsEnabled() ); siteNode.setSiteGuid( GUIDs.GUID128( dto.getSiteGuid() ) ); this.bucketInstrument.updateSiteNode( siteNode ); return GenericResultResponse.success(); } @GetMapping("/fetch/allService") public String fetchAllService(){ List serviceElements = this.primaryService.fetchAllService(); return GenericResultResponse.success(serviceElements).toJSONString(); } @PostMapping("/test/registerService") public GenericResultResponse testRegisterService(@RequestBody RegisterServiceDTO dto ){ this.UCDNServiceManager.getLifecycleIface().registerService( dto ); Debug.trace( "Service exists: " + this.UCDNServiceManager.getLifecycleIface().hasOwnedServiceByServiceId( dto.getServiceId() ) ); return GenericResultResponse.success(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/TransmitController.java ================================================ package com.walnut.sparta.ucdn.console.api.controller.v2; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.bucket.source.SiteManipulator; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.external.ExternalFile; import com.pinecone.hydra.storage.file.external.GenericNativeExternalFile; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FSNodeAllotment; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import
com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.entity.GenericFileNode; import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64; import com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64; import com.pinecone.hydra.storage.io.Chanface; import com.pinecone.hydra.storage.io.TitanFileChannelChanface; import com.pinecone.hydra.storage.io.TitanOutputStreamChanface; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.version.entity.TitanVersion; import com.pinecone.hydra.storage.version.entity.TitanVersionMapping; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.archcraft.redstone.response.GenericResultResponse; import com.walnut.sparta.ucdn.console.domain.service.NodeFileDistributionService; import com.walnut.sparta.ucdn.console.infrastructure.UCDNConsoleContents; import com.walnut.sparta.ucdn.console.infrastructure.dto.ClusterFileSyncDTO; import org.springframework.beans.factory.annotation.Value; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import javax.annotation.Resource; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.nio.channels.FileChannel; import java.nio.file.StandardOpenOption; import java.util.Map; import java.util.UUID; @RestController @RequestMapping( "/api/v2/ucdn/transmit" ) @CrossOrigin public class TransmitController { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; @Resource private BucketInstrument bucketInstrument; @Resource private VersionManage primaryVersion; @Resource private VersionManage versionManage; @Resource private NodeFileDistributionService fileDistributionService; @Value("${service.LocalUploadTemporaryWorkingDirectory}") private String majorTemporaryClusterFileDirectory; @Value("${service.TemporaryFileExtends}") private String temporaryFileExtends; @GetMapping("/download/guid") public void getFile(HttpServletRequest request, HttpServletResponse response) throws IOException { Map parameterMap = request.getParameterMap(); String[] guids = parameterMap.get("guid"); GUID storageObjectGuid = null; if( guids != null ){ storageObjectGuid = GUIDs.GUID128( guids[0] ); } ServletOutputStream outputStream = response.getOutputStream(); TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream); FileNode storageObject = (FileNode) this.primaryFileSystem.get(storageObjectGuid); TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, storageObject, kChannel); this.primaryFileSystem.export( entity ); } /** * Upload a file. * @param filePath the target path * @param version the version number * @param file the file * @param siteName the site * @return the operation result */ @PostMapping("/upload") public GenericResultResponse CDNUpload(@RequestParam("siteName") String siteName, @RequestParam("filePath") String
filePath, @RequestParam("version") String version, @RequestParam("file") MultipartFile file) throws IOException { SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator(); Site site = siteManipulator.querySiteByName(siteName); if( site == null ){ return GenericResultResponse.error("Site does not exist"); } int dotIndex = filePath.lastIndexOf(UCDNConsoleContents.PERIOD); String baseName = filePath.substring(0, dotIndex); String extension = filePath.substring(dotIndex + 1); String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + UCDNConsoleContents.FORWARD_SLASH + baseName; FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); Folder node = this.primaryFileSystem.affirmFolder(realFilePath); String storageObjectPath = realFilePath + UCDNConsoleContents.VERSION_PREFIX+ UCDNConsoleContents.FORWARD_SLASH + version +UCDNConsoleContents.PERIOD+ extension; File tempFile = new File(majorTemporaryClusterFileDirectory+ UUID.randomUUID()+temporaryFileExtends); if( !tempFile.createNewFile() ){ throw new IOException( "Failed to create temporary file: " + tempFile.toPath() ); } file.transferTo(tempFile); FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ); TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel ); FileNode fileNode = fsNodeAllotment.newFileNode(); fileNode.setDefinitionSize( tempFile.length() ); fileNode.setName( tempFile.getName() ); TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem,storageObjectPath, fileNode,titanFileChannelKChannel,this.primaryVolume ); this.primaryFileSystem.receive( receiveEntity ); FileTreeNode storageObject = this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(storageObjectPath)); TitanVersion titanVersion = new TitanVersion(); titanVersion.setVersion( version ); titanVersion.setFileGuid( node.getGuid() ); titanVersion.setTargetStorageObjectGuid( storageObject.getGuid() ); titanVersion.setVersionGuid( this.primaryFileSystem.getGuidAllocator().nextGUID() ); TitanVersionMapping versionMapping = new TitanVersionMapping(); versionMapping.setFileGuid(titanVersion.getFileGuid()); versionMapping.setEnableVersionGuid(titanVersion.getTargetStorageObjectGuid()); versionMapping.setVersionGuid((titanVersion.getVersionGuid())); this.versionManage.insertVesionMapping(versionMapping); this.primaryVersion.insert( titanVersion ); if( !tempFile.delete() ){ throw new IOException( "Failed to delete temporary file: " + tempFile.toPath() ); } return GenericResultResponse.success(); } /** * Download a file by its file path. */ @GetMapping("/download/path") public void getFileByPath(HttpServletRequest request, HttpServletResponse response) throws IOException { Map parameterMap = request.getParameterMap(); String[] paths = parameterMap.get("path"); String path = null; if(paths != null){ path = paths[0]; } ServletOutputStream outputStream = response.getOutputStream(); TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream); ElementNode elementNode = this.primaryFileSystem.queryElement(path); if(elementNode instanceof GenericNativeExternalFile){ ExternalFile externalFile = (ExternalFile) elementNode; File nativeFile = externalFile.getNativeFile(); try (FileInputStream fileInputStream = new FileInputStream(nativeFile)) { byte[] buffer = new byte[1024]; int bytesRead; while ((bytesRead = fileInputStream.read(buffer)) != -1) { outputStream.write(buffer, 0, bytesRead); } // flush the output stream outputStream.flush();
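/* Note: native external files are streamed to the response verbatim and the method returns here; managed GenericFileNode objects instead go through the TitanFileExportEntity64 export pipeline in the branch below. */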
return; } catch (IOException e) { // handle the exception, e.g. log it e.printStackTrace(); } } if( elementNode instanceof GenericFileNode){ FileNode fileNode = (FileNode) elementNode; TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, fileNode, kChannel); this.primaryFileSystem.export( entity ); } } @PostMapping("/clusterFileSync") public void clusterFileSync(@RequestBody ClusterFileSyncDTO dto) throws IOException, InterruptedException { this.fileDistributionService.clusterFileSync( dto ); } private Chanface getKChannel( File file ) throws IOException { FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ); return new TitanFileChannelChanface( channel ); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/VersionController.java ================================================ package com.walnut.sparta.ucdn.console.api.controller.v2; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.version.entity.TitanVersion; import com.pinecone.hydra.storage.version.entity.TitanVersionMapping; import com.pinecone.hydra.storage.version.entity.VersionMapping; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.archcraft.redstone.response.GenericResultResponse; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; @RestController @RequestMapping( "/api/v2/ucdn/version" ) @CrossOrigin public class VersionController { @Resource private VersionManage versionManage; @PutMapping("/create/VersionMapping") public GenericResultResponse createVersionMapping(@RequestParam("fileGuid") String fileGuid, @RequestParam("enableVersionGuid") String enableVersionGuid ) { TitanVersion titanVersion = this.versionManage.queryByTargetStorageObjectGuid(GUIDs.GUID128(enableVersionGuid)); TitanVersionMapping versionMapping = new TitanVersionMapping(); versionMapping.setFileGuid(GUIDs.GUID128(fileGuid)); versionMapping.setEnableVersionGuid(GUIDs.GUID128(enableVersionGuid)); versionMapping.setVersionGuid((titanVersion.getVersionGuid())); if (!this.versionManage.isExistEnableVersionMapping(versionMapping.getEnableVersionGuid())){ this.versionManage.insertVesionMapping(versionMapping); } else this.versionManage.UpdateVesionMapping(versionMapping); return GenericResultResponse.success(); } @GetMapping("/query/VersionMapping") public String queryVersionMapping(@RequestParam("fileGuid") String fileGuid) { VersionMapping versionMapping = this.versionManage.queryVersionMapping(GUIDs.GUID128(fileGuid)); TitanVersion version = new TitanVersion(); if (versionMapping != null){ version = this.versionManage.queryByTargetStorageObjectGuid(versionMapping.getEnableVersionGuid()); } return GenericResultResponse.success(version).toJSONString(); } @PutMapping("/update/VersionMapping") public GenericResultResponse updateVersionMapping( @RequestParam("fileGuid") String fileGuid, @RequestParam("enableVersionGuid") String enableVersionGuid) { VersionMapping versionMapping = this.versionManage.queryVersionMapping(GUIDs.GUID128(fileGuid)); versionMapping.setEnableVersionGuid(GUIDs.GUID128(enableVersionGuid));
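/* Note: a VersionMapping pins fileGuid to its currently enabled version (enableVersionGuid). createVersionMapping above falls back to an update when a mapping for that enabled version already exists, whereas this update path always repoints the mapping fetched by fileGuid. */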
this.versionManage.UpdateVesionMapping(versionMapping); return GenericResultResponse.success(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/xx.java ================================================ package com.walnut.sparta.ucdn.console.api.controller; public class xx { } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/AppCDNMyBatisConfig.java ================================================ package com.walnut.sparta.ucdn.console.config; import org.apache.ibatis.session.SqlSessionFactory; import org.mybatis.spring.SqlSessionFactoryBean; import org.mybatis.spring.annotation.MapperScan; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.core.io.support.ResourcePatternResolver; import javax.sql.DataSource; @Configuration @MapperScan("com.walnut.sparta.ucdn.console.mapper") public class AppCDNMyBatisConfig { @Bean public SqlSessionFactory sqlSessionFactory( DataSource dataSource ) throws Exception { SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean(); sqlSessionFactoryBean.setDataSource(dataSource); ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(); sqlSessionFactoryBean.setTypeHandlersPackage( "com.pinecone.hydra.entity.ibatis" ); sqlSessionFactoryBean.setMapperLocations(resolver.getResources("classpath*:mapper/*.xml")); return sqlSessionFactoryBean.getObject(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/BeanConfig.java ================================================ package com.walnut.sparta.ucdn.console.config; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.registry.dto.RegisterServiceDTO; import com.pinecone.hydra.umb.kafka.WolfMCKafkaClient; import com.pinecone.hydra.umb.rocket.WolfMCRocketClient; import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode; import com.pinecone.hydra.umb.wolf.WolfMCBClient; import com.pinecone.hydra.umct.WolfMCExpress; import com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery; import com.walnut.sparta.ucdn.console.infrastructure.UCDNConstants; import com.walnut.sparta.ucdn.console.ufm.FileMultiDistributionIface; import com.walnut.sparta.ucdn.console.ufm.SessionValidator; import com.walnut.sparta.ucdn.console.infrastructure.service.UCDNCentralServiceManager; import com.walnut.sparta.ucdn.console.infrastructure.service.UCDNServiceManager; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import javax.annotation.Resource; @Configuration public class BeanConfig { @Resource private UCDNContentDelivery UCDNContentDelivery; @Resource private ServiceInstrument primaryService; @Bean( name = "kafkaFileServiceClient") public UlfBroadcastControlNode kafkaFileServiceClient(){ UlfBroadcastControlNode client = new WolfMCBClient(new WolfMCKafkaClient(UCDNConstants.KafkaServer), "", this.UCDNContentDelivery, WolfMCExpress.class); client.compile( FileMultiDistributionIface.class,false ); return client; } @Bean( name = "rocketFileServiceClient") public UlfBroadcastControlNode rocketFileServiceClient(){ UlfBroadcastControlNode client = new WolfMCBClient(new 
WolfMCRocketClient(UCDNConstants.RocketServer,UCDNConstants.UCDNFileServiceTransmitGroup), "", this.UCDNContentDelivery, WolfMCExpress.class); client.compile( SessionValidator.class,false ); return client; } @Bean public UCDNServiceManager ucdnServiceManager() throws Exception { UCDNCentralServiceManager ucdnServiceManager = new UCDNCentralServiceManager(this.UCDNContentDelivery); ucdnServiceManager.getLifecycleIface().registerService( new RegisterServiceDTO( UCDNConstants.clientId, UCDNConstants.serviceId, UCDNConstants.deployId )); return ucdnServiceManager; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/WebConfig.java ================================================ package com.walnut.sparta.ucdn.console.config; import com.walnut.sparta.ucdn.console.interceptor.JWTInterceptor; import org.springframework.context.annotation.Configuration; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; import javax.annotation.Resource; @Configuration public class WebConfig implements WebMvcConfigurer { @Resource private JWTInterceptor jwtInterceptor; @Override public void addInterceptors(InterceptorRegistry registry) { registry.addInterceptor(jwtInterceptor).addPathPatterns("/**"); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/WebSocketConfig.java ================================================ package com.walnut.sparta.ucdn.console.config; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.messaging.simp.config.MessageBrokerRegistry; import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker; import org.springframework.web.socket.config.annotation.StompEndpointRegistry; import org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer; import org.springframework.web.socket.server.standard.ServerEndpointExporter; @Configuration public class WebSocketConfig { @Bean public ServerEndpointExporter serverEndpointExporter(){ return new ServerEndpointExporter(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/FileSystemService.java ================================================ package com.walnut.sparta.ucdn.console.domain.service; import com.pinecone.framework.util.id.GUID; public interface FileSystemService { void remove( GUID fileGuid ); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/NodeFileDistributionService.java ================================================ package com.walnut.sparta.ucdn.console.domain.service; import com.walnut.sparta.ucdn.console.infrastructure.dto.ClusterFileSyncDTO; import java.io.File; import java.io.IOException; public interface NodeFileDistributionService { void upload( String path, File file, String topic ) throws IOException, InterruptedException; void testDistribution( String path, String topic ) throws IOException, InterruptedException; void clusterFileSync( ClusterFileSyncDTO dto ) throws IOException, InterruptedException; } ================================================ FILE: 
Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSyncTransaction.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import com.pinecone.framework.system.prototype.Pinenut; public interface ClusterFileSyncTransaction extends Pinenut { int getClusterNodeCount(); void setClusterNodeCount( int clusterNodeCount ); int checkRemainingCount(); int decreaseRemainingCount(); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSyncTransactionManager.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import com.pinecone.framework.util.id.GUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; public class ClusterFileSyncTransactionManager implements ClusterFileTransactionManager { private ConcurrentMap< GUID, ConcurrentMap> transactionMap; public ClusterFileSyncTransactionManager(){ this.transactionMap = new ConcurrentHashMap<>(); } @Override public void register(GUID fileGuid, ConcurrentMap transactions) { this.transactionMap.put( fileGuid, transactions ); } @Override public ConcurrentMap getTransactions(GUID fileGuid ) { return this.transactionMap.get( fileGuid ); } @Override public void removeTransactions( GUID fileGuid ) { this.transactionMap.remove( fileGuid ); } @Override public boolean checkTransactionFinished( GUID fileGuid ) { ConcurrentMap transactions = this.getTransactions(fileGuid); for( ClusterFileSyncTransaction clusterFileSyncTransaction : transactions.values() ){ if( clusterFileSyncTransaction.checkRemainingCount() != 0 ){ return false; } } return true; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSynchronizationService.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import com.pinecone.framework.system.prototype.Pinenut; public interface ClusterFileSynchronizationService extends Pinenut { } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSynchronizationServiceImpl.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import javax.annotation.Resource; public class ClusterFileSynchronizationServiceImpl implements ClusterFileSynchronizationService { @Resource private UFMTransactionSynchronizedNotifier webSocketService; private ClusterFileTransactionManager transactionManager; } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileTransactionManager.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.framework.util.id.GUID; import java.util.concurrent.ConcurrentMap; public interface ClusterFileTransactionManager extends Manager { void register( GUID fileGuid, ConcurrentMap transactions ); ConcurrentMap getTransactions(GUID fileGuid ); void removeTransactions( GUID fileGuid ); boolean checkTransactionFinished( GUID fileGuid ); } 
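A minimal usage sketch of the cluster-sync transaction types above, illustrative only and not project code: it mirrors the flow implemented in NodeFileDistributionServiceImpl.clusterFileSync and FileSynchronizedEventSubscriber further below, registering one transaction per version GUID and draining it as nodes acknowledge. The GUID literals are hypothetical, the raw ConcurrentMap follows the interface declarations above, and the classes are assumed to be on the classpath.

import com.pinecone.framework.util.id.GUID;
import com.pinecone.ulf.util.guid.GUIDs;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ClusterSyncSketch {
    public static void main( String[] args ) {
        ClusterFileTransactionManager manager = new ClusterFileSyncTransactionManager();
        GUID fileGuid    = GUIDs.GUID128( "0000-0000-0000-01" );   // hypothetical literal
        GUID versionGuid = GUIDs.GUID128( "0000-0000-0000-02" );   // hypothetical literal

        // One transaction per version object, sized by the number of cluster nodes (here 3).
        ConcurrentMap map = new ConcurrentHashMap();
        map.put( versionGuid, new MultiClusterFileSyncTransaction( 3 ) );
        manager.register( fileGuid, map );

        // Each node acknowledgement decrements the remaining count,
        // as FileSynchronizedEventSubscriber.afterEventTriggered does.
        ClusterFileSyncTransaction tx =
                (ClusterFileSyncTransaction) manager.getTransactions( fileGuid ).get( versionGuid );
        for ( int i = 0; i < 3; i++ ) {
            tx.decreaseRemainingCount();
        }

        // True once every registered transaction has drained to zero.
        if ( manager.checkTransactionFinished( fileGuid ) ) {
            manager.removeTransactions( fileGuid );
        }
    }
}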
================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/FileSynchronizedEventSubscriber.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import java.io.IOException; import java.util.concurrent.ConcurrentMap; import javax.websocket.Session; import com.pinecone.framework.system.ProvokeHandleException; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.version.VersionManage; import com.walnut.sparta.ucdn.console.infrastructure.vo.SyncFinishedVO; import com.walnut.sparta.ucdn.console.mapper.ClusterFileSyncMapper; import com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class FileSynchronizedEventSubscriber implements UFMEventSubscriber { private Logger logger; private VersionManage versionManage; private ClusterFileTransactionManager transactionManager; private UFMTransactionSynchronizedNotifier transactionSynchronizedNotifier; private ClusterFileSyncMapper clusterFileSyncMapper; public FileSynchronizedEventSubscriber( VersionManage versionManage, ClusterFileTransactionManager transactionManager, UFMTransactionSynchronizedNotifier transactionSynchronizedNotifier, ClusterFileSyncMapper clusterFileSyncMapper ) { this.logger = LoggerFactory.getLogger( this.getClass() ); this.versionManage = versionManage; this.transactionManager = transactionManager; this.transactionSynchronizedNotifier = transactionSynchronizedNotifier; this.clusterFileSyncMapper = clusterFileSyncMapper; } @Override public void afterEventTriggered( String path, String serviceId, FileNode fileNode ) { try { GUID versionFileGuid = this.versionManage.getVersionFileByGuid(fileNode.getGuid()); ConcurrentMap map = this.transactionManager.getTransactions(versionFileGuid); ClusterFileSyncTransaction clusterFileSyncTransaction = map.get(fileNode.getGuid()); clusterFileSyncTransaction.decreaseRemainingCount(); Session session = this.transactionSynchronizedNotifier.getSession(); SyncFinishedVO finishedVO = new SyncFinishedVO(path, serviceId, 1); session.getBasicRemote().sendText(finishedVO.toJSONString()); if( this.transactionManager.checkTransactionFinished( versionFileGuid ) ){ this.logger.info( "File {} synchronized done.", versionFileGuid ); this.clusterFileSyncMapper.insert( versionFileGuid, 1,null ); session.close(); this.transactionManager.removeTransactions( versionFileGuid ); } } catch ( IOException e ) { throw new ProvokeHandleException( e ); } } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/MultiClusterFileSyncTransaction.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import java.util.concurrent.atomic.AtomicInteger; public class MultiClusterFileSyncTransaction implements ClusterFileSyncTransaction { private int clusterNodeCount; private AtomicInteger remainingCount; public MultiClusterFileSyncTransaction( int clusterNodeCount ) { this.clusterNodeCount = clusterNodeCount; this.remainingCount = new AtomicInteger( clusterNodeCount ); } public MultiClusterFileSyncTransaction() { } @Override public int getClusterNodeCount() { return clusterNodeCount; } @Override public void 
setClusterNodeCount( int clusterNodeCount ) { this.clusterNodeCount = clusterNodeCount; } @Override public int checkRemainingCount() { return this.remainingCount.getAcquire(); } @Override public int decreaseRemainingCount() { return this.remainingCount.decrementAndGet(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/UFMTransactionSynchronizedNotifier.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.cluster; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import javax.websocket.OnClose; import javax.websocket.OnError; import javax.websocket.OnMessage; import javax.websocket.OnOpen; import javax.websocket.Session; import javax.websocket.server.ServerEndpoint; @ServerEndpoint( value = "/websocket/ucdn/monitor/nodes/transactionSynchronized" ) @Component public class UFMTransactionSynchronizedNotifier { private Logger log = LoggerFactory.getLogger( this.getClass() ); private static Session session; @OnOpen public void onOpen( Session session ){ UFMTransactionSynchronizedNotifier.session = session; } @OnMessage public void onMessage( String msg, Session session ){ this.log.info(msg); } @OnClose public void onClose(){ this.log.info( "TransactionSynchronized notifier has been successfully shutdown." ); } @OnError public void onError( Session session, Throwable error ){ this.log.error( "TransactionSynchronized error: ", error ); } public Session getSession(){ return session; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/impl/FileSystemServiceImpl.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.impl; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ClusterPage; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.entity.LocalCluster; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.walnut.sparta.ucdn.console.domain.service.FileSystemService; import org.springframework.stereotype.Service; import javax.annotation.Resource; import java.sql.SQLException; import java.util.List; @Service public class FileSystemServiceImpl implements FileSystemService { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; @Override public void remove( GUID fileGuid ){ FileTreeNode fileTreeNode = this.primaryFileSystem.get(fileGuid); if( fileTreeNode instanceof Folder){ Folder folder = (Folder) fileTreeNode; List children = this.primaryFileSystem.getChildren(folder.getGuid()); for( TreeNode treeNode : children ){ this.remove( treeNode.getGuid() ); } } else if( fileTreeNode instanceof FileNode){ FileNode fileNode = (FileNode) fileTreeNode; ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() ); long fileClusterNum = clusterPage.getClusters(); for( long i = 0; i < fileClusterNum; i++ ){ LocalCluster frame = clusterPage.getLocalCluster( i ); try { this.primaryVolume.removeStorageObject( frame ); } catch (SQLException e) { throw new 
RuntimeException(e); } } } } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/impl/NodeFileDistributionServiceImpl.java ================================================ package com.walnut.sparta.ucdn.console.domain.service.impl; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FSNodeAllotment; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64; import com.pinecone.hydra.storage.io.TitanFileChannelChanface; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.sparta.ucdn.console.domain.service.cluster.ClusterFileSyncTransaction; import com.walnut.sparta.ucdn.console.domain.service.cluster.ClusterFileSyncTransactionManager; import com.walnut.sparta.ucdn.console.domain.service.cluster.ClusterFileTransactionManager; import com.walnut.sparta.ucdn.console.domain.service.cluster.FileSynchronizedEventSubscriber; import com.walnut.sparta.ucdn.console.domain.service.cluster.UFMTransactionSynchronizedNotifier; import com.walnut.sparta.ucdn.console.domain.service.cluster.MultiClusterFileSyncTransaction; import com.walnut.sparta.ucdn.console.infrastructure.dto.ClusterFileSyncDTO; import com.walnut.sparta.ucdn.console.mapper.ClusterFileSyncMapper; import com.walnut.sparta.ucdn.console.ufm.FileMultiDistributionService; import com.walnut.sparta.ucdn.console.domain.service.NodeFileDistributionService; import com.walnut.sparta.ucdn.console.ufm.UOFSFileMultiDistributionService; import com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery; import com.walnut.sparta.ucdn.console.infrastructure.service.UCDNServiceManager; import org.springframework.stereotype.Service; import javax.annotation.PostConstruct; import javax.annotation.Resource; import java.io.File; import java.io.IOException; import java.nio.channels.FileChannel; import java.nio.file.StandardOpenOption; import java.util.List; import java.util.concurrent.ConcurrentHashMap; @Service public class NodeFileDistributionServiceImpl implements NodeFileDistributionService { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; private FileMultiDistributionService fileMultiDistributionService; @Resource private UCDNContentDelivery UCDNContentDelivery; @Resource private UCDNServiceManager ucdnServiceManager; @Resource private VersionManage primaryVersion; private ClusterFileTransactionManager clusterFileTransactionManager; @Resource private UFMTransactionSynchronizedNotifier ufmTransactionSynchronizedNotifier; @Resource private ClusterFileSyncMapper clusterFileSyncMapper; @PostConstruct private void init() throws UMBServiceException { this.clusterFileTransactionManager = new ClusterFileSyncTransactionManager(); this.fileMultiDistributionService = new UOFSFileMultiDistributionService( this.UCDNContentDelivery.getSpartaUCDNService() ); this.fileMultiDistributionService.registerFileTransmitCompleteEventSubscriber( new FileSynchronizedEventSubscriber( this.primaryVersion, 
this.clusterFileTransactionManager,this.ufmTransactionSynchronizedNotifier, this.clusterFileSyncMapper ) ); this.fileMultiDistributionService.start(); } @Override public void upload( String path, File file, String topic ) throws IOException, InterruptedException { FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ); TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel ); FileNode fileNode = fsNodeAllotment.newFileNode(); fileNode.setDefinitionSize( file.length() ); fileNode.setName( file.getName() ); TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem,path, fileNode,titanFileChannelKChannel,this.primaryVolume ); this.primaryFileSystem.receive( receiveEntity ); if( !topic.isBlank() ){ this.fileMultiDistributionService.fileDistribution( fileNode, topic ); } } @Override public void testDistribution(String path, String topic) throws IOException, InterruptedException { FileNode fileNode = (FileNode)this.primaryFileSystem.queryElement(path); this.fileMultiDistributionService.fileDistribution( fileNode, topic ); } @Override public void clusterFileSync( ClusterFileSyncDTO dto ) throws IOException, InterruptedException { Folder folder = this.primaryFileSystem.getFolder( GUIDs.GUID128(dto.getFileGuid()) ); List guids = this.primaryVersion.fetchVersions(folder.getGuid()); ServiceLifecycleIface lifecycleIface = this.ucdnServiceManager.getLifecycleIface(); int serviceNum = lifecycleIface.countRegisteredService(); ConcurrentHashMap map = new ConcurrentHashMap<>(); for ( GUID guid : guids ){ MultiClusterFileSyncTransaction transaction = new MultiClusterFileSyncTransaction(serviceNum); map.put( guid, transaction ); } this.clusterFileTransactionManager.register( folder.getGuid(), map ); for( GUID guid : guids ){ FileNode fileNode = this.primaryFileSystem.getFileNode(guid); this.fileMultiDistributionService.fileDistribution( fileNode, this.fileMultiDistributionService.getConfig().getFileCloudDistributeTransmitTopic() ); } } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/ClusterLock.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; import com.pinecone.framework.system.prototype.Pinenut; import java.util.concurrent.atomic.AtomicInteger; public class ClusterLock implements Pinenut { private AtomicInteger waitThreatNum; public ClusterLock(){ this.waitThreatNum = new AtomicInteger(0); } public AtomicInteger getWaitThreatNum(){ return this.waitThreatNum; } public void increment(){ this.waitThreatNum.getAndIncrement(); } public void decrement(){ this.waitThreatNum.getAndDecrement(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/FSContentDeliveryService.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; import com.pinecone.framework.system.prototype.Pinenut; public interface FSContentDeliveryService extends Pinenut { } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/SpartaUCDNService.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; import 
com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.functions.Executor; import com.pinecone.framework.util.Debug; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver; import com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver; import com.pinecone.hydra.servgram.Servgram; import com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.UniformServiceInstrument; import com.pinecone.hydra.service.registry.server.UniformServiceManager; import com.pinecone.hydra.service.registry.ulf.HuskyServiceAppointServer; import com.pinecone.hydra.storage.bucket.TitanBucketInstrument; import com.pinecone.hydra.storage.file.FileSystemConfig; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.KernelFileSystemConfig; import com.pinecone.hydra.storage.file.UniformObjectFileSystem; import com.pinecone.hydra.storage.version.TitanVersionManage; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.volume.KernelVolumeConfig; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.system.component.ComponentInitializationException; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.version.ibatis.hydranium.VersionMappingDriver; import com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.summer.spring.Springron; import com.walnut.archcraft.redstone.messge.PrimaryMessageWareStone; import com.walnut.sparta.ucdn.console.SpartaBoot; import com.walnut.sparta.ucdn.console.ufm.UCFMConfig; import com.walnut.sparta.ucdn.console.ufm.UFMConfig; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.GenericApplicationContext; import java.io.IOException; import java.nio.file.Path; public class SpartaUCDNService extends Springron implements UCDNService { protected KOIMappingDriver koiMappingDriver; protected KOIMappingDriver koiFileMappingDriver; protected KOIMappingDriver koiBucketMappingDriver; protected KOIMappingDriver koiVersionMappingDriver; protected KOIMappingDriver koiServiceMappingDriver; protected KOMFileSystem fileSystem; protected UniformVolumeManager volumeTree; protected TitanBucketInstrument bucketInstrument; protected TitanVersionManage versionManage; protected ServiceInstrument serviceInstrument; protected PrimaryMessageWareStone primaryMessageWareStone; protected UniformServiceManager serviceManager; protected UFMConfig clusterFileSynchronizationConfig; protected void initKOMSubsystem() throws ComponentInitializationException { this.koiMappingDriver = new VolumeMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiFileMappingDriver = new FileMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiBucketMappingDriver = new 
BucketMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiVersionMappingDriver = new VersionMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiServiceMappingDriver = new ServiceMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); JSONConfig selfConfig = (JSONConfig) this.getConfig(); FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( "service.PrimaryUniformFileSystem" ) ); this.fileSystem = new UniformObjectFileSystem( this.koiFileMappingDriver, fileSystemConfig ); VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( "service.PrimaryUniformVolumeManager" ) ); this.volumeTree = new UniformVolumeManager( this.koiMappingDriver, volumeConfig ); this.bucketInstrument = new TitanBucketInstrument( this.koiBucketMappingDriver ); this.versionManage = new TitanVersionManage( this.koiVersionMappingDriver ); this.serviceInstrument = new UniformServiceInstrument( this.koiServiceMappingDriver ); } protected void initMessageWares() throws ComponentInitializationException { this.primaryMessageWareStone = new WolfKingMessageWareStone( this ); } protected void initModules() throws ComponentInitializationException { this.serviceManager = new UniformServiceManager( this.serviceInstrument ); this.serviceManager.hookAppointServer( new HuskyServiceAppointServer( this.primaryMessageWareStone.getWolfKingAppointServer() ) ); JSONConfig selfConfig = (JSONConfig) this.getConfig(); this.clusterFileSynchronizationConfig = new UCFMConfig( selfConfig.queryJSONObject( "service.ClusterFileSynchronizationConfig" ) ); } protected void startGlobalMiddlewares() throws ComponentInitializationException { try { this.getPrimaryMessageMiddlewareDirector().getWolfKingAppointServer().execute(); Debug.sleep( 500 ); this.getPrimaryMessageMiddlewareDirector().getWolfAppointClient().execute(); } catch ( Exception e ) { throw new ComponentInitializationException( e ); } } protected void initSpringBeanFactorySubsystem() throws ComponentInitializationException { this.setPrimarySources( SpartaBoot.class ); this.setInitializer(new Executor() { @Override public void execute() throws Exception { SpartaUCDNService.this.getSpringApplication().addInitializers(new ApplicationContextInitializer() { @Override public void initialize( ConfigurableApplicationContext applicationContext ) { GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext; genericApplicationContext.registerBean("primaryFileSystem", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem) fileSystem); genericApplicationContext.registerBean("primaryVolume", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree); genericApplicationContext.registerBean("primaryBucket", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument); genericApplicationContext.registerBean("primaryVersion", VersionManage.class, () -> (VersionManage) versionManage); genericApplicationContext.registerBean("primaryService", ServiceInstrument.class, () -> serviceInstrument); genericApplicationContext.registerBean("primaryWolfDuplexAppointClient", 
DuplexAppointClient.class, () -> primaryMessageWareStone.getWolfAppointClient()); genericApplicationContext.registerBean("uofsContentDelivery", UCDNContentDelivery.class, () -> (UCDNContentDelivery) SpartaUCDNService.this.parentSystem()); } }); } }); } protected void initSubsystem() throws ComponentInitializationException { this.initKOMSubsystem(); this.initMessageWares(); this.initModules(); this.startGlobalMiddlewares(); this.initSpringBeanFactorySubsystem(); } public SpartaUCDNService( String szName, Processum parent, String[] springbootArgs ) throws ComponentInitializationException { super( szName, parent, springbootArgs ); this.mSpringKernel.setPrimarySources( SpartaBoot.class ); this.initSubsystem(); } public SpartaUCDNService( String szName, Processum parent ) throws ComponentInitializationException { this( szName, parent, new String[0] ); } @Override protected void loadConfig() { this.mServgramList = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey ); Object dyServgramConf = this.mServgramList.get( this.gramName() ); if( dyServgramConf instanceof String ) { try{ this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) ); } catch ( IOException ignore ) { this.getLogger().info( "[Notice] Spring will use the default config `application.yaml`." ); } } else { this.mServgramConf = this.mServgramList.getChild( this.gramName() ); } } @Override public Tritium parentSystem() { return (Tritium)super.parentSystem(); } @Override public KOMFileSystem getKOMFileSystem() { return this.fileSystem; } @Override public UniformVolumeManager getUniformVolumeManager() { return this.volumeTree; } @Override public TitanBucketInstrument getTitanBucketInstrument() { return this.bucketInstrument; } @Override public TitanVersionManage getTitanVersionManage() { return this.versionManage; } @Override public ServiceInstrument getServiceInstrument() { return this.serviceInstrument; } @Override public PrimaryMessageWareStone getPrimaryMessageMiddlewareDirector() { return this.primaryMessageWareStone; } @Override public UniformServiceManager getUniformServiceManager() { return this.serviceManager; } @Override public UFMConfig getClusterFileSynchronizationConfig() { return this.clusterFileSynchronizationConfig; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNConsoleContents.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; import com.pinecone.framework.system.prototype.Pinenut; public class UCDNConsoleContents implements Pinenut { public static String VOLUME_TYPE_PHYSICAL = "PhysicalVolume"; public static String VOLUME_TYPE_SIMPLE = "SimpleVolume"; public static String VOLUME_TYPE_SPANNED = "SpannedVolume"; public static String VOLUME_TYPE_STRIPED = "StripedVolume"; public static String VERSION_PREFIX = "/$version"; public static String FORWARD_SLASH = "/"; public static String PERIOD = "."; } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNConstants.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; public class UCDNConstants { public static String KafkaServer = "localhost:9092"; public static String RocketServer = "localhost:9876"; public static String UCDNFileServiceTransmitGroup = "UCDNFileServiceTransmitGroup"; public static 
String serviceId = "1769872-0002d2-0003-cc"; public static String deployId = "1769872-0002d2-0003-cc"; public static long clientId = 1; public static String period = "."; } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNContentDelivery.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.tritium.Tritium; public class UCDNContentDelivery extends Tritium implements FSContentDeliveryService { protected SpartaUCDNService spartaUCDNService; public UCDNContentDelivery(String[] args, CascadeSystem parent ) { this( args, null, parent ); } public UCDNContentDelivery(String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } public SpartaUCDNService getSpartaUCDNService(){ return this.spartaUCDNService; } @Override public void vitalize() throws Exception { this.spartaUCDNService = new SpartaUCDNService( "SpartaUCDNService", this ); this.spartaUCDNService.execute(); this.getTaskManager().add(this.spartaUCDNService); this.getTaskManager().syncWaitingTerminated(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNService.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.registry.server.UniformServiceManager; import com.pinecone.hydra.storage.bucket.TitanBucketInstrument; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.version.TitanVersionManage; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.walnut.archcraft.redstone.messge.PrimaryMessageWareStone; import com.walnut.sparta.ucdn.console.ufm.UFMConfig; public interface UCDNService extends Slf4jTraceable { KOMFileSystem getKOMFileSystem(); UniformVolumeManager getUniformVolumeManager(); TitanBucketInstrument getTitanBucketInstrument(); TitanVersionManage getTitanVersionManage(); ServiceInstrument getServiceInstrument(); // TODO: Next, systemically integrate the Primary-Middleware-Stone into the uniform director.
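/* Note: the primary message-ware stone aggregates the Wolf duplex appoint server/client and the Kafka/RocketMQ broadcast control nodes; see WolfKingMessageWareStone below for the concrete wiring behind this accessor. */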
PrimaryMessageWareStone getPrimaryMessageMiddlewareDirector(); UniformServiceManager getUniformServiceManager(); UFMConfig getClusterFileSynchronizationConfig(); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/WolfKingMessageWareStone.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.util.json.JSONMaptron; import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface; import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface; import com.pinecone.hydra.system.component.ComponentInitializationException; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.uma.DuplexAppointServer; import com.pinecone.hydra.uma.HuskyDuplexExpress; import com.pinecone.hydra.uma.wolf.WolvesAppointClient; import com.pinecone.hydra.uma.wolf.WolvesAppointServer; import com.pinecone.hydra.umb.kafka.WolfMCKafkaClient; import com.pinecone.hydra.umb.rocket.WolfMCRocketClient; import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode; import com.pinecone.hydra.umb.wolf.WolfMCBClient; import com.pinecone.hydra.umc.wolf.client.UlfClient; import com.pinecone.hydra.umc.wolf.client.WolfMCClient; import com.pinecone.hydra.umc.wolf.server.UlfServer; import com.pinecone.hydra.umc.wolf.server.WolfMCServer; import com.pinecone.hydra.umct.WolfMCExpress; import com.pinecone.tritium.Tritium; import com.walnut.archcraft.redstone.messge.PrimaryMessageWareStone; import com.walnut.sparta.ucdn.console.ufm.FileMultiDistributionIface; import com.walnut.sparta.ucdn.console.ufm.SessionValidator; public class WolfKingMessageWareStone implements PrimaryMessageWareStone { protected DuplexAppointServer wolfKingAppointServer; protected DuplexAppointClient wolfAppointClient; protected UlfBroadcastControlNode primaryKafkaClient; protected UlfBroadcastControlNode primaryRocketClient; protected Processum parentProcess; public WolfKingMessageWareStone( Processum parentProcess ) throws ComponentInitializationException { this.parentProcess = parentProcess; this.initSelf(); } private void initPrimaryAppointClientSegment() throws Exception { UlfClient embedRPCClient = new WolfMCClient( 2048, "PrimaryWolfMCClient", this.parentSystem(), this.parentSystem().getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( "Messagers.Messagers.WolfMCKingpin" ) ); this.wolfAppointClient = new WolvesAppointClient( embedRPCClient ); this.wolfAppointClient.compile( ServiceLifecycleIface.class, false ); this.wolfAppointClient.compile( ServiceMetaManipulationIface.class, false ); } private void initPrimaryAppointServerSegment() throws Exception { UlfServer embedRPCServer = new WolfMCServer( "WolfKingMCServer", this.parentSystem(), new JSONMaptron("{host: \"0.0.0.0\",\n" + "port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}") ); this.wolfKingAppointServer = new WolvesAppointServer( embedRPCServer, HuskyDuplexExpress.class ); //this.serviceManager = new UniformServiceManager( serviceInstrument, wolfServer ); } private void initPrimaryBroadcastSegment() throws Exception { this.primaryKafkaClient = new WolfMCBClient(new WolfMCKafkaClient(UCDNConstants.KafkaServer), "", this.parentSystem(), WolfMCExpress.class); this.primaryKafkaClient.compile( FileMultiDistributionIface.class,false ); this.primaryRocketClient = new WolfMCBClient(new 
WolfMCRocketClient(UCDNConstants.RocketServer,UCDNConstants.UCDNFileServiceTransmitGroup), "", this.parentSystem(), WolfMCExpress.class); this.primaryRocketClient.compile(SessionValidator.class,false); } private void initSelf() throws ComponentInitializationException { try { this.initPrimaryAppointServerSegment(); this.initPrimaryAppointClientSegment(); this.initPrimaryBroadcastSegment(); } catch ( Exception e ) { throw new ComponentInitializationException( e ); } } @Override public Processum getParentProcess() { return this.parentProcess; } @Override public DuplexAppointServer getWolfKingAppointServer() { return this.wolfKingAppointServer; } @Override public DuplexAppointClient getWolfAppointClient() { return this.wolfAppointClient; } @Override public UlfBroadcastControlNode getPrimaryKafkaClient() { return this.primaryKafkaClient; } @Override public UlfBroadcastControlNode getPrimaryRocketClient() { return this.primaryRocketClient; } @Override public Tritium parentSystem() { return (Tritium)this.parentProcess.parentSystem(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/ClusterFileSyncDTO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.dto; public class ClusterFileSyncDTO { private String fileGuid; public String getFileGuid() { return fileGuid; } public void setFileGuid(String fileGuid) { this.fileGuid = fileGuid; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/DownloadObjectByChannelDTO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.dto; public class DownloadObjectByChannelDTO { private String destDirPath; private String targetPath; public DownloadObjectByChannelDTO() { } public DownloadObjectByChannelDTO(String destDirPath, String targetPath) { this.destDirPath = destDirPath; this.targetPath = targetPath; } public String getDestDirPath() { return destDirPath; } public void setDestDirPath(String destDirPath) { this.destDirPath = destDirPath; } public String getTargetPath() { return targetPath; } public void setTargetPath(String targetPath) { this.targetPath = targetPath; } public String toString() { return "downloadObjectByChannelDto{destDirPath = " + destDirPath + ", targetPath = " + targetPath + "}"; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/LogicVolumeDTO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.dto; import com.pinecone.framework.system.prototype.Pinenut; public class LogicVolumeDTO implements Pinenut { private String name; private long definitionCapacity; private String extConfig; public LogicVolumeDTO() { } public LogicVolumeDTO(String name, long definitionCapacity, String extConfig) { this.name = name; this.definitionCapacity = definitionCapacity; this.extConfig = extConfig; } public String getName() { return name; } public void setName(String name) { this.name = name; } public long getDefinitionCapacity() { return definitionCapacity; } public void setDefinitionCapacity(long definitionCapacity) { this.definitionCapacity = definitionCapacity; } public String getExtConfig() { return extConfig; } public void setExtConfig(String extConfig) { this.extConfig = extConfig; } public String 
toString() { return "SimpleVolumeDTO{name = " + name + ", definitionCapacity = " + definitionCapacity + ", extConfig = " + extConfig + "}"; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/PhysicalVolumeDTO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.dto; import com.pinecone.framework.system.prototype.Pinenut; public class PhysicalVolumeDTO implements Pinenut { private String name; private long definitionCapacity; private String extConfig; private String mountPoint; public PhysicalVolumeDTO() { } public PhysicalVolumeDTO(String name, long definitionCapacity, String extConfig, String mountPoint) { this.name = name; this.definitionCapacity = definitionCapacity; this.extConfig = extConfig; this.mountPoint = mountPoint; } public String getName() { return name; } public void setName(String name) { this.name = name; } public long getDefinitionCapacity() { return definitionCapacity; } public void setDefinitionCapacity(long definitionCapacity) { this.definitionCapacity = definitionCapacity; } public String getExtConfig() { return extConfig; } public void setExtConfig(String extConfig) { this.extConfig = extConfig; } public String getMountPoint() { return mountPoint; } public void setMountPoint(String mountPoint) { this.mountPoint = mountPoint; } public String toString() { return "PhysicalVolumeDTO{name = " + name + ", definitionCapacity = " + definitionCapacity + ", extConfig = " + extConfig + ", mountPoint = " + mountPoint + "}"; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/RenameDTO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.dto; public class RenameDTO { private String path; private String newName; public String getPath() { return path; } public void setPath(String path) { this.path = path; } public String getNewName() { return newName; } public void setNewName(String newName) { this.newName = newName; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/SiteNodeDTO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.dto; import com.pinecone.framework.util.id.GUID; public class SiteNodeDTO { protected long enumId; protected String nodeName; protected String nodeGuid; protected String siteGuid; protected int state; protected int isEnabled; protected String relatedService; public long getEnumId() { return enumId; } public void setEnumId(long enumId) { this.enumId = enumId; } public String getNodeName() { return nodeName; } public void setNodeName(String nodeName) { this.nodeName = nodeName; } public String getNodeGuid() { return nodeGuid; } public void setNodeGuid(String nodeGuid) { this.nodeGuid = nodeGuid; } public String getSiteGuid() { return siteGuid; } public void setSiteGuid(String siteGuid) { this.siteGuid = siteGuid; } public int getState() { return state; } public void setState(int state) { this.state = state; } public int getIsEnabled() { return isEnabled; } public void setIsEnabled(int isEnabled) { this.isEnabled = isEnabled; } public String getRelatedService() { return relatedService; } public void setRelatedService(String relatedService) { this.relatedService = relatedService; } } 
================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/StorageExpansionDTO.java
================================================
package com.walnut.sparta.ucdn.console.infrastructure.dto;

import com.pinecone.framework.system.prototype.Pinenut;

public class StorageExpansionDTO implements Pinenut {
    public String logicGuid;
    public String childGuid;

    public StorageExpansionDTO() {
    }

    public StorageExpansionDTO(String logicGuid, String childGuid) {
        this.logicGuid = logicGuid;
        this.childGuid = childGuid;
    }

    public String getLogicGuid() { return logicGuid; }
    public void setLogicGuid(String logicGuid) { this.logicGuid = logicGuid; }
    public String getChildGuid() { return childGuid; }
    public void setChildGuid(String childGuid) { this.childGuid = childGuid; }

    public String toString() {
        return "StorageExpansionDTO{logicGuid = " + logicGuid + ", childGuid = " + childGuid + "}";
    }
}

================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/UpdateFileNameDTO.java
================================================
package com.walnut.sparta.ucdn.console.infrastructure.dto;

public class UpdateFileNameDTO {
    private String filePath;
    private String newFileName;

    public String getFilePath() { return this.filePath; }
    public void setFilePath(String filePath) { this.filePath = filePath; }
    public String getNewFileName() { return this.newFileName; }
    public void setNewFileName(String newFileName) { this.newFileName = newFileName; }
}

================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/UpdateObjectByChannelDTO.java
================================================
package com.walnut.sparta.ucdn.console.infrastructure.dto;

import org.springframework.web.multipart.MultipartFile;

public class UpdateObjectByChannelDTO {
    private String volumeGuid;
    private String destDirPath;
    private MultipartFile object;

    public UpdateObjectByChannelDTO() {
    }

    public UpdateObjectByChannelDTO(String volumeGuid, String destDirPath, MultipartFile object) {
        this.volumeGuid = volumeGuid;
        this.destDirPath = destDirPath;
        this.object = object;
    }

    public String getVolumeGuid() { return volumeGuid; }
    public void setVolumeGuid(String volumeGuid) { this.volumeGuid = volumeGuid; }
    public String getDestDirPath() { return destDirPath; }
    public void setDestDirPath(String destDirPath) { this.destDirPath = destDirPath; }
    public MultipartFile getObject() { return object; }
    public void setObject(MultipartFile object) { this.object = object; }

    public String toString() {
        return "updateObjectDto{volumeGuid = " + volumeGuid + ", destDirPath = " + destDirPath + ", object = " + object + "}";
    }
}

================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/service/UCDNCentralServiceManager.java
================================================
package com.walnut.sparta.ucdn.console.infrastructure.service;

import com.pinecone.hydra.service.kom.ServiceInstrument;
import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;
import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;
import com.pinecone.hydra.service.registry.server.UniformServiceManager;
import com.pinecone.hydra.uma.DuplexAppointClient;
import com.pinecone.hydra.uma.DuplexAppointServer;
import
com.walnut.sparta.ucdn.console.infrastructure.UCDNService; import com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery; public class UCDNCentralServiceManager implements UCDNServiceManager { protected DuplexAppointServer serviceControlAppointServer; protected DuplexAppointClient serviceRecallAppointClient; protected UniformServiceManager serviceManager; protected ServiceInstrument serviceInstrument; protected ServiceLifecycleIface serviceRegistryLifecycleIface; protected ServiceMetaManipulationIface serviceMateIface; protected UCDNService ucdnService; public UCDNCentralServiceManager( UCDNContentDelivery UCDNContentDelivery) { this.ucdnService = UCDNContentDelivery.getSpartaUCDNService(); this.serviceInstrument = this.ucdnService.getServiceInstrument(); this.serviceManager = ucdnService.getUniformServiceManager(); this.serviceControlAppointServer = this.ucdnService.getPrimaryMessageMiddlewareDirector().getWolfKingAppointServer(); this.serviceRecallAppointClient = this.ucdnService.getPrimaryMessageMiddlewareDirector().getWolfAppointClient(); this.serviceRegistryLifecycleIface = this.serviceRecallAppointClient.getIface( ServiceLifecycleIface.class ); this.serviceMateIface = this.serviceRecallAppointClient.getIface( ServiceMetaManipulationIface.class ); } @Override public DuplexAppointServer getWolfServer() { return this.serviceControlAppointServer; } @Override public ServiceInstrument getServiceInstrument() { return this.serviceInstrument; } @Override public DuplexAppointClient getDuplexAppointClient() { return this.serviceRecallAppointClient; } @Override public ServiceLifecycleIface getLifecycleIface() { return this.serviceRegistryLifecycleIface; } @Override public ServiceMetaManipulationIface getMateIface() { return this.serviceMateIface; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/service/UCDNServiceManager.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.service; import com.pinecone.framework.system.regime.arch.Manager; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.registry.server.ServiceLifecycleIface; import com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface; import com.pinecone.hydra.uma.DuplexAppointClient; import com.pinecone.hydra.uma.DuplexAppointServer; public interface UCDNServiceManager extends Manager { DuplexAppointServer getWolfServer(); ServiceInstrument getServiceInstrument(); DuplexAppointClient getDuplexAppointClient(); ServiceLifecycleIface getLifecycleIface(); ServiceMetaManipulationIface getMateIface(); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/vo/FolderContentVo.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.vo; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import java.util.List; public class FolderContentVo { private List< FileTreeNode > fileTreeNodes; public FolderContentVo() { } public FolderContentVo(List fileTreeNodes) { this.fileTreeNodes = fileTreeNodes; } public List getFileTreeNodes() { return fileTreeNodes; } public void setFileTreeNodes(List fileTreeNodes) { this.fileTreeNodes = fileTreeNodes; } public String toString() { return "FolderContentVo{fileTreeNodes = " + fileTreeNodes + "}"; } } ================================================ FILE: 
Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/vo/SiteNodeVO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.vo; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; import com.pinecone.hydra.storage.bucket.entity.SiteNode; public class SiteNodeVO implements Pinenut { protected long enumId; protected String nodeName; protected GUID nodeGuid; protected GUID siteGuid; protected int state; protected int isEnabled; protected GUID relatedService; protected String relatedServicePath; public SiteNodeVO(){} public SiteNodeVO(SiteNode siteNode){ this.enumId = siteNode.getEnumId(); this.nodeGuid = siteNode.getNodeGuid(); this.nodeName = siteNode.getNodeName(); this.relatedService = siteNode.getRelatedService(); this.isEnabled = siteNode.getIsEnabled(); this.siteGuid = siteNode.getSiteGuid(); this.state = siteNode.getState(); } public long getEnumId() { return this.enumId; } public void setEnumId(long enumId) { this.enumId = enumId; } public String getNodeName() { return this.nodeName; } public void setNodeName(String nodeName) { this.nodeName = nodeName; } public GUID getNodeGuid() { return this.nodeGuid; } public void setNodeGuid(GUID nodeGuid) { this.nodeGuid = nodeGuid; } public GUID getSiteGuid() { return this.siteGuid; } public void setSiteGuid(GUID siteGuid) { this.siteGuid = siteGuid; } public int getState() { return this.state; } public void setState(int state) { this.state = state; } public int getIsEnabled() { return this.isEnabled; } public void setIsEnabled(int isEnabled) { this.isEnabled = isEnabled; } public GUID getRelatedService() { return this.relatedService; } public void setRelatedService(GUID relatedService) { this.relatedService = relatedService; } public String getRelatedServicePath(){ return this.relatedServicePath; } public void setRelatedServicePath(String relatedServicePath){ this.relatedServicePath = relatedServicePath; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/vo/SyncFinishedVO.java ================================================ package com.walnut.sparta.ucdn.console.infrastructure.vo; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.json.homotype.BeanJSONEncoder; public class SyncFinishedVO implements Pinenut { private String path; private String serviceId; private int syncState; public SyncFinishedVO(){} public SyncFinishedVO( String path, String serviceId, int syncState ){ this.path = path; this.serviceId = serviceId; this.syncState = syncState; } public String getPath() { return this.path; } public void setPath(String path) { this.path = path; } public String getServiceId() { return this.serviceId; } public void setServiceId(String serviceId) { this.serviceId = serviceId; } public int getSyncState() { return this.syncState; } public void setSyncState(int syncState) { this.syncState = syncState; } @Override public String toJSONString() { return BeanJSONEncoder.BasicEncoder.encode( this ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: 
Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/interceptor/JWTInterceptor.java
================================================
package com.walnut.sparta.ucdn.console.interceptor;

import com.alibaba.fastjson.JSONObject;
import com.walnut.archcraft.redstone.response.GenericResultResponse;
import com.walnut.sparta.ucdn.console.util.JWTUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpMethod;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.springframework.web.servlet.HandlerInterceptor;

@Component
public class JWTInterceptor implements HandlerInterceptor {
    private Logger log = LoggerFactory.getLogger( this.getClass() );

    @Override
    public boolean preHandle(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response, Object handler) throws Exception {
        String url = request.getRequestURI();
        if (request.getMethod().equals(HttpMethod.OPTIONS.name())) {
            return true;
        }
        //log.info("Requested path: " + url);
        if (url.contains("login") || url.contains("register") || url.contains("send_code") || url.contains("download")) {
            log.info("Allow login or registration operations");
            return true;
        }
        String jwt = request.getHeader("Token");
        if (!StringUtils.hasLength(jwt)) {
            log.info("The request header Token is empty");
            GenericResultResponse error = GenericResultResponse.error("Not logged in");
            String jsonString = JSONObject.toJSONString(error);
            response.getWriter().write(jsonString);
            return false;
        }
        try {
            JWTUtil.ParseJWt(jwt);
        } catch (Exception e) {
            log.info("Token parsing failed");
            GenericResultResponse error = GenericResultResponse.error("Not logged in");
            String jsonString = JSONObject.toJSONString(error);
            response.getWriter().write(jsonString);
            return false;
        }
        return true;
    }
}

================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/mapper/ClusterFileSyncMapper.java
================================================
package com.walnut.sparta.ucdn.console.mapper;

import com.pinecone.framework.util.id.GUID;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

@Mapper
public interface ClusterFileSyncMapper {
    @Insert("INSERT INTO `hydra_ucdn_fmd_sync_status` (`file_guid`, `state`, `site_guid`) VALUES ( #{fileGuid},#{state},#{siteGuid})")
    void insert(@Param("fileGuid") GUID fileGuid, @Param("state") int state, @Param("siteGuid") GUID siteGuid);

    @Delete("DELETE FROM `hydra_ucdn_fmd_sync_status` WHERE `file_guid` = #{fileGuid}")
    void remove( GUID fileGuid );

    @Update("UPDATE `hydra_ucdn_fmd_sync_status` SET `state` = #{state} WHERE `file_guid` = #{fileGuid}")
    void updateState(@Param("fileGuid") GUID fileGuid, @Param("state") int state );

    @Select("SELECT `state` FROM `hydra_ucdn_fmd_sync_status` WHERE `file_guid` = #{fileGuid}")
    Integer queryState( @Param("fileGuid") GUID fileGuid );
}

================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FMDTransactionBlock.java
================================================
package com.walnut.sparta.ucdn.console.ufm;

public class FMDTransactionBlock {
    private Object clusterLock;
    private Long clusterCompletedCount;
    private Long consumerCompletedCount;

    public FMDTransactionBlock(){}

    public
FMDTransactionBlock(Object clusterLock, Long clusterCompletedCount, Long consumerCompletedCount) { this.clusterLock = clusterLock; this.clusterCompletedCount = clusterCompletedCount; this.consumerCompletedCount = consumerCompletedCount; } public Object getClusterLock() { return this.clusterLock; } public void setClusterLock(Object clusterLock) { this.clusterLock = clusterLock; } public Long getClusterCompletedCount() { return this.clusterCompletedCount; } public void setClusterCompletedCount(Long clusterCompletedCount) { this.clusterCompletedCount = clusterCompletedCount; } public Long getConsumerCompletedCount() { return this.consumerCompletedCount; } public void setConsumerCompletedCount(Long consumerCompletedCount) { this.consumerCompletedCount = consumerCompletedCount; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FileMultiDistributionController.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ClusterPage; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FSNodeAllotment; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.Cluster; import com.pinecone.hydra.storage.file.entity.LocalCluster; import com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64; import com.pinecone.hydra.storage.io.TitanFileChannelChanface; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.sparta.ucdn.console.infrastructure.ClusterLock; import com.walnut.sparta.ucdn.console.infrastructure.UCDNConstants; import com.walnut.sparta.ucdn.console.ufm.protocol.RequestHead; import com.walnut.sparta.ucdn.console.ufm.session.UFMTransaction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.channels.FileChannel; import java.nio.file.Path; import java.nio.file.StandardOpenOption; @Controller @AddressMapping( "com.pinecone.hydra.uofs.ufm.FileMultiDistributionIface." ) public class FileMultiDistributionController implements Pinenut { private Logger logger; protected KOMFileSystem primaryFileSystem; protected UniformVolumeManager primaryVolume; protected SessionPhaser sessionPhaser; protected SessionValidator fileSessionValidator; protected UFMConfig config; public FileMultiDistributionController( UOFSFileMultiDistributionService distributionService ) throws UMBServiceException { this.logger = LoggerFactory.getLogger( this.getClass() ); this.primaryFileSystem = distributionService.primaryFileSystem; this.primaryVolume = distributionService.primaryVolume; this.sessionPhaser = distributionService.sessionPhaser; this.fileSessionValidator = distributionService.fileSessionValidator; this.config = distributionService.config; } @AddressMapping("startDistribution") public void setFileMate( RequestHead head, String path, long definitionSize ) { if( this.sessionPhaser.getSessionTransaction( head.getSessionId() ) != null ){ this.logger.warn( "[Warning] UCDNService `startDistribution` session assertion compromised." 
); this.sessionPhaser.removeSessionTransaction( head.getSessionId() ); return; } this.logger.info( "UCDNService invoked `startDistribution`. " ); long sessionId = head.getSessionId(); FileNode fileNode = this.primaryFileSystem.affirmFileNode( path ); fileNode.setDefinitionSize( definitionSize ); this.primaryFileSystem.update( fileNode ); this.sessionPhaser.registerClusterCount( fileNode.getGuid(),0 ); UFMTransaction ufmTransaction = new UFMTransaction( fileNode.getGuid() ); ufmTransaction.setLastEventArrivedMills( System.currentTimeMillis() ); this.sessionPhaser.registerSessionTransaction( sessionId, ufmTransaction ); this.sessionPhaser.getSessionTransaction( sessionId ).finishStartTransmit(); this.logger.info( "UCDNService invoked `startDistribution`. " ); } @AddressMapping("setFrameMeta") public void setFrameMeta( RequestHead head, UFMDClusterDO frameMeta ) throws IOException { long sessionId = head.getSessionId(); if ( this.assertTransmitTransaction ( frameMeta.getFilePath(), head) ) { this.logger.warn( "[Warning] UCDNService `setFrameMeta` session assertion compromised." ); return; } this.logger.info( "UCDNService invoked `setFrameMeta`. " ); FSNodeAllotment allotment = this.primaryFileSystem.getFSNodeAllotment(); String filePath = frameMeta.getFilePath(); ElementNode elementNode = this.primaryFileSystem.queryElement(filePath); LocalCluster localCluster = allotment.newLocalCluster(); localCluster.setSegId(frameMeta.getSegId() ); localCluster.setSourceName( frameMeta.getSourceName() ); localCluster.setSize(frameMeta.getSize() ); localCluster.setFileGuid( elementNode.getGuid() ); localCluster.save(); this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() ); this.logger.info( "UCDNService invoked `setFrameMeta`. " ); } @AddressMapping("transmitClusterFrame") public void transmitClusterFrame( RequestHead head, UFMDClusterFrame ufmdClusterFrame ) throws IOException, InterruptedException { long sessionId = head.getSessionId(); if ( this.assertTransmitTransaction ( ufmdClusterFrame.getPath(), head) ) { this.logger.warn( "[Warning] UCDNService `transmitClusterFrame` session assertion compromised." ); return; } this.logger.info( "UCDNService invoked `transmitClusterFrame`. 
" ); ElementNode elementNode = this.primaryFileSystem.queryElement(ufmdClusterFrame.getPath()); Cluster cluster = this.primaryFileSystem.getClusterByFileWithId(elementNode.getGuid(), ufmdClusterFrame.getSegId()); if( this.sessionPhaser.getClusterLock(cluster.getSegGuid()) == null ){ this.sessionPhaser.registerClusterLock( cluster.getSegGuid(), new ClusterLock()); } else { synchronized (this.sessionPhaser.getClusterLock(cluster.getSegGuid())){ this.sessionPhaser.getClusterLock(cluster.getSegGuid()).increment(); this.sessionPhaser.getClusterLock(cluster.getSegGuid()).wait(); } } RandomAccessFile fos = this.sessionPhaser.getClusterOutputStream( cluster.getSegGuid() ); Path temporaryPath = this.config.formatTemporaryPath( cluster.getSegGuid().toString() ); String szTemporaryPath = temporaryPath.toString(); File tempFile = new File( szTemporaryPath ); if( fos == null ){ fos = new RandomAccessFile( tempFile,"rw" ); this.sessionPhaser.registerClusterOutputStream( cluster.getSegGuid(), fos ); } fos.seek(ufmdClusterFrame.getOffset() ); fos.write( ufmdClusterFrame.getBytes() ); this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() ); if( cluster.getSize() == tempFile.length() ) { this.sessionPhaser.removeClusterCount( cluster.getSegGuid() ); this.frameTerminate( head, ufmdClusterFrame.getPath(), ufmdClusterFrame.getSegId(), ufmdClusterFrame.getTotalSegNum() ); } if( this.sessionPhaser.getClusterLock(cluster.getSegGuid()) != null ){ if( this.sessionPhaser.getClusterLock(cluster.getSegGuid()).getWaitThreatNum().get() == 0 ){ this.sessionPhaser.removeClusterLock( cluster.getSegGuid() ); this.logger.info( "UCDNService invoked `transmitClusterFrame`. " ); return; } synchronized ( this.sessionPhaser.getClusterLock(cluster.getSegGuid()) ){ this.sessionPhaser.getClusterLock( cluster.getSegGuid() ).decrement(); this.sessionPhaser.getClusterLock(cluster.getSegGuid()).notify(); } } this.logger.info( "UCDNService invoked `transmitClusterFrame`. " ); } //todo 添加写完后向主节点发送完成指令 @AddressMapping("frameTerminate") public void frameTerminate( RequestHead head, String path, long segId, long totalSegNum ) throws IOException { long sessionId = head.getSessionId(); if ( this.assertTransmitTransaction ( path, head) ) { this.logger.warn( "[Warning] UCDNService `frameTerminate` session assertion compromised." ); return; } this.logger.info( "UCDNService invoked `frameTerminate`. 
" ); FileNode fileNode = (FileNode) this.primaryFileSystem.queryElement(path); LocalCluster frame = (LocalCluster)this.primaryFileSystem.getClusterByFileWithId(fileNode.getGuid(), segId); Path temporaryPath = this.config.formatTemporaryPath( frame.getSegGuid().toString() ); String szTemporaryPath = temporaryPath.toString(); File tempFile = new File( szTemporaryPath ); try { if ( !tempFile.exists() ){ throw new IOException( "Creating file compromised, what :" + szTemporaryPath ); } FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ); TitanFileChannelChanface chanface = new TitanFileChannelChanface( channel ); TitanFileReceiveEntity64 receiveEntity64 = new TitanFileReceiveEntity64(this.primaryFileSystem, path, fileNode, chanface, this.primaryVolume); receiveEntity64.receive( segId ); this.sessionPhaser.incrementClusterCount( fileNode.getGuid() ); this.logger.info("`frameTerminate` Currently finished transition cluster:" + this.sessionPhaser.getClusterCount( fileNode.getGuid() )); if( this.sessionPhaser.getClusterCount( fileNode.getGuid() ) == this.config.getBatchTransmitMemberThreshold() ){ this.sessionPhaser.resetClusterCount( fileNode.getGuid() ); this.fileSessionValidator.stageClusterGroupComplete( path ); } } finally { RandomAccessFile outputStream = this.sessionPhaser.getClusterOutputStream(frame.getSegGuid()); outputStream.close(); this.sessionPhaser.removeClusterOutputStream( frame.getSegGuid() ); if ( !tempFile.delete() ) { throw new IOException( "Temporary file has been purged failed." ); } if( segId == totalSegNum - 1 ){ this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() ); this.sessionPhaser.getSessionTransaction( sessionId ).finishTransmitFileContent(); this.sessionPhaser.getSessionTransaction( sessionId ).finishFileDistributionComplete(); this.sessionPhaser.removeClusterCount( fileNode.getGuid() ); this.sessionPhaser.removeFileLock( fileNode.getGuid() ); this.sessionPhaser.removeConsumerCount( fileNode.getGuid() ); this.sessionPhaser.removeSessionTransaction( sessionId ); this.fileSessionValidator.fileTransmitComplete( path, UCDNConstants.serviceId ); } else { this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() ); } } this.logger.info( "UCDNService invoked `frameTerminate`. " ); } protected boolean assertTransmitTransaction( String filePath, RequestHead head ) throws IOException { long sessionId = head.getSessionId(); UFMTransaction transaction = this.sessionPhaser.getSessionTransaction(sessionId); if( transaction == null ){ this.logger.warn( "[Warning] UCDNService `assertTransmitTransaction` session doesn`t existed. " ); return true; } long currentTimeMillis = System.currentTimeMillis(); if( currentTimeMillis - transaction.getLastEventArrivedMills() > this.config.getSessionExpiredTimeMillis() ){ this.logger.warn( "[Warning] UCDNService `assertTransmitTransaction` session has expired. " ); this.sessionPhaser.removeSessionTransaction( sessionId ); this.transmitRollBack( filePath, sessionId ); return true; } if( !transaction.isStartTransmit() ){ this.logger.warn( "[Warning] UCDNService `assertTransmitTransaction` illegal transaction stage, which should never has started yet. 
" ); this.sessionPhaser.removeSessionTransaction( sessionId ); this.transmitRollBack( filePath, sessionId ); return true; } return false; } private void transmitRollBack( String filePath, long sessionId ) throws IOException { this.logger.warn( "[Warning] UCDNService `transmitRollBack`. " ); FileNode fileNode = (FileNode) this.primaryFileSystem.queryElement(filePath); ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() ); long fileClusterNum = clusterPage.getClusters(); for( long i = 0; i < fileClusterNum; ++i ){ LocalCluster frame = clusterPage.getLocalCluster( i ); RandomAccessFile clusterOutputStream = this.sessionPhaser.getClusterOutputStream(frame.getSegGuid()); clusterOutputStream.close(); this.sessionPhaser.removeClusterOutputStream( frame.getSegGuid() ); } this.sessionPhaser.removeClusterCount( fileNode.getGuid() ); this.sessionPhaser.removeFileLock( fileNode.getGuid() ); this.sessionPhaser.removeClusterCount( fileNode.getGuid() ); //this.primaryFileSystem.remove( fileNode.getGuid() ); this.sessionPhaser.removeSessionTransaction( sessionId ); this.logger.warn( "[Warning] UCDNService `transmitRollBack`. " ); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FileMultiDistributionIface.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.pinecone.hydra.umct.stereotype.Iface; import com.walnut.sparta.ucdn.console.ufm.protocol.RequestHead; @Iface ( "com.pinecone.hydra.uofs.ufm.FileMultiDistributionIface" ) public interface FileMultiDistributionIface { void startDistribution ( RequestHead head, String path, long definitionSize ); void setFrameMeta ( RequestHead head, UFMDClusterDO frameMeta ); void transmitClusterFrame ( RequestHead head, UFMDClusterFrame contentVO ); void frameTerminate( RequestHead head, String path, long segId ); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FileMultiDistributionService.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber; import java.io.IOException; import java.util.Collection; public interface FileMultiDistributionService extends Pinenut { void fileDistribution( FileNode fileNode, String topic ) throws IOException, InterruptedException; void test() throws UMBServiceException; BroadcastControlConsumer getTransmitConsumer( String topic,String group ); BroadcastControlProducer getTransmitProducer(); FileMultiDistributionService registerFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) ; FileMultiDistributionService deregisterFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) ; Collection fetchFileTransmitCompleteEventSubscribers(); boolean hasStarted(); void start() throws UMBServiceException ; void shutdown(); UFMConfig getConfig(); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/SessionPhaser.java ================================================ package 
com.walnut.sparta.ucdn.console.ufm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.id.GUID; import com.walnut.sparta.ucdn.console.infrastructure.ClusterLock; import com.walnut.sparta.ucdn.console.ufm.session.UFMTransaction; import java.io.RandomAccessFile; public interface SessionPhaser extends Pinenut { void registerFileLock(GUID guid, Object object ); Object getFileLock(GUID guid ); void removeFileLock( GUID guid ); void registerClusterLock(GUID guid, ClusterLock clusterLock); ClusterLock getClusterLock( GUID guid ); void removeClusterLock( GUID guid ); void registerClusterCount(GUID guid, long count ); long getClusterCount(GUID guid ); void removeClusterCount( GUID guid ); void incrementClusterCount(GUID guid ); void resetClusterCount(GUID guid ); void registerConsumerCount(GUID guid, Long count ); long getConsumerCount(GUID guid ); void removeConsumerCount( GUID guid ); void incrementConsumerCount(GUID guid ); void resetConsumerCount(GUID guid ); void registerSessionTransaction(Long sessionId, UFMTransaction ufmTransaction); UFMTransaction getSessionTransaction( Long sessionId ); void removeSessionTransaction( Long sessionId ); void registerClusterOutputStream(GUID guid, RandomAccessFile fileOutputStream); RandomAccessFile getClusterOutputStream(GUID guid ); void removeClusterOutputStream( GUID guid ); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/SessionValidator.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umct.stereotype.Iface; import java.io.IOException; @Iface public interface SessionValidator extends Pinenut { void stageClusterGroupComplete( String path ) throws IOException; void stageFileTransmitComplete( String path ) throws IOException; void fileTransmitComplete( String path, String serviceId ) throws IOException; void start() throws UMBServiceException; void shutdown(); boolean hasStarted(); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UCFMConfig.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import java.nio.file.Path; import java.util.Map; public class UCFMConfig implements UFMConfig { protected int mnFileFrameSize; protected int mnBatchTransmitMemberThreshold; protected long mnSessionExpiredTimeMillis; protected String mszFileCloudDistributeTransmitTopic; protected String mszFileCloudDistributeEventTopic; protected String mszFileServiceTransmitGroup; protected String mszTemporaryFileExtends; protected String mszMajorTemporaryClusterFileDirectory; protected String mszLocalMasterTemporaryClusterFileDirectory; public UCFMConfig ( Map configMap ) { this.mnFileFrameSize = ( (Number)configMap.get("fileFrameSize") ).intValue(); this.mnBatchTransmitMemberThreshold = ( (Number)configMap.get("batchTransmitMemberThreshold") ).intValue(); this.mnSessionExpiredTimeMillis = ( (Number)configMap.get("sessionExpiredTimeMillis") ).longValue(); this.mszFileCloudDistributeTransmitTopic = (String) configMap.get("fileCloudDistributeTransmitTopic"); this.mszFileCloudDistributeEventTopic = (String) configMap.get("fileCloudDistributeEventTopic"); this.mszFileServiceTransmitGroup = (String) configMap.get("fileServiceTransmitGroup"); 
this.mszTemporaryFileExtends = (String) configMap.get("temporaryFileExtends"); this.mszMajorTemporaryClusterFileDirectory = (String) configMap.get("majorTemporaryClusterFileDirectory"); this.mszLocalMasterTemporaryClusterFileDirectory = (String) configMap.get("localMasterTemporaryClusterFileDirectory"); } @Override public int getFileFrameSize() { return this.mnFileFrameSize; } @Override public String getFileCloudDistributeTransmitTopic() { return this.mszFileCloudDistributeTransmitTopic; } @Override public String getFileCloudDistributeEventTopic() { return this.mszFileCloudDistributeEventTopic; } @Override public long getSessionExpiredTimeMillis() { return this.mnSessionExpiredTimeMillis; } @Override public String getFileServiceTransmitGroup() { return this.mszFileServiceTransmitGroup; } @Override public String getTemporaryFileExtends() { return this.mszTemporaryFileExtends; } @Override public String getMajorTemporaryClusterFileDirectory() { return this.mszMajorTemporaryClusterFileDirectory; } @Override public Path formatTemporaryPath( String segName ) { return Path.of( this.getMajorTemporaryClusterFileDirectory(), segName + this.getTemporaryFileExtends() ); } @Override public Path formatMasterTemporaryPath( String segName ) { return Path.of( this.mszLocalMasterTemporaryClusterFileDirectory, segName + this.getTemporaryFileExtends() ); } @Override public int getBatchTransmitMemberThreshold() { return this.mnBatchTransmitMemberThreshold; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMConfig.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import java.nio.file.Path; import com.pinecone.framework.system.prototype.Pinenut; public interface UFMConfig extends Pinenut { int getFileFrameSize(); String getFileCloudDistributeTransmitTopic(); String getFileCloudDistributeEventTopic(); String getFileServiceTransmitGroup(); String getTemporaryFileExtends(); String getMajorTemporaryClusterFileDirectory(); Path formatTemporaryPath( String segName ); Path formatMasterTemporaryPath( String segName ); long getSessionExpiredTimeMillis(); int getBatchTransmitMemberThreshold(); } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMDClusterDO.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.walnut.sparta.ucdn.console.ufm.protocol.FileMeta64; public class UFMDClusterDO extends FileMeta64 { protected String filePath; protected long segId; public UFMDClusterDO() { super(); } public UFMDClusterDO( String sourceName, long size, long validateVal, String filePath, long segId ) { super( sourceName, size, validateVal ); this.filePath = filePath; this.segId = segId; } public String getFilePath() { return filePath; } public void setFilePath( String filePath ) { this.filePath = filePath; } public long getSegId() { return segId; } public void setSegId(long segId) { this.segId = segId; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMDClusterFrame.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.pinecone.framework.system.prototype.Pinenut; public class UFMDClusterFrame implements Pinenut { private byte[] bytes; private String path; private long segId; private long totalSegNum; private long offset; 
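    // Note (not in the original source; inferred from FileMultiDistributionController): a frame
    // carries `bytes.length` bytes to be written at `offset` inside cluster `segId` of the file
    // at `path`; `totalSegNum` lets the receiver detect the last cluster (segId == totalSegNum - 1).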
public UFMDClusterFrame( byte[] bytes, String path, long segId, long totalSegNum, long offset ) {
        this.bytes = bytes;
        this.path = path;
        this.segId = segId;
        this.totalSegNum = totalSegNum;
        this.offset = offset;
    }

    public UFMDClusterFrame(){}

    public byte[] getBytes() { return bytes; }
    public void setBytes(byte[] bytes) { this.bytes = bytes; }
    public String getPath() { return path; }
    public void setPath(String path) { this.path = path; }
    public long getSegId() { return segId; }
    public void setSegId(long segId) { this.segId = segId; }
    public long getTotalSegNum(){ return this.totalSegNum; }
    public void setTotalSegNum( long totalSegNum ){ this.totalSegNum = totalSegNum; }
    public long getOffset(){ return this.offset; }
    public void setOffset( long offset ){ this.offset = offset; }
}

================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMSessionPhaser.java
================================================
package com.walnut.sparta.ucdn.console.ufm;

import com.pinecone.framework.util.id.GUID;
import com.walnut.sparta.ucdn.console.infrastructure.ClusterLock;
import com.walnut.sparta.ucdn.console.ufm.session.UFMTransaction;

import java.io.RandomAccessFile;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

//@Component
public class UFMSessionPhaser implements SessionPhaser {
    // Keyed by sessionId; records whether a transaction exists for this session.
    private ConcurrentMap<Long, UFMTransaction> sessionTransactions;

    // File.Guid => Lock
    // Keyed by the large file's GUID; holds the per-file phase lock.
    private ConcurrentMap<GUID, Object> fileLocksMap;

    // Cluster locks; guarantee that only one thread at a time writes a temporary cluster file.
    private ConcurrentMap<GUID, ClusterLock> ClusterLocksMap;

    // File.Guid => Cluster.count (N)
    // Keyed by the large file's GUID; records how many clusters have been transmitted.
    private ConcurrentMap<GUID, Long> clusterComplatedPhaserMap;

    // File.Guid => Consumer.count (N)
    // Keyed by the large file's GUID; records how many consumer threads have finished their task.
    private ConcurrentMap<GUID, Long> consumerComplatedPhaserMap;

    // Keyed by the cluster GUID; records the I/O channel of the cluster currently being written.
    private ConcurrentMap<GUID, RandomAccessFile> clusterOutputStreamMap;

    public UFMSessionPhaser() {
        this.sessionTransactions = new ConcurrentHashMap<>();
        this.fileLocksMap = new ConcurrentHashMap<>();
        this.clusterComplatedPhaserMap = new ConcurrentHashMap<>();
        this.consumerComplatedPhaserMap = new ConcurrentHashMap<>();
        this.clusterOutputStreamMap = new ConcurrentHashMap<>();
        this.ClusterLocksMap = new ConcurrentHashMap<>();
    }

    @Override
    public void registerFileLock( GUID guid, Object object ) { this.fileLocksMap.put( guid, object ); }
    @Override
    public Object getFileLock( GUID guid ) { return this.fileLocksMap.get( guid ); }
    @Override
    public void removeFileLock(GUID guid) { this.fileLocksMap.remove( guid ); }

    @Override
    public void registerClusterLock(GUID guid, ClusterLock clusterLock) { this.ClusterLocksMap.put( guid, clusterLock ); }
    @Override
    public ClusterLock getClusterLock(GUID guid) { return this.ClusterLocksMap.get( guid ); }
    @Override
    public void removeClusterLock(GUID guid) { this.ClusterLocksMap.remove( guid ); }

    @Override
    public void removeClusterCount(GUID guid) { this.clusterComplatedPhaserMap.remove( guid ); }
    @Override
    public void removeConsumerCount(GUID guid) { this.consumerComplatedPhaserMap.remove( guid ); }

    @Override
    public void registerClusterCount( GUID guid, long count ) { this.clusterComplatedPhaserMap.put( guid, count ); }
    @Override
    public long getClusterCount( GUID guid ) { return this.clusterComplatedPhaserMap.get( guid ); }
    @Override
    public void incrementClusterCount( GUID guid ) {
        Long l = this.clusterComplatedPhaserMap.get(guid);
        this.clusterComplatedPhaserMap.put( guid, l + 1 );
    }
    @Override
    public void resetClusterCount( GUID guid ) { this.clusterComplatedPhaserMap.put( guid, 0L ); }
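    // Note (not in the original source; inferred from FileMultiDistributionController): the
    // cluster counter lifecycle is driven by the controller: registerClusterCount(fileGuid, 0)
    // at `startDistribution`, incrementClusterCount(fileGuid) per finished cluster, and
    // resetClusterCount(fileGuid) once getBatchTransmitMemberThreshold() clusters have landed.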
@Override public void registerConsumerCount( GUID guid, Long count ) { this.consumerComplatedPhaserMap.put( guid, count ); } @Override public long getConsumerCount( GUID guid ) { return this.consumerComplatedPhaserMap.get( guid ); } @Override public void incrementConsumerCount( GUID guid ) { Long l = this.consumerComplatedPhaserMap.get(guid); this.consumerComplatedPhaserMap.put( guid, l+1 ); } @Override public void resetConsumerCount( GUID guid ) { this.consumerComplatedPhaserMap.put( guid, 0L ); } @Override public void registerSessionTransaction(Long sessionId, UFMTransaction ufmTransaction) { this.sessionTransactions.put( sessionId, ufmTransaction ); } @Override public UFMTransaction getSessionTransaction(Long sessionId) { return this.sessionTransactions.get( sessionId ); } @Override public void removeSessionTransaction(Long sessionId) { this.sessionTransactions.remove( sessionId ); } @Override public void registerClusterOutputStream(GUID guid, RandomAccessFile fileOutputStream) { this.clusterOutputStreamMap.put( guid, fileOutputStream ); } @Override public RandomAccessFile getClusterOutputStream(GUID guid) { return this.clusterOutputStreamMap.get( guid ); } @Override public void removeClusterOutputStream(GUID guid) { this.clusterOutputStreamMap.remove( guid ); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMSessionValidator.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode; import java.io.IOException; public class UFMSessionValidator implements SessionValidator { protected UlfBroadcastControlNode eventMQClient; protected BroadcastControlProducer eventProducer; protected BroadcastControlConsumer eventConsumer; protected UFMConfig config; protected UOFSFileMultiDistributionService distributionService; public UFMSessionValidator( UOFSFileMultiDistributionService distributionService ) { this.config = distributionService.config; this.distributionService = distributionService; this.eventMQClient = distributionService.ucdnService.getPrimaryMessageMiddlewareDirector().getPrimaryRocketClient(); } @Override public boolean hasStarted() { return this.eventProducer != null; } @Override public void start() throws UMBServiceException { if ( !this.hasStarted() ) { this.eventProducer = this.eventMQClient.createBroadcastControlProducer(); this.eventConsumer = this.eventMQClient.createBroadcastControlConsumer( this.config.getFileCloudDistributeEventTopic() ); this.eventConsumer.registerController( new UFMSessionValidatorController( this.distributionService ) ); this.eventConsumer.start(); this.eventProducer.start(); } } @Override public void shutdown() { if ( this.hasStarted() ) { this.eventProducer.close(); this.eventConsumer.close(); this.eventProducer = null; this.eventConsumer = null; } } @Override public void stageClusterGroupComplete( String path ) throws IOException { this.eventProducer.issueInform( this.config.getFileCloudDistributeEventTopic(), "com.walnut.sparta.ucdn.console.ufm.SessionValidator.stageClusterGroupComplete", path ); } @Override public void stageFileTransmitComplete( String path ) throws IOException { this.eventProducer.issueInform( this.config.getFileCloudDistributeEventTopic(), 
"com.walnut.sparta.ucdn.console.ufm.SessionValidator.stageFileTransmitComplete", path ); } @Override public void fileTransmitComplete( String path, String serviceId ) throws IOException { this.eventProducer.issueInform( this.config.getFileCloudDistributeEventTopic(), "com.walnut.sparta.ucdn.console.ufm.SessionValidator.fileTransmitComplete", path,serviceId ); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMSessionValidatorController.java ================================================ package com.walnut.sparta.ucdn.console.ufm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Collection; @Controller @AddressMapping( "com.walnut.sparta.ucdn.console.ufm.SessionValidator." ) public class UFMSessionValidatorController implements Pinenut { private Logger logger; private SessionPhaser sessionPhaser; private KOMFileSystem primaryFileSystem; private FileMultiDistributionService distributionService; public UFMSessionValidatorController( UOFSFileMultiDistributionService distributionService ){ this.logger = LoggerFactory.getLogger( this.getClass() ); this.distributionService = distributionService; this.primaryFileSystem = distributionService.primaryFileSystem; this.sessionPhaser = distributionService.sessionPhaser; } @AddressMapping( "stageClusterGroupComplete" ) public void stageClusterGroupComplete( String path ){ this.logger.info( "UFMService invoked stageClusterGroupComplete." 
);
        ElementNode elementNode = this.primaryFileSystem.queryElement(path);
        this.sessionPhaser.incrementConsumerCount( elementNode.getGuid() );
        if( this.sessionPhaser.getConsumerCount( elementNode.getGuid() ) == 1 ){
            final Object lock = this.sessionPhaser.getFileLock( elementNode.getGuid() );
            synchronized ( lock ){
                lock.notify();
            }
            this.sessionPhaser.resetConsumerCount( elementNode.getGuid() );
        }
    }

    @AddressMapping( "stageFileTransmitComplete" )
    public void stageFileTransmitComplete( String path ){
        this.logger.info( "SlaveNode {}, file receive complete.", path );
    }

    @AddressMapping( "fileTransmitComplete" )
    public void fileTransmitComplete( String path, String serviceId ) throws IOException {
        FileNode fileNode = (FileNode)this.primaryFileSystem.queryElement(path);
        //GUID versionFileGuid = this.versionManage.getVersionFileByGuid(fileNode.getGuid());
        Collection<UFMEventSubscriber> subscribers = this.distributionService.fetchFileTransmitCompleteEventSubscribers();
        for ( UFMEventSubscriber subscriber : subscribers ) {
            subscriber.afterEventTriggered( path, serviceId, fileNode );
        }
    }
}

================================================
FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UOFSFileMultiDistributionService.java
================================================
package com.walnut.sparta.ucdn.console.ufm;

import com.pinecone.hydra.storage.file.KOMFileSystem;
import com.pinecone.hydra.storage.file.entity.ClusterPage;
import com.pinecone.hydra.storage.file.entity.FSNodeAllotment;
import com.pinecone.hydra.storage.file.entity.FileNode;
import com.pinecone.hydra.storage.file.entity.LocalCluster;
import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;
import com.pinecone.hydra.storage.io.TitanFileChannelChanface;
import com.pinecone.hydra.storage.volume.UniformVolumeManager;
import com.pinecone.hydra.umb.UMBServiceException;
import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;
import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;
import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;
import com.pinecone.ulf.util.guid.GUIDs;
import com.walnut.sparta.ucdn.console.infrastructure.UCDNService;
import com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber;
import com.walnut.sparta.ucdn.console.ufm.protocol.RequestHead;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class UOFSFileMultiDistributionService implements FileMultiDistributionService {
    protected KOMFileSystem primaryFileSystem;
    protected UniformVolumeManager primaryVolume;
    protected SessionPhaser sessionPhaser;
    protected UlfBroadcastControlNode transmitClient;
    protected BroadcastControlProducer transmitProducer;
    protected BroadcastControlConsumer transmitConsumer;
    protected List<UFMEventSubscriber> fileTransmitCompleteEventSubscribers;
    protected SessionValidator fileSessionValidator;
    protected UCDNService ucdnService;
    protected UFMConfig config;

    public UOFSFileMultiDistributionService( UCDNService ucdnService ) {
        this.ucdnService = ucdnService;
        this.primaryFileSystem = ucdnService.getKOMFileSystem();
        this.primaryVolume = ucdnService.getUniformVolumeManager();
        this.sessionPhaser = new UFMSessionPhaser();
        this.transmitClient = ucdnService.getPrimaryMessageMiddlewareDirector().getPrimaryKafkaClient();
        this.config =
ucdnService.getClusterFileSynchronizationConfig(); this.fileSessionValidator = new UFMSessionValidator( this ); this.fileTransmitCompleteEventSubscribers = new ArrayList<>(); } @Override public FileMultiDistributionService registerFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) { if ( this.hasStarted() ) { throw new IllegalStateException( "FileMultiDistributionService has already started." ); } this.fileTransmitCompleteEventSubscribers.add( subscriber ); return this; } @Override public FileMultiDistributionService deregisterFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) { if ( this.hasStarted() ) { throw new IllegalStateException( "FileMultiDistributionService has already started." ); } this.fileTransmitCompleteEventSubscribers.remove( subscriber ); return this; } @Override public boolean hasStarted() { return this.transmitProducer != null; } @Override public void start() throws UMBServiceException { if ( !this.hasStarted() ) { this.transmitProducer = this.transmitClient.createBroadcastControlProducer(); this.transmitConsumer = this.transmitClient.createBroadcastControlConsumer( this.config.getFileCloudDistributeTransmitTopic(), this.config.getFileServiceTransmitGroup() ); this.transmitConsumer.registerController( new FileMultiDistributionController( this ) ); this.transmitConsumer.start(); this.transmitProducer.start(); if ( !this.fileSessionValidator.hasStarted() ) { this.fileSessionValidator.start(); } } } @Override public void shutdown() { if ( this.hasStarted() ) { this.transmitConsumer.close(); this.transmitProducer.close(); this.transmitConsumer = null; this.transmitProducer = null; if ( this.fileSessionValidator.hasStarted() ) { this.fileSessionValidator.shutdown(); } } } @Override public UFMConfig getConfig() { return this.config; } @Override public Collection<UFMEventSubscriber> fetchFileTransmitCompleteEventSubscribers() { return this.fileTransmitCompleteEventSubscribers; } @Override public void fileDistribution( FileNode fileNode, String topic ) throws IOException, InterruptedException { this.sessionPhaser.registerFileLock( fileNode.getGuid(), new Object() ); FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); FileMultiDistributionIface fileDistribution = this.transmitProducer.getIface(FileMultiDistributionIface.class, topic); String path = this.primaryFileSystem.getPath(fileNode.getGuid()); long requestId = 0; RequestHead head = RequestHead.newRequest().setSessionId( System.currentTimeMillis() ); fileDistribution.startDistribution( head, path, fileNode.getPhysicalSize() ); //this.producer.issueInform( topic, "com.walnut.sparta.ucdn.console.umc.FileDistribution.startDistribution",path,fileNode.getPhysicalSize() );
ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() ); long fileClusterNum = clusterPage.getClusters(); int distributionFrameNum = 0; this.sessionPhaser.registerConsumerCount( fileNode.getGuid(),0L ); for( long i = 0; i < fileClusterNum; ++i ){ LocalCluster frame = clusterPage.getLocalCluster( i ); // TODO, Remote
UFMDClusterDO clusterDO = new UFMDClusterDO( frame.getSourceName(), frame.getSize(), frame.getCrc32(), path, i ); fileDistribution.setFrameMeta( head, clusterDO ); Path tempFilePath = this.config.formatMasterTemporaryPath( frame.getSegGuid().toString() ); String szTempFilePath = tempFilePath.toString(); File tempFile = new File( szTempFilePath ); if ( !tempFile.createNewFile() ){ throw new IOException( "Creating file compromised, what :" + szTempFilePath ); }
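/* The block below exports each cluster to a temporary file and rebroadcasts it in fixed-size
   frames. A small worked example of the ceil-division chunk arithmetic used in the loop (the
   names mirror the real variables; the values are illustrative only):

   int chunkSize = 64 * 1024;                                       // stand-in for config.getFileFrameSize()
   int bytesRead = 150 * 1024;                                      // one read() worth of data
   int chunksToProcess = (bytesRead + chunkSize - 1) / chunkSize;   // ceil(150/64) = 3
   for ( int j = 0; j < chunksToProcess; ++j ) {
       int start = j * chunkSize;
       int end = Math.min( start + chunkSize, bytesRead );          // final chunk is 22 KiB, not 64 KiB
       // each frame carries buffer[start, end) plus the running absolute offset
   }

   Pacing: every getBatchTransmitMemberThreshold() clusters the producer appears to park on the
   file lock via wait(); UFMSessionValidatorController.stageClusterGroupComplete notify()s it
   once a consumer group reports the cluster batch complete, throttling the broadcast. */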
FileChannel channel = FileChannel.open( tempFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND ); TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel ); FileNode newFileNode = fsNodeAllotment.newFileNode(); newFileNode.setPath( frame.getSourceName() ); newFileNode.setDefinitionSize( frame.getSize() ); TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( this.primaryFileSystem, this.primaryVolume, newFileNode, kChannel ); exportEntity.export( frame ); FileInputStream fileInputStream = new FileInputStream(tempFile);
// int bufferSize = 950 * 1024;
// byte[] buffer = new byte[ bufferSize ];
// int bytesRead;
//
// while( ( bytesRead = fileInputStream.read( buffer ) )!=-1 ) {
// if ( bytesRead < bufferSize ) {
// byte[] validData = Arrays.copyOfRange(buffer, 0, bytesRead);
// buffer = validData;
// }
//
// fileDistribution.transmitClusterFrame( head, new UFMDClusterFrame( buffer, path, i, fileClusterNum ) );
// //this.producer.issueInform( topic, "com.walnut.sparta.ucdn.console.umc.FileDistribution.transmitClusterFrame",new UFMDClusterFrame(buffer,path,i));
// }
int bufferSize = 2 * 1024 * 1024; // 2MB
byte[] buffer = new byte[bufferSize]; int bytesRead; int chunkSize = this.config.getFileFrameSize(); long currentPosition = 0; try { while ( (bytesRead = fileInputStream.read(buffer)) != -1 ) { int chunksToProcess = (bytesRead + chunkSize - 1) / chunkSize; // number of chunks this buffer splits into
for ( int j = 0; j < chunksToProcess; ++j ) { // compute the start and end of the current chunk
int start = j * chunkSize; int end = Math.min(start + chunkSize, bytesRead); byte[] chunkData = Arrays.copyOfRange( buffer, start, end ); // slice out the current chunk
// send the current chunk
fileDistribution.transmitClusterFrame( head, new UFMDClusterFrame(chunkData, path, i, fileClusterNum, currentPosition) ); currentPosition = currentPosition + (end - start); } } } finally { fileInputStream.close(); tempFile.delete(); } ++distributionFrameNum; if( distributionFrameNum == this.config.getBatchTransmitMemberThreshold() ){ synchronized( this.sessionPhaser.getFileLock( fileNode.getGuid() ) ){ this.sessionPhaser.getFileLock( fileNode.getGuid() ).wait(); } distributionFrameNum = 0; } } } @Override public void test() throws UMBServiceException { FileMultiDistributionIface fileDistribution = this.transmitProducer.getIface(FileMultiDistributionIface.class,"testTopic"); FileNode fileNode = this.primaryFileSystem.getFileNode(GUIDs.GUID128("1214792-000373-0003-00")); String path = this.primaryFileSystem.getPath(fileNode.getGuid()); //fileDistribution.startDistribution( path );
BroadcastControlConsumer consumer = this.getTransmitConsumer("testTopic", "testGroup"); consumer.start(); } @Override public BroadcastControlConsumer getTransmitConsumer( String topic,String group ) { return this.transmitConsumer; } @Override public BroadcastControlProducer getTransmitProducer() { return this.transmitProducer; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/event/UFMEventSubscriber.java ================================================ package com.walnut.sparta.ucdn.console.ufm.event; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.file.entity.FileNode; import java.io.IOException; public interface UFMEventSubscriber extends Pinenut { void afterEventTriggered( String path, String serviceId, FileNode fileNode ) ; } ================================================ FILE:
Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/protocol/FileMeta64.java ================================================ package com.walnut.sparta.ucdn.console.ufm.protocol; public class FileMeta64 { protected String sourceName; protected long size; protected long validateVal; public FileMeta64() {} public FileMeta64( String sourceName, long size, long validateVal ) { this.size = size; this.sourceName = sourceName; this.validateVal = validateVal; } public String getSourceName() { return this.sourceName; } public void setSourceName( String sourceName ) { this.sourceName = sourceName; } public long getSize() { return this.size; } public void setSize( long size ) { this.size = size; } public long getValidateVal() { return this.validateVal; } public void setValidateVal( long validateVal ) { this.validateVal = validateVal; } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/protocol/RequestHead.java ================================================ package com.walnut.sparta.ucdn.console.ufm.protocol; public class RequestHead { protected long sessionId; public RequestHead setSessionId(long sessionId ) { this.sessionId = sessionId; return this; } public long getSessionId() { return this.sessionId; } public static RequestHead newRequest() { return new RequestHead(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/session/UFMTransaction.java ================================================ package com.walnut.sparta.ucdn.console.ufm.session; import com.pinecone.framework.util.id.GUID; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; public class UFMTransaction { protected GUID localFileGUID; protected AtomicBoolean startTransmit; protected AtomicBoolean transmitFileContent; protected AtomicBoolean fileDistributionComplete; protected long lastEventArrivedMills; public UFMTransaction( GUID localFileGUID ) { this.localFileGUID = localFileGUID; this.startTransmit = new AtomicBoolean(false); this.transmitFileContent = new AtomicBoolean(false); this.fileDistributionComplete = new AtomicBoolean(false); } public GUID getLocalFileGUID() { return this.localFileGUID; } public long getLastEventArrivedMills() { return this.lastEventArrivedMills; } public void setLastEventArrivedMills( long lastEventArrivedMills ) { this.lastEventArrivedMills = lastEventArrivedMills; } public boolean finishStartTransmit() { return this.startTransmit.compareAndSet(false, true); } public boolean finishTransmitFileContent() { return this.transmitFileContent.compareAndSet(false, true); } public boolean finishFileDistributionComplete() { return this.fileDistributionComplete.compareAndSet(false, true); } public boolean isStartTransmit() { return this.startTransmit.get(); } public boolean isTransmitFileContent() { return this.transmitFileContent.get(); } public boolean isFileDistributionComplete() { return this.fileDistributionComplete.get(); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/util/JWTUtil.java ================================================ package com.walnut.sparta.ucdn.console.util; import com.auth0.jwt.JWT; import com.auth0.jwt.algorithms.Algorithm; import com.auth0.jwt.interfaces.DecodedJWT; import java.util.Date; import java.util.HashMap; public class JWTUtil { private static final String SIGN = 
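/* A minimal usage sketch for the utility methods below, assuming only the com.auth0:java-jwt
   dependency declared in this module's pom (illustration, not part of the original source):

   String token = JWTUtil.createJWT();             // HMAC256-signed, expires in one hour
   DecodedJWT decoded = JWTUtil.ParseJWt( token ); // throws JWTVerificationException when invalid or expired
   System.out.println( decoded.getExpiresAt() );
*/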
"!^&%&*!@$*%!!@(&%2ar^2t"; //学生登录生成JWT令牌 public static String createJWT(){ HashMap map = new HashMap<>(); String token = JWT.create() .withHeader(map) //设置头信息 .withExpiresAt(new Date(System.currentTimeMillis() + 3600 * 1000)) //设置失效时间 .sign(Algorithm.HMAC256(SIGN)); //设置签名以及签名方式 这里使用HMAC256加密方式 return token; } public static DecodedJWT ParseJWt(String jwt){ return JWT.require(Algorithm.HMAC256(SIGN)).build().verify(jwt); } } ================================================ FILE: Sparta/sparta-ucdn-console/src/test/java/cn/ken/AppTest.java ================================================ package cn.ken; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; /** * Unit test for simple App. */ public class AppTest extends TestCase { /** * Create the test case * * @param testName name of the test case */ public AppTest( String testName ) { super( testName ); } /** * @return the suite of tests being tested */ public static Test suite() { return new TestSuite( AppTest.class ); } /** * Rigourous Test :-) */ public void testApp() { assertTrue( true ); } } ================================================ FILE: Sparta/sparta-ucdn-service/pom.xml ================================================ sparta com.walnuts.sparta 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.walnut.sparta.ucdn.service sparta-ucdn-service 2.1.0 org.springframework.boot spring-boot-starter org.springframework.boot spring-boot-starter-test compile org.springframework.boot spring-boot-starter-web 2.6.13 compile com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.pinecone.summer.springram springram 2.1.0 compile com.pinecone.hydra.kom.driver.default hydra-kom-default-driver 2.1.0 compile com.pinecone.tritium hydra-system-tritium 2.1.0 compile ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/SpartaBoot.java ================================================ package com.walnut.sparta.ucdn.service; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class SpartaBoot { } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/UCDNBoot.java ================================================ package com.walnut.sparta.ucdn.service; import com.pinecone.Pinecone; import com.walnut.sparta.ucdn.service.infrastructure.UOFSContentDelivery; public class UCDNBoot { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ UOFSContentDelivery ucdn = (UOFSContentDelivery) Pinecone.sys().getTaskManager().add( new UOFSContentDelivery( args, Pinecone.sys() ) ); ucdn.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/controller/v2/ClientController.java ================================================ package com.walnut.sparta.ucdn.service.api.controller.v2; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.io.TitanOutputStreamChanface; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.bucket.source.SiteManipulator; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileNode; import 
com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umb.kafka.WolfMCKafkaClient; import com.pinecone.hydra.umb.wolf.WolfMCBClient; import com.pinecone.hydra.umct.WolfMCExpress; import com.walnut.sparta.ucdn.service.api.iface.v2.FileSyncDistributionController; import com.walnut.sparta.ucdn.service.infrastructure.UOFSContentDelivery; import com.walnut.sparta.ucdn.service.infrastructure.constants.PolicyConstants; import com.walnut.sparta.ucdn.service.infrastructure.exception.IllegalPathException; import com.walnut.sparta.ucdn.service.umct.FileSyncDistribution; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RestController; import javax.annotation.PostConstruct; import javax.annotation.Resource; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.Map; @RestController @CrossOrigin public class ClientController { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; @Resource private VersionManage primaryVersion; @Resource private BucketInstrument bucketInstrument; @GetMapping("/titan/version") public String queryVersion(HttpServletRequest request, HttpServletResponse response){ return "undefined"; } @GetMapping("/**") public void getFile(HttpServletRequest request, HttpServletResponse response) throws IOException,IllegalPathException { String servletPath = request.getServletPath(); String[] pathPart = servletPath.split(PolicyConstants.FORWARD_SLASH); if( pathPart.length < 3 ){ //todo malformed paths currently surface as HTTP 500
throw new IllegalPathException(); } String siteName = pathPart[1]; //todo unify path resolution
StringBuilder filePath = new StringBuilder(); for( int i = 2; i < pathPart.length; i++ ){ if( i == pathPart.length - 1 ){ int dotIndex = pathPart[i].lastIndexOf(PolicyConstants.PERIOD); String baseName = pathPart[i].substring(0, dotIndex); filePath.append(baseName); }else { filePath.append(pathPart[i]).append(PolicyConstants.FORWARD_SLASH); } } SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator(); Site site = siteManipulator.querySiteByName(siteName); String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + PolicyConstants.FORWARD_SLASH + filePath; // upgrade to a chain of responsibility later for better extensibility
Map<String, String[]> parameterMap = request.getParameterMap(); String version = ""; if( parameterMap.get("version") != null ){ version = parameterMap.get("version")[0]; } ServletOutputStream outputStream = response.getOutputStream(); TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream); Folder folder = (Folder) this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(realFilePath)); GUID storageObjectGuid = this.primaryVersion.queryObjectGuid(version, folder.getGuid()); FileNode storageObject = (FileNode) this.primaryFileSystem.get(storageObjectGuid); TitanFileExportEntity64 entity =
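/* Recap of the resolution chain implemented just above (hedged; fs and versionManage are
   shorthand for the injected primaryFileSystem and primaryVersion):

   GET /siteName/a/b/name.ext?version=v
   String komPath = fs.getPath( site.getMountPointGuid() ) + "/" + "a/b/name";  // extension stripped
   Folder folder  = (Folder) fs.get( fs.queryGUIDByPath( komPath ) );
   GUID objGuid   = versionManage.queryObjectGuid( v, folder.getGuid() );       // version -> storage object
   // the export entity constructed below then streams that FileNode to the
   // ServletOutputStream through TitanOutputStreamChanface
*/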
new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, storageObject, kChannel); this.primaryFileSystem.export( entity ); } @Resource protected UOFSContentDelivery uofsContentDelivery; @Resource protected FileSyncDistributionController fileSyncDistributionController; @PostConstruct private void init() throws Exception { String server = "localhost:9092"; String keySerializer = StringSerializer.class.getName(); String valueSerializer = StringSerializer.class.getName(); String topic = "testTopic"; String group = "testGroup"; String keyDeserializer = StringDeserializer.class.getName(); String valueDeserializer = StringDeserializer.class.getName(); String autoOffsetReset = "earliest"; WolfMCBClient client = new WolfMCBClient(new WolfMCKafkaClient(server), "", this.uofsContentDelivery, WolfMCExpress.class); client.compile( FileSyncDistribution.class, false ); BroadcastControlProducer producer = client.createBroadcastControlProducer(); producer.start(); FileSyncDistribution raccoon = producer.getIface( FileSyncDistribution.class, topic ); BroadcastControlConsumer consumer = client.createBroadcastControlConsumer(topic,group); consumer.registerController( this.fileSyncDistributionController ); Thread thread = new Thread(()->{ try { consumer.start(); } catch (UMBServiceException e) { throw new RuntimeException(e); } }); thread.start(); } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/controller/v2/ConsoleController.java ================================================ package com.walnut.sparta.ucdn.service.api.controller.v2; import com.pinecone.hydra.storage.io.TitanFileChannelChanface; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.bucket.source.SiteManipulator; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FSNodeAllotment; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.version.entity.TitanVersion; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.walnut.sparta.ucdn.service.api.response.BasicResultResponse; import com.walnut.sparta.ucdn.service.infrastructure.constants.PolicyConstants; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import javax.annotation.Resource; import java.io.File; import java.io.IOException; import java.nio.channels.FileChannel; import java.nio.file.StandardOpenOption; @RestController @RequestMapping( "/api/v2/ucdn/console" ) @CrossOrigin public class ConsoleController { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; @Resource private VersionManage primaryVersion; @Resource private BucketInstrument bucketInstrument; /** * Upload a file. * @param filePath target path * @param version version number * @param file the file * @param siteName site name * @return operation result */
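/* Path convention used by the upload endpoint below, recapped with a concrete example (the
   literals come from PolicyConstants; the site mount point "/cdn" is hypothetical): uploading
   filePath "docs/logo.png" with version "1.2" yields

   realFilePath      = "/cdn" + "/" + "docs/logo"                             // extension split off
   storageObjectPath = realFilePath + "/$version" + "/" + "1.2" + "." + "png"
                     = "/cdn/docs/logo/$version/1.2.png"

   so each logical file is a folder whose "$version" children are the concrete storage objects,
   and a TitanVersion row maps (version, folder GUID) to the stored object's GUID. */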
@PostMapping("/upload") public BasicResultResponse upload(@RequestParam("siteName") String siteName, @RequestParam("filePath") String filePath, @RequestParam("version") String version, @RequestParam("file") MultipartFile file) throws IOException { SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator(); Site site = siteManipulator.querySiteByName(siteName); if( site == null ){ return BasicResultResponse.error("站点不存在"); } int dotIndex = filePath.lastIndexOf(PolicyConstants.PERIOD); String baseName = filePath.substring(0, dotIndex); String extension = filePath.substring(dotIndex + 1); String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + PolicyConstants.FORWARD_SLASH + baseName; FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); Folder node = this.primaryFileSystem.affirmFolder(realFilePath); String storageObjectPath = realFilePath + PolicyConstants.VERSION_PREFIX+ PolicyConstants.FORWARD_SLASH + version +PolicyConstants.PERIOD+ extension; File tempFile = File.createTempFile("upload",".temp"); if(!tempFile.exists()){ throw new IOException( "Creating file compromised, what :" + tempFile.toPath() ); } file.transferTo(tempFile); FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ); TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel ); FileNode fileNode = fsNodeAllotment.newFileNode(); fileNode.setDefinitionSize( tempFile.length() ); fileNode.setName( tempFile.getName() ); TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem,storageObjectPath, fileNode,titanFileChannelKChannel,this.primaryVolume ); this.primaryFileSystem.receive( receiveEntity ); FileTreeNode storageObject = this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(storageObjectPath)); TitanVersion titanVersion = new TitanVersion(); titanVersion.setVersion( version ); titanVersion.setFileGuid( node.getGuid() ); titanVersion.setTargetStorageObjectGuid( storageObject.getGuid() ); this.primaryVersion.insert( titanVersion ); if( !tempFile.delete() ){ throw new IOException( "Purging temporary file compromised, what :" + tempFile.toPath() ); } return BasicResultResponse.success(); } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/iface/v2/FileSyncDistributionController.java ================================================ package com.walnut.sparta.ucdn.service.api.iface.v2; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.sparta.ucdn.service.umct.FileSyncDistribution; import org.springframework.stereotype.Service; import javax.annotation.Resource; import java.io.IOException; @Controller @AddressMapping( "com.walnut.sparta.ucdn.service.umct.FileSyncDistribution." 
) @Service public class FileSyncDistributionController { @Resource private FileSyncDistribution fileSyncDistribution; @Resource private KOMFileSystem primaryFileSystem; @AddressMapping( "dino" ) public void dino( String name ) { } @AddressMapping( "fileDistribution" ) public void fileDistribution( String path, String topic, String server, long startSegId, long endSegId ) throws IOException { ElementNode elementNode = this.primaryFileSystem.queryElement(path); if( elementNode instanceof FileNode){ this.fileSyncDistribution.fileDistribution( (FileNode) elementNode, topic, server, startSegId, endSegId ); } } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/response/BasicResultResponse.java ================================================ package com.walnut.sparta.ucdn.service.api.response; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import org.springframework.http.HttpStatus; import java.io.Serializable; public class BasicResultResponse<T> implements Pinenut, Serializable { private Integer code = HttpStatus.OK.value(); private String msg; // error message
private T data; // data
public static <T> BasicResultResponse<T> success() { BasicResultResponse<T> result = new BasicResultResponse<>(); result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> successMsg( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> success( T object ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.data = object; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> error( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.INTERNAL_SERVER_ERROR.value(); return result; } /** * Get * @return code */ public Integer getCode() { return this.code; } /** * Set * @param code */ public void setCode(Integer code) { this.code = code; } /** * Get * @return msg */ public String getMsg() { return this.msg; } /** * Set * @param msg */ public void setMsg(String msg) { this.msg = msg; } /** * Get * @return data */ public T getData() { return this.data; } /** * Set * @param data */ public void setData(T data) { this.data = data; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "code" , this.code ), new KeyValue<>( "msg" , this.msg ), new KeyValue<>( "data" , this.data ) } ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/FSContentDeliveryService.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure; import com.pinecone.framework.system.prototype.Pinenut; public interface FSContentDeliveryService extends Pinenut { } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/SpartaUCDNService.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.functions.Executor; import
com.pinecone.framework.util.config.JSONConfig; import com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver; import com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver; import com.pinecone.hydra.servgram.Servgram; import com.pinecone.hydra.storage.bucket.TitanBucketInstrument; import com.pinecone.hydra.storage.file.FileSystemConfig; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.KernelFileSystemConfig; import com.pinecone.hydra.storage.file.UniformObjectFileSystem; import com.pinecone.hydra.storage.version.TitanVersionManage; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.volume.KernelVolumeConfig; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.version.ibatis.hydranium.VersionMappingDriver; import com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.summer.spring.Springron; import com.pinecone.tritium.Tritium; import com.walnut.sparta.ucdn.service.SpartaBoot; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.GenericApplicationContext; import java.io.IOException; import java.nio.file.Path; public class SpartaUCDNService extends Springron implements UCDNService { protected KOIMappingDriver koiMappingDriver; protected KOIMappingDriver koiFileMappingDriver; protected KOIMappingDriver koiBucketMappingDriver; protected KOIMappingDriver koiVersionMappingDriver; protected KOMFileSystem fileSystem; protected UniformVolumeManager volumeTree; protected TitanBucketInstrument bucketInstrument; protected TitanVersionManage versionManage; protected void initSubsystem() { this.koiMappingDriver = new VolumeMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiFileMappingDriver = new FileMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiBucketMappingDriver = new BucketMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiVersionMappingDriver = new VersionMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); JSONConfig selfConfig = (JSONConfig) this.getConfig(); FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( "service.PrimaryUniformFileSystem" ) ); this.fileSystem = new UniformObjectFileSystem( this.koiFileMappingDriver, fileSystemConfig ); VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( "service.PrimaryUniformVolumeManager" ) ); this.volumeTree = new UniformVolumeManager( this.koiMappingDriver, volumeConfig ); this.bucketInstrument = new TitanBucketInstrument( this.koiBucketMappingDriver ); this.versionManage = new TitanVersionManage( this.koiVersionMappingDriver ); this.setPrimarySources( SpartaBoot.class ); 
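/* The initializer installed next bridges the Hydra-constructed singletons into the Spring
   context so the @Resource fields in the controllers resolve. A condensed sketch of the same
   supplier-registration pattern (illustrative):

   springApplication.addInitializers( ( ConfigurableApplicationContext ctx ) -> {
       GenericApplicationContext gac = (GenericApplicationContext) ctx;
       gac.registerBean( "primaryFileSystem", KOMFileSystem.class, () -> fileSystem );
   } );

   Registering by supplier hands Spring the already-initialized subsystem objects instead of
   letting it construct parallel copies of its own. */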
this.setInitializer(new Executor() { @Override public void execute() throws Exception { SpartaUCDNService.this.getSpringApplication().addInitializers(new ApplicationContextInitializer() { @Override public void initialize( ConfigurableApplicationContext applicationContext ) { GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext; genericApplicationContext.registerBean("primaryFileSystem", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem); genericApplicationContext.registerBean("primaryVolume", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree); genericApplicationContext.registerBean("primaryBucket", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument); genericApplicationContext.registerBean("primaryVersion", VersionManage.class, () -> (VersionManage) versionManage); genericApplicationContext.registerBean("uofsContentDelivery", UOFSContentDelivery.class, () -> (UOFSContentDelivery) SpartaUCDNService.this.parentSystem()); } }); } }); } public SpartaUCDNService( String szName, Processum parent, String[] springbootArgs ) { super( szName, parent, springbootArgs ); this.mSpringKernel.setPrimarySources( SpartaBoot.class ); this.initSubsystem(); } public SpartaUCDNService( String szName, Processum parent ) { this( szName, parent, new String[0] ); } @Override protected void loadConfig() { this.mServgramList = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey ); Object dyServgramConf = this.mServgramList.get( this.gramName() ); if( dyServgramConf instanceof String ) { try{ this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) ); } catch ( IOException ignore ) { this.getLogger().info( "[Notice] Spring will use the default config `application.yaml`." 
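/* Config resolution in loadConfig (hedged recap of the surrounding code): the servgram entry
   for this gram's name is either a String, treated as a path to an external config file, or an
   inline child section. Schematically:

   Object conf = servgramList.get( gramName() );
   mServgramConf = ( conf instanceof String )
       ? servgramList.getChildFromPath( Path.of( (String) conf ) )  // IOException here is logged and
       : servgramList.getChild( gramName() );                       // Spring falls back to application.yaml
*/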
); } } else { this.mServgramConf = this.mServgramList.getChild( this.gramName() ); } } @Override public Tritium parentSystem() { return (Tritium)super.parentSystem(); } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/UCDNBeans.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure; import com.pinecone.hydra.umct.WolfMCExpress; public class UCDNBeans { // @Bean // public UlfKafkaClient ulfKafkaClient() { // return new WolfMCBClient(new WolfMCKafkaClient(server), "", this, WolfMCExpress.class); // } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/UCDNService.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure; import com.pinecone.hydra.system.component.Slf4jTraceable; public interface UCDNService extends Slf4jTraceable { } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/UOFSContentDelivery.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.tritium.Tritium; public class UOFSContentDelivery extends Tritium implements FSContentDeliveryService { protected SpartaUCDNService spartaUCDNService; public UOFSContentDelivery( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public UOFSContentDelivery( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { this.spartaUCDNService = new SpartaUCDNService( "SpartaUCDNService", this ); this.spartaUCDNService.execute(); this.getTaskManager().add(this.spartaUCDNService); this.getTaskManager().syncWaitingTerminated(); } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/constants/PolicyConstants.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure.constants; public class PolicyConstants { public static String VERSION_PREFIX = "/$version"; public static String FORWARD_SLASH = "/"; public static String PERIOD = "."; } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/dto/UploadDTO.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure.dto; import com.pinecone.framework.system.prototype.Pinenut; public class UploadDTO implements Pinenut { private String version; private String filePath; public UploadDTO() { } public UploadDTO(String version, String filePath) { this.version = version; this.filePath = filePath; } public String getVersion() { return version; } public void setVersion(String version) { this.version = version; } public String getFilePath() { return filePath; } public void setFilePath(String filePath) { this.filePath = filePath; } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/exception/IllegalPathException.java ================================================ package com.walnut.sparta.ucdn.service.infrastructure.exception; public class 
IllegalPathException extends Exception{ @Override public String toString() { return "illegal path !!!"; } } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/umct/FileSyncDistribution.java ================================================ package com.walnut.sparta.ucdn.service.umct; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.umct.stereotype.Iface; import java.io.IOException; @Iface public interface FileSyncDistribution { void fileDistribution(FileNode fileNode, String topic, String server, long startSegId, long endSegId ) throws IOException; } ================================================ FILE: Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/umct/FileSyncDistributionImpl.java ================================================ package com.walnut.sparta.ucdn.service.umct; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.umb.kafka.UlfKafkaClient; import org.springframework.stereotype.Service; import javax.annotation.Resource; @Service public class FileSyncDistributionImpl implements FileSyncDistribution { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; @Resource UlfKafkaClient ulfKafkaClient; @Override public void fileDistribution(FileNode fileNode, String topic, String server,long startSegId, long endSegId) { // FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); // WolfMCBClient client = new WolfMCBClient(new WolfMCKafkaClient(server), "", this, WolfMCExpress.class); // client.compile( ); // BroadcastControlProducer producer = client.createBroadcastControlProducer(); // producer.start(); // TreeMap frames = fileNode.getClusters(); // for( long i = startSegId; i < endSegId; i++ ){ // LocalCluster frame = (LocalCluster)frames.get(i); // File tempFile = File.createTempFile("temp", frame.getSegGuid().toString()); // FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND); // TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel ); // FileNode newFileNode = fsNodeAllotment.newFileNode(); // // newFileNode.setPath( frame.getSourceName() ); // newFileNode.setDefinitionSize( frame.getSize() ); // TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( this.primaryFileSystem, this.primaryVolume, newFileNode, kChannel ); // // exportEntity.export( frame ); // } } } ================================================ FILE: Sparta/sparta-uofs-console/pom.xml ================================================ sparta com.walnuts.sparta 2.5.1 org.springframework.boot spring-boot-maven-plugin package repackage true com.walnut.sparta.uofs.console.UOFSBoot org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.walnut.sparta.uofs.console sparta-uofs-console 2.1.0 org.springframework.boot spring-boot-starter org.springframework.boot spring-boot-starter-test test org.springframework.boot spring-boot-starter-web 2.6.13 compile com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.pinecone.summer.springram springram 2.1.0 compile com.pinecone.tritium hydra-system-tritium 2.1.0 test com.pinecone.hydra.kom.driver.default hydra-kom-default-driver 2.1.0 compile org.apache.thrift 
libthrift 0.18.0 compile com.pinecone.hydra.sdk.thrift hydra-lib-thrift-sdk 1.2.1 compile com.walnut.sparta.api.uac sparta-api-uac 2.1.0 compile commons-fileupload commons-fileupload 1.3.1 com.pinecone.tritium hydra-system-tritium 2.1.0 compile com.auth0 java-jwt 4.4.0 ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/SpartaBoot.java ================================================ package com.walnut.sparta.uofs.console; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class SpartaBoot { } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/UOFSBoot.java ================================================ package com.walnut.sparta.uofs.console; import com.pinecone.Pinecone; import com.walnut.sparta.uofs.console.infrastructure.UOFSContentDelivery; public class UOFSBoot { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ UOFSContentDelivery uofs = (UOFSContentDelivery) Pinecone.sys().getTaskManager().add( new UOFSContentDelivery( args, Pinecone.sys() ) ); uofs.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/BucketController.java ================================================ //package com.walnuts.sparta.uofs.console.api.controller.v2; // //import com.pinecone.hydra.storage.io.TitanFileChannelChanface; //import com.pinecone.hydra.storage.bucket.BucketInstrument; //import com.pinecone.hydra.storage.bucket.entity.Bucket; //import com.pinecone.hydra.storage.bucket.entity.GenericBucket; //import com.pinecone.hydra.storage.file.KOMFileSystem; //import com.pinecone.hydra.storage.file.entity.FileNode; //import com.pinecone.hydra.storage.file.entity.Folder; //import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64; //import com.pinecone.hydra.storage.io.UIOException; //import com.pinecone.hydra.storage.volume.UniformVolumeManager; //import com.pinecone.ulf.util.guid.GUIDs; //import com.walnuts.sparta.uofs.console.api.response.BasicResultResponse; //import org.springframework.web.bind.annotation.CrossOrigin; //import org.springframework.web.bind.annotation.DeleteMapping; //import org.springframework.web.bind.annotation.GetMapping; //import org.springframework.web.bind.annotation.PathVariable; //import org.springframework.web.bind.annotation.PutMapping; //import org.springframework.web.bind.annotation.RequestBody; //import org.springframework.web.bind.annotation.RequestMapping; //import org.springframework.web.bind.annotation.RequestParam; //import org.springframework.web.bind.annotation.RestController; //import org.springframework.web.multipart.MultipartFile; // //import javax.annotation.Resource; //import javax.servlet.http.HttpServletRequest; //import java.io.File; //import java.io.IOException; //import java.nio.channels.FileChannel; //import java.nio.file.StandardOpenOption; //import java.time.LocalDateTime; //import java.util.List; // //@RestController //@RequestMapping( "/api/v2/uofs/bucket" ) //@CrossOrigin //public class BucketController { // @Resource // private KOMFileSystem primaryFileSystem; // // @Resource // private BucketInstrument bucketInstrument; // @Resource // private TransmitController transmitController; // // @Resource // private UniformVolumeManager primaryVolume; // // 
// /** // * Create a bucket // * @param bucketName bucket name // * @param accountGuid user GUID // * @return the bucketGuid // */ // @PutMapping("/{bucketName}") // public BasicResultResponse createBucket(@PathVariable String bucketName, @RequestBody String accountGuid ){ // Folder folder = this.primaryFileSystem.affirmFolder(bucketName); // GenericBucket bucket = new GenericBucket(); // bucket.setBucketName( bucketName ); // bucket.setCreateTime(LocalDateTime.now()); // bucket.setMountPoint( folder.getGuid() ); // bucket.setUserGuid( GUIDs.GUID72( accountGuid ) ); // this.bucketInstrument.createBucket( bucket ); // return BasicResultResponse.success( bucket.getBucketGuid().toString() ); // } // // /** // * List all buckets under an account // * @param accountGuid user account guid // * @return all bucket info // */ // @GetMapping("/") // public String listBuckets(@RequestParam("accountGuid") String accountGuid ){ // List buckets = this.bucketInstrument.queryBucketsByUserGuid(GUIDs.GUID72(accountGuid)); // return BasicResultResponse.success(buckets).toJSONString(); // } // // /** // * Delete a bucket // * @param bucketName bucket name // * @param accountGuid account GUID // * @return operation result // */ // @DeleteMapping("/{bucketName}") // public BasicResultResponse deleteBucket( @PathVariable String bucketName, @RequestBody String accountGuid ){ // this.bucketInstrument.removeBucketByAccountAndBucketName( GUIDs.GUID72(accountGuid), bucketName ); // this.primaryFileSystem.remove( bucketName ); // return BasicResultResponse.success(); // } // // /** // * Get a storage object // * @param bucketName bucket name // * @param objectName object name // * @param targetPath target location // * @return operation result // */ // @GetMapping("/{bucketName}/{objectName}") // public BasicResultResponse getObject(@PathVariable String bucketName, @PathVariable String objectName, @RequestBody String targetPath) throws IOException { // FileNode fileNode = (FileNode) this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(bucketName + "/" + objectName)); // File file = new File(targetPath); // FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND); // TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel ); // TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume , fileNode, kChannel ); // this.primaryFileSystem.export( exportEntity ); // return BasicResultResponse.success(); // } // // @GetMapping("/*") // public void test(HttpServletRequest httpRequest){ // // } // /** // * Upload a storage object // * @param bucketName bucket name // * @param objectName object name // * @return // */ // @PutMapping("/{bucketName}/{objectName}") // public BasicResultResponse putObject(@PathVariable String bucketName, @PathVariable String objectName, @RequestBody MultipartFile file){ // return BasicResultResponse.success(); // } // // /** // * Delete a storage object // * @param bucketName bucket name // * @param objectName storage object name // * @return operation info // */ // @DeleteMapping("/{bucketName}/{objectName}") // public BasicResultResponse deleteObject( @PathVariable String bucketName, @PathVariable String objectName ){ // return BasicResultResponse.success(); // } // // /** // * List the objects in a bucket // * @param bucketName bucket name // * @return the object list // */ // public BasicResultResponse> listObjects( @PathVariable String bucketName ){ // return BasicResultResponse.success(); // } //} ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/ExternalSymbolicController.java
================================================ package com.walnut.sparta.uofs.console.api.controller.v2; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.external.ExternalFile; import com.pinecone.hydra.storage.file.external.ExternalFolder; import com.pinecone.hydra.storage.file.external.GenericNativeExternalFolder; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.ExternalSymbolic; import com.pinecone.hydra.storage.file.entity.Folder; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import com.walnut.sparta.uofs.console.domain.dto.CreateExternalDTO; import com.walnut.sparta.uofs.console.domain.dto.CreateExternalSymbolicDTO; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import javax.annotation.Resource; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; @RestController @RequestMapping( "/api/v2/uofs/externalSymbolic" ) @CrossOrigin public class ExternalSymbolicController { @Resource private KOMFileSystem primaryFileSystem; /** * List all contents of an external directory * @param path the path * @return the contents */ @GetMapping("/listItem") public String listItem(@RequestParam("path") String path){ ElementNode elementNode = this.primaryFileSystem.queryElement(path); if(elementNode != null){ ExternalSymbolic externalSymbolic = (ExternalSymbolic) elementNode; GenericNativeExternalFolder externalFolder = new GenericNativeExternalFolder(new File(externalSymbolic.getReparsedPoint())); return BasicResultResponse.success(externalFolder.listItem()).toJSONString(); } return BasicResultResponse.success().toJSONString(); } /** * List all contents of an external folder * @param path the path * @return the content info */ @GetMapping("/listItem/externalFoldr") public String externalFolderListItem(@RequestParam("path") String path){ File file = new File(path); GenericNativeExternalFolder externalFolder = new GenericNativeExternalFolder(file); return BasicResultResponse.success(externalFolder.listItem()).toJSONString(); } /** * External copy * @param sourcePath source path * @param destinationPath destination path */ @PutMapping("/copy") public BasicResultResponse directCopy( @RequestParam("sourcePath") String sourcePath, @RequestParam("destinationPath") String destinationPath ) throws IOException { ElementNode elementNode = this.primaryFileSystem.queryElement(sourcePath); if(elementNode instanceof ExternalFolder){ ExternalFolder externalFolder = (ExternalFolder) elementNode; this.primaryFileSystem.directCopy( externalFolder.getPath(),destinationPath ); } return BasicResultResponse.success(); } public BasicResultResponse createExternalSymbolic(@RequestBody CreateExternalSymbolicDTO dto){ return BasicResultResponse.success(); } /** * Upload a file directly * @param file the file * @param sourcePath target folder */ @PostMapping("/directUpload") public BasicResultResponse directUpload( @RequestParam("file") MultipartFile file,
@RequestParam("sourcePath") String sourcePath ) throws IOException { ElementNode elementNode = this.primaryFileSystem.queryElement(sourcePath); if( elementNode instanceof ExternalFolder){ ExternalFolder externalFolder = (ExternalFolder) elementNode; String path = externalFolder.getPath(); // 确保目标文件夹存在 Path targetDir = Paths.get(path); if (!Files.exists(targetDir)) { Files.createDirectories(targetDir); } // 构建目标文件路径 Path targetFile = targetDir.resolve(file.getOriginalFilename()); // 将 MultipartFile 写入目标文件 try (InputStream inputStream = file.getInputStream()) { Files.copy(inputStream, targetFile, StandardCopyOption.REPLACE_EXISTING); } } return BasicResultResponse.success(); } /** * 删除外部文件 * @param path 文件路径 * @return 返回操作结果 */ @DeleteMapping("/remove") public BasicResultResponse remove( @RequestParam("path") String path ){ ElementNode elementNode = this.primaryFileSystem.queryElement(path); if( elementNode instanceof ExternalFolder ){ ExternalFolder externalFolder = (ExternalFolder) elementNode; externalFolder.delete(); }else if( elementNode instanceof ExternalFile ){ ExternalFile externalFile = (ExternalFile) elementNode; externalFile.delete(); } return BasicResultResponse.success(); } /** * 创建外接文件 * @param dto 创建外接文件信息 * @return 返回操作结果 */ @PostMapping("/createExternalFile") public BasicResultResponse createExternalFile(@RequestBody CreateExternalDTO dto){ ElementNode elementNode = this.primaryFileSystem.queryElement(dto.getFolderPath()); Folder folder = elementNode.evinceFolder(); folder.createExternalSymbolic( dto.getExternalSymbolicName(), dto.getReparsedPoint()); return BasicResultResponse.success(); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/FileController.java ================================================ package com.walnut.sparta.uofs.console.api.controller.v2; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import com.walnut.sparta.uofs.console.domain.dto.RenameDTO; import com.walnut.sparta.uofs.console.domain.dto.UpdateFileNameDTO; import com.walnut.sparta.uofs.console.service.FileService; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; @RestController @RequestMapping( "/api/v2/uofs/file" ) @CrossOrigin public class FileController { @Resource private KOMFileSystem primaryFileSystem; @Resource private FileService fileService; @Resource private VersionManage primaryVersion; /** * 创建文件 * @param filePath 文件路径 * @return 返回操作状态 */ @GetMapping("/creat/file") public BasicResultResponse createFile(@RequestParam String filePath ){ this.primaryFileSystem.affirmFileNode( filePath ); return BasicResultResponse.success(); } /** * 获取文件或文件夹属性 * @param nodeGuid 文件或文件夹guid * @return 返回属性信息 */ @GetMapping("/attribute") public BasicResultResponse attribute(@RequestParam("nodeGuid") String nodeGuid 
){ FileTreeNode fileTreeNode = this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid)); return BasicResultResponse.success( fileTreeNode ); } /** * Remove a folder or file * @param fileGuid folder or file guid * @return operation result */ @DeleteMapping("/remove/file") public BasicResultResponse removeFile( String fileGuid ){ this.fileService.remove( GUIDs.GUID128( fileGuid ) ); this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) ); return BasicResultResponse.success(); } /** * Rename a file or folder * @param dto the info * @return operation info */ @PostMapping("/rename") public BasicResultResponse renameFile(@RequestBody RenameDTO dto){ this.primaryFileSystem.renameFile( dto.getPath(), dto.getNewName() ); return BasicResultResponse.success(); } /** * Rename interface * @param dto rename data * @return */ @PostMapping("/updateFileName") public BasicResultResponse updateFileName(@RequestBody UpdateFileNameDTO dto){ this.primaryFileSystem.renameFile( dto.getFilePath(), dto.getNewFileName() ); return BasicResultResponse.success(); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/FolderController.java ================================================ package com.walnut.sparta.uofs.console.api.controller.v2; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; import java.io.IOException; import java.util.List; @RestController @RequestMapping( "/api/v2/uofs/folder" ) @CrossOrigin public class FolderController { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; /** * List all contents of a folder * @param folderGuid folder guid * @return */ @GetMapping("/folder/listItem") public String listItem(@RequestParam String folderGuid ){ Folder folder = this.primaryFileSystem.getFolder(GUIDs.GUID128(folderGuid)); List fileTreeNodes = folder.listItem(); return BasicResultResponse.success(fileTreeNodes).toJSONString() ; } public String listItemByPath( @RequestParam String folderPath ){ return null; } /** * Create a folder * @param destDirPath folder path * @return operation status */ @GetMapping("/creat/folder") public BasicResultResponse createFolder( @RequestParam("destDirPath") String destDirPath ){ this.primaryFileSystem.affirmFolder( destDirPath ); return BasicResultResponse.success(); } /** * Get the attributes of a file or folder * @param nodeGuid file or folder guid * @return attribute info */ @GetMapping("/attribute") public BasicResultResponse< FileTreeNode > attribute( @RequestParam("nodeGuid") String nodeGuid ){ FileTreeNode fileTreeNode = this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid)); return BasicResultResponse.success( fileTreeNode ); } /** * List all root folders * @return root info */ @GetMapping("/list/root") public String listRoot(){ List roots = this.primaryFileSystem.fetchRoot(); return BasicResultResponse.success( roots ).toJSONString(); } /** * Remove a folder or file * @param
/** * Removes a folder or file. * @param fileGuid the folder or file GUID * @return the operation result */ @DeleteMapping("/remove/file") public BasicResultResponse removeFile( @RequestParam("fileGuid") String fileGuid ){ this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) ); return BasicResultResponse.success(); } /** * Copies a file. * @param sourcePath the source path * @param destinationPath the destination path * @return the operation result */ @PutMapping("/copy") public BasicResultResponse copy( @RequestParam("sourcePath") String sourcePath, @RequestParam("destinationPath") String destinationPath ) throws IOException { this.primaryFileSystem.copy( sourcePath, destinationPath, primaryVolume ); return BasicResultResponse.success("Copy succeeded"); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/SiteController.java ================================================ package com.walnut.sparta.uofs.console.api.controller.v2; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.bucket.entity.GenericSite; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.Folder; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; import java.util.List; @RestController @RequestMapping( "/api/v2/uofs/site" ) @CrossOrigin public class SiteController implements Pinenut { @Resource private BucketInstrument bucketInstrument; @Resource private KOMFileSystem primaryFileSystem; /** * Creates a site. * @param siteName the site name * @return the operation result */ @PutMapping("/createSite") public BasicResultResponse createSite(@RequestParam("siteName") String siteName){ Folder folder = this.primaryFileSystem.affirmFolder(siteName); GenericSite site = new GenericSite(); site.setSiteName( siteName ); site.setMountPointGuid( folder.getGuid() ); this.bucketInstrument.createSite( site ); return BasicResultResponse.success(); } /** * Deletes a site. * @param siteName the site name * @return the operation result */ @DeleteMapping("/deleteSite") public BasicResultResponse removeSite( @RequestParam("siteName") String siteName ){ this.bucketInstrument.removeSite(siteName); return BasicResultResponse.success(); } /** * Lists all sites. * @return all sites */ @GetMapping("/listSite") public String listSite(){ List< Site > sites = this.bucketInstrument.listSite(); return BasicResultResponse.success(sites).toJSONString(); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/TransmitController.java ================================================ package com.walnut.sparta.uofs.console.api.controller.v2; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.external.ExternalFile; import com.pinecone.hydra.storage.file.external.GenericNativeExternalFile; import com.pinecone.hydra.storage.file.entity.ElementNode; import com.pinecone.hydra.storage.file.entity.GenericFileNode; import com.pinecone.hydra.storage.io.Chanface; import 
com.pinecone.hydra.storage.io.TitanFileChannelChanface; import com.pinecone.hydra.storage.io.TitanOutputStreamChanface; import com.pinecone.hydra.storage.bucket.BucketInstrument; import com.pinecone.hydra.storage.bucket.entity.Site; import com.pinecone.hydra.storage.bucket.source.SiteManipulator; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.FSNodeAllotment; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64; import com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.version.entity.TitanVersion; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import com.walnut.sparta.uofs.console.domain.dto.DownloadObjectByChannelDTO; import com.walnut.sparta.uofs.console.domain.dto.UpdateObjectByChannelDTO; import com.walnut.sparta.uofs.console.infrastructure.UOFSConsoleContents; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import javax.annotation.Resource; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.channels.FileChannel; import java.nio.file.StandardOpenOption; import java.util.Map; import static org.apache.commons.io.FilenameUtils.getExtension; @RestController @RequestMapping( "/api/v2/uofs/transmit" ) @CrossOrigin public class TransmitController { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; @Resource private BucketInstrument bucketInstrument; @Resource private VersionManage primaryVersion; // @Resource // private UOFSConfig uofsConfig; /** * Uploads an object through a channel. * @param dto the upload parameters * @return the operation result * @throws IOException if the transfer fails */ @PostMapping("/channel/update") public BasicResultResponse updateObjectByChannel(UpdateObjectByChannelDTO dto ) throws IOException { MultipartFile object = dto.getObject(); File file = File.createTempFile( "uofs","."+ getExtension(object.getOriginalFilename()) ); if( !file.exists() ){ throw new IOException( "Failed to create temporary file: " + file.toPath() ); } object.transferTo( file ); Chanface chanface = this.getKChannel(file); FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); FileNode fileNode = fsNodeAllotment.newFileNode(); fileNode.setDefinitionSize( file.length() ); fileNode.setName( file.getName() ); TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem, dto.getDestDirPath(), fileNode, chanface, this.primaryVolume ); this.primaryFileSystem.receive( receiveEntity ); if(!file.delete()){ throw new IOException( "Failed to delete temporary file: " + file.toPath() ); } return BasicResultResponse.success(); }
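/* Note: Chanface is the byte-channel abstraction used for all transfers in this controller; TitanFileChannelChanface adapts a java.nio FileChannel (uploads and local downloads), while TitanOutputStreamChanface adapts a servlet output stream (HTTP downloads below). */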
/** * Downloads an object to a local file through a channel. * @param dto the download parameters * @return the operation result * @throws IOException if the transfer fails */ @PostMapping("/channel/download") public BasicResultResponse downloadObjectByChannel( DownloadObjectByChannelDTO dto ) throws IOException { File file = new File( dto.getTargetPath()); FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND); TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel ); FileNode fileNode = (FileNode) this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(dto.getDestDirPath())); TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( this.primaryFileSystem, this.primaryVolume, fileNode, titanFileChannelKChannel ); primaryFileSystem.export( exportEntity ); return BasicResultResponse.success(); } @GetMapping("/download/guid") public void getFile(HttpServletRequest request, HttpServletResponse response) throws IOException { Map< String, String[] > parameterMap = request.getParameterMap(); String[] guids = parameterMap.get("guid"); GUID storageObjectGuid = null; if( guids != null ){ storageObjectGuid = GUIDs.GUID128( guids[0] ); } ServletOutputStream outputStream = response.getOutputStream(); TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream); FileNode storageObject = (FileNode) this.primaryFileSystem.get(storageObjectGuid); TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, storageObject, kChannel); this.primaryFileSystem.export( entity ); } /** * Downloads a file by its path. */ @GetMapping("/download/path") public void getFileByPath(HttpServletRequest request, HttpServletResponse response) throws IOException { Map< String, String[] > parameterMap = request.getParameterMap(); String[] paths = parameterMap.get("path"); String path = null; if(paths != null){ path = paths[0]; } ServletOutputStream outputStream = response.getOutputStream(); TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream); ElementNode elementNode = this.primaryFileSystem.queryElement(path); if(elementNode instanceof GenericNativeExternalFile){ ExternalFile externalFile = (ExternalFile) elementNode; File nativeFile = externalFile.getNativeFile(); try (FileInputStream fileInputStream = new FileInputStream(nativeFile)) { byte[] buffer = new byte[1024]; int bytesRead; while ((bytesRead = fileInputStream.read(buffer)) != -1) { outputStream.write(buffer, 0, bytesRead); } // Flush the output stream outputStream.flush(); return; } catch (IOException e) { // Handle the exception, e.g. log it e.printStackTrace(); } } if( elementNode instanceof GenericFileNode){ FileNode fileNode = (FileNode) elementNode; TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, fileNode, kChannel); this.primaryFileSystem.export( entity ); } } /** * Uploads a file to a CDN site. * @param siteName the site name * @param filePath the target path * @param version the version number * @param file the file payload * @return the operation result */ @PostMapping("/CDNUpload") public BasicResultResponse CDNUpload(@RequestParam("siteName") String siteName, @RequestParam("filePath") String filePath, @RequestParam("version") String version, @RequestParam("file") MultipartFile file) throws IOException { SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator(); Site site = siteManipulator.querySiteByName(siteName); if( site == null ){ return BasicResultResponse.error("Site does not exist"); } int dotIndex = filePath.lastIndexOf(UOFSConsoleContents.PERIOD);
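/* The stored object path below is composed as <siteMountPath>/<baseName>/$version/<version>.<extension> (see UOFSConsoleContents.VERSION_PREFIX), so each upload of the same logical file lands under its own version entry, and a TitanVersion row links the folder node to the versioned storage object. */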
String baseName = filePath.substring(0, dotIndex); String extension = filePath.substring(dotIndex + 1); String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + UOFSConsoleContents.FORWARD_SLASH + baseName; FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); Folder node = this.primaryFileSystem.affirmFolder(realFilePath); String storageObjectPath = realFilePath + UOFSConsoleContents.VERSION_PREFIX + UOFSConsoleContents.FORWARD_SLASH + version + UOFSConsoleContents.PERIOD + extension; File tempFile = File.createTempFile("upload",".temp"); if( !tempFile.exists() ){ throw new IOException( "Failed to create temporary file: " + tempFile.toPath() ); } file.transferTo(tempFile); FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ); TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel ); FileNode fileNode = fsNodeAllotment.newFileNode(); fileNode.setDefinitionSize( tempFile.length() ); fileNode.setName( tempFile.getName() ); TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem, storageObjectPath, fileNode, titanFileChannelKChannel, this.primaryVolume ); this.primaryFileSystem.receive( receiveEntity ); FileTreeNode storageObject = this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(storageObjectPath)); TitanVersion titanVersion = new TitanVersion(); titanVersion.setVersion( version ); titanVersion.setFileGuid( node.getGuid() ); titanVersion.setTargetStorageObjectGuid( storageObject.getGuid() ); this.primaryVersion.insert( titanVersion ); if( !tempFile.delete() ){ throw new IOException( "Failed to delete temporary file: " + tempFile.toPath() ); } return BasicResultResponse.success(); } /** * Uploads a file. * @param filePath the destination path * @param file the file payload * @return the operation result */ @PostMapping("/upload") public BasicResultResponse upload(@RequestParam("filePath") String filePath, @RequestParam("file") MultipartFile file ) throws IOException { File tempFile = File.createTempFile("upload",".temp"); if(!tempFile.exists()){ throw new IOException( "Failed to create temporary file: " + tempFile.toPath() ); } file.transferTo(tempFile); FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment(); FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ); TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel ); FileNode fileNode = fsNodeAllotment.newFileNode(); fileNode.setDefinitionSize( tempFile.length() ); fileNode.setName( tempFile.getName() ); TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem, filePath, fileNode, titanFileChannelKChannel, this.primaryVolume ); this.primaryFileSystem.receive( receiveEntity ); if(!tempFile.delete()){ throw new IOException( "Failed to delete temporary file." ); } return BasicResultResponse.success(); }
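/* Pattern note: both upload endpoints above spool the MultipartFile to a temporary file first, stream it into the KOM file system through a FileChannel-backed Chanface (TitanFileReceiveEntity64), and delete the temporary file afterwards, so large payloads never need to be held fully in memory. */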
@PostMapping("/stream") public String handleStreamUpload(HttpServletRequest request) throws IOException { try (InputStream inputStream = request.getInputStream()) { // Process the input stream return "File stream processed."; } } private Chanface getKChannel(File file ) throws IOException { FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ); return new TitanFileChannelChanface( channel ); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/UserController.java ================================================ package com.walnut.sparta.uofs.console.api.controller.v2; import com.walnut.sparta.uofs.console.util.JWTUtil; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import com.walnut.sparta.uofs.console.domain.dto.UserLoginDTO; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping( "/api/v2/user" ) @CrossOrigin public class UserController { @PostMapping("/login") public BasicResultResponse login(@RequestBody UserLoginDTO dto){ if( dto.getPassword().equals("11122233") ){ return BasicResultResponse.success(JWTUtil.createJWT()); } return BasicResultResponse.error("Permission code error"); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/VolumeController.java ================================================ package com.walnut.sparta.uofs.console.api.controller.v2; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.storage.volume.entity.LogicVolume; import com.pinecone.hydra.storage.volume.entity.MountPoint; import com.pinecone.hydra.storage.volume.entity.PhysicalVolume; import com.pinecone.hydra.storage.volume.entity.SimpleVolume; import com.pinecone.hydra.storage.volume.entity.Volume; import com.pinecone.hydra.storage.volume.entity.VolumeAllotment; import com.pinecone.hydra.storage.volume.entity.VolumeCapacity64; import com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume; import com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume; import com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume; import com.pinecone.ulf.util.guid.GUIDs; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import com.walnut.sparta.uofs.console.domain.dto.PhysicalVolumeDTO; import com.walnut.sparta.uofs.console.domain.dto.LogicVolumeDTO; import com.walnut.sparta.uofs.console.domain.dto.StorageExpansionDTO; import com.walnut.sparta.uofs.console.infrastructure.UOFSConsoleContents; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import 
org.springframework.web.bind.annotation.RestController; import javax.annotation.Resource; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; @RestController @RequestMapping( "/api/v2/uofs/volume" ) @CrossOrigin public class VolumeController { @Resource private UniformVolumeManager primaryVolume; // @Resource // private UOFSConfig uofsConfig; /** * Creates a physical volume. * @param dto the physical-volume parameters * @return the operation result */ @PostMapping("/create/physicalVolume") public BasicResultResponse createPhysicalVolume(@RequestBody PhysicalVolumeDTO dto){ VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment(); LocalPhysicalVolume physicalVolume = volumeAllotment.newLocalPhysicalVolume(); physicalVolume.setName( dto.getName() ); physicalVolume.setType( UOFSConsoleContents.VOLUME_TYPE_PHYSICAL ); physicalVolume.setExtConfig( dto.getExtConfig() ); VolumeCapacity64 volumeCapacity = volumeAllotment.newVolumeCapacity(); volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() ); MountPoint mountPoint = volumeAllotment.newMountPoint(); mountPoint.setMountPoint( dto.getMountPoint() ); physicalVolume.setMountPoint( mountPoint ); physicalVolume.setVolumeCapacity( volumeCapacity ); this.primaryVolume.insertPhysicalVolume( physicalVolume ); return BasicResultResponse.success(); } /** * Creates a simple volume. * @param dto the simple-volume parameters * @return the operation result * @throws SQLException on SQL error */ @PostMapping("/create/simpleVolume") public BasicResultResponse createSimpleVolume(@RequestBody LogicVolumeDTO dto) throws SQLException { VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment(); LocalSimpleVolume simpleVolume = volumeAllotment.newLocalSimpleVolume(); simpleVolume.setType(UOFSConsoleContents.VOLUME_TYPE_SIMPLE); simpleVolume.setName( dto.getName() ); simpleVolume.setExtConfig( dto.getExtConfig() ); VolumeCapacity64 volumeCapacity = volumeAllotment.newVolumeCapacity(); volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() ); simpleVolume.setVolumeCapacity( volumeCapacity ); simpleVolume.build(); return BasicResultResponse.success(); } /** * Creates a spanned volume. * @param dto the spanned-volume parameters * @return the operation result * @throws SQLException on SQL error */ @PostMapping("/create/spannedVolume") public BasicResultResponse createSpannedVolume(@RequestBody LogicVolumeDTO dto) throws SQLException { VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment(); LocalSpannedVolume spannedVolume = volumeAllotment.newLocalSpannedVolume(); spannedVolume.setType(UOFSConsoleContents.VOLUME_TYPE_SPANNED); spannedVolume.setName( dto.getName() ); spannedVolume.setExtConfig( dto.getExtConfig() ); VolumeCapacity64 volumeCapacity = volumeAllotment.newVolumeCapacity(); volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() ); spannedVolume.setVolumeCapacity( volumeCapacity ); spannedVolume.build(); return BasicResultResponse.success(); } /** * Creates a striped volume. * @param dto the striped-volume parameters * @return the operation result * @throws SQLException on SQL error */ @PostMapping("/create/stripedVolume") public BasicResultResponse createStripedVolume(@RequestBody LogicVolumeDTO dto) throws SQLException { VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment(); LocalStripedVolume stripedVolume = volumeAllotment.newLocalStripedVolume(); stripedVolume.setType( UOFSConsoleContents.VOLUME_TYPE_STRIPED ); stripedVolume.setName( dto.getName() ); stripedVolume.setExtConfig( dto.getExtConfig() ); VolumeCapacity64 volumeCapacity = volumeAllotment.newVolumeCapacity(); volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() );
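/* The simple/spanned/striped endpoints in this controller share the same allotment -> configure -> build flow; only the volume type constant and the allotment factory method differ. An illustrative request body for any of them, with field names taken from LogicVolumeDTO (the values are examples only): { "name": "vol-01", "definitionCapacity": 1073741824, "extConfig": "{}" } */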
stripedVolume.setVolumeCapacity( volumeCapacity ); stripedVolume.build(); return BasicResultResponse.success(); } /** * Gets a physical volume. * @param guid the physical volume GUID * @return the physical volume details */ @GetMapping("/query/physical") public String queryPhysicalVolume( @RequestParam("guid") String guid ){ PhysicalVolume physicalVolume = this.primaryVolume.getPhysicalVolume(GUIDs.GUID128(guid)); return BasicResultResponse.success(physicalVolume).toJSONString(); } /** * Gets a logic volume. * @param guid the logic volume GUID * @return the logic volume details */ @GetMapping("/query/logic") public String queryLogicVolume( @RequestParam("guid") String guid ){ LogicVolume logicVolume = this.primaryVolume.get(GUIDs.GUID128(guid)); return BasicResultResponse.success(logicVolume).toJSONString(); } /** * Expands a logic volume. * @param dto the expansion parameters * @return the operation result */ @PostMapping("/storageExpansion") public BasicResultResponse storageExpansion(@RequestBody StorageExpansionDTO dto){ GUID logicGuid = GUIDs.GUID128( dto.getLogicGuid() ); GUID physicalGuid = GUIDs.GUID128( dto.getChildGuid() ); LogicVolume logicVolume = this.primaryVolume.get(logicGuid); logicVolume.storageExpansion( physicalGuid ); return BasicResultResponse.success(); } /** * Gets child volumes. * @param volumeGuid the volume GUID * @return the children of the volume */ @GetMapping("/getChildren") public String getChildren(@RequestParam("volumeGuid") String volumeGuid){ PhysicalVolume physicalVolume = this.primaryVolume.getPhysicalVolume(GUIDs.GUID128(volumeGuid)); if(physicalVolume != null){ return BasicResultResponse.error("A physical volume has no child volumes").toJSONString(); } LogicVolume logicVolume = this.primaryVolume.get(GUIDs.GUID128(volumeGuid)); if( logicVolume instanceof TitanLocalSimpleVolume){ SimpleVolume simpleVolume = (SimpleVolume) logicVolume; List< GUID > guids = simpleVolume.listPhysicalVolume(); if(guids.isEmpty()){ return BasicResultResponse.success().toJSONString(); } PhysicalVolume volumePhysicalVolume = this.primaryVolume.getPhysicalVolume(guids.get(0)); ArrayList< PhysicalVolume > volumes = new ArrayList<>(); volumes.add(volumePhysicalVolume); return BasicResultResponse.success(volumes).toJSONString(); } List< Volume > volumes = logicVolume.queryChildren(); ArrayList< Volume > arrayList = new ArrayList<>(volumes); return BasicResultResponse.success(arrayList).toJSONString(); } /** * Lists all volumes. * @return the volume information */ @GetMapping("/queryAllVolumes") public String queryAllVolumes(){ List< Volume > volumes = this.primaryVolume.queryAllVolumes(); return BasicResultResponse.success(volumes).toJSONString(); } /** * Lists all logic volumes. * @return the volume information */ @GetMapping("/listLogicVolumes") public String queryLogicVolumes(){ List< LogicVolume > volumes = this.primaryVolume.listLogicVolumes(); return BasicResultResponse.success(volumes).toJSONString(); } /** * Lists all physical volumes. * @return the volume information */ @GetMapping("/listPhysicsVolumes") public String queryPhysicsVolumes(){ List< PhysicalVolume > volumes = this.primaryVolume.listPhysicsVolumes(); return BasicResultResponse.success(volumes).toJSONString(); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/xxx.java ================================================ package com.walnut.sparta.uofs.console.api.controller; public class xxx { } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/response/BasicResultResponse.java ================================================ package com.walnut.sparta.uofs.console.api.response; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; import 
org.springframework.http.HttpStatus; import java.io.Serializable; public class BasicResultResponse<T> implements Pinenut, Serializable { private Integer code = HttpStatus.OK.value(); private String msg; // error message private T data; // payload data public static <T> BasicResultResponse<T> success() { BasicResultResponse<T> result = new BasicResultResponse<>(); result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> successMsg( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> success( T object ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.data = object; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> error( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.INTERNAL_SERVER_ERROR.value(); return result; } /** * Gets the status code. * @return code */ public Integer getCode() { return this.code; } /** * Sets the status code. * @param code */ public void setCode(Integer code) { this.code = code; } /** * Gets the message. * @return msg */ public String getMsg() { return this.msg; } /** * Sets the message. * @param msg */ public void setMsg(String msg) { this.msg = msg; } /** * Gets the data. * @return data */ public T getData() { return this.data; } /** * Sets the data. * @param data */ public void setData(T data) { this.data = data; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "code" , this.code ), new KeyValue<>( "msg" , this.msg ), new KeyValue<>( "data" , this.data ) } ); } @Override public String toString() { return this.toJSONString(); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/config/AsyncConfig.java ================================================ package com.walnut.sparta.uofs.console.config; import org.springframework.context.annotation.Configuration; import org.springframework.scheduling.annotation.EnableAsync; @Configuration public class AsyncConfig { } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/config/WebConfig.java ================================================ package com.walnut.sparta.uofs.console.config; import com.walnut.sparta.uofs.console.interceptor.JWTInterceptor; import org.springframework.context.annotation.Configuration; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; import javax.annotation.Resource; @Configuration public class WebConfig implements WebMvcConfigurer { @Resource private JWTInterceptor jwtInterceptor; @Override public void addInterceptors(InterceptorRegistry registry) { registry.addInterceptor(jwtInterceptor).addPathPatterns("/**"); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/CreateExternalDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; import com.pinecone.framework.system.prototype.Pinenut; public class CreateExternalDTO implements Pinenut { private String folderPath; private String externalSymbolicName; private String reparsedPoint; public String getReparsedPoint() { return reparsedPoint; } public void setReparsedPoint(String reparsedPoint) { this.reparsedPoint = reparsedPoint; } public 
String getFolderPath() { return folderPath; } public void setFolderPath(String folderPath) { this.folderPath = folderPath; } public String getExternalSymbolicName() { return externalSymbolicName; } public void setExternalSymbolicName(String externalSymbolicName) { this.externalSymbolicName = externalSymbolicName; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/CreateExternalSymbolicDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; public class CreateExternalSymbolicDTO { private String folderPath; private String externalSymbolicName; private String reparsedPoint; public String getFolderPath() { return folderPath; } public void setFolderPath(String folderPath) { this.folderPath = folderPath; } public String getExternalSymbolicName() { return externalSymbolicName; } public void setExternalSymbolicName(String externalSymbolicName) { this.externalSymbolicName = externalSymbolicName; } public String getReparsedPoint() { return reparsedPoint; } public void setReparsedPoint(String reparsedPoint) { this.reparsedPoint = reparsedPoint; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/DownloadObjectByChannelDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; public class DownloadObjectByChannelDTO { private String destDirPath; private String targetPath; public DownloadObjectByChannelDTO() { } public DownloadObjectByChannelDTO(String destDirPath, String targetPath) { this.destDirPath = destDirPath; this.targetPath = targetPath; } public String getDestDirPath() { return destDirPath; } public void setDestDirPath(String destDirPath) { this.destDirPath = destDirPath; } public String getTargetPath() { return targetPath; } public void setTargetPath(String targetPath) { this.targetPath = targetPath; } public String toString() { return "DownloadObjectByChannelDTO{destDirPath = " + destDirPath + ", targetPath = " + targetPath + "}"; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/LogicVolumeDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; import com.pinecone.framework.system.prototype.Pinenut; public class LogicVolumeDTO implements Pinenut { private String name; private long definitionCapacity; private String extConfig; public LogicVolumeDTO() { } public LogicVolumeDTO(String name, long definitionCapacity, String extConfig) { this.name = name; this.definitionCapacity = definitionCapacity; this.extConfig = extConfig; } public String getName() { return name; } public void setName(String name) { this.name = name; } public long getDefinitionCapacity() { return definitionCapacity; } public void setDefinitionCapacity(long definitionCapacity) { this.definitionCapacity = definitionCapacity; } public String getExtConfig() { return extConfig; } public void setExtConfig(String extConfig) { this.extConfig = extConfig; } public String toString() { return "LogicVolumeDTO{name = " + name + ", definitionCapacity = " + definitionCapacity + ", extConfig = " + extConfig + "}"; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/PhysicalVolumeDTO.java 
================================================ package com.walnut.sparta.uofs.console.domain.dto; import com.pinecone.framework.system.prototype.Pinenut; public class PhysicalVolumeDTO implements Pinenut { private String name; private long definitionCapacity; private String extConfig; private String mountPoint; public PhysicalVolumeDTO() { } public PhysicalVolumeDTO(String name, long definitionCapacity, String extConfig, String mountPoint) { this.name = name; this.definitionCapacity = definitionCapacity; this.extConfig = extConfig; this.mountPoint = mountPoint; } public String getName() { return name; } public void setName(String name) { this.name = name; } public long getDefinitionCapacity() { return definitionCapacity; } public void setDefinitionCapacity(long definitionCapacity) { this.definitionCapacity = definitionCapacity; } public String getExtConfig() { return extConfig; } public void setExtConfig(String extConfig) { this.extConfig = extConfig; } public String getMountPoint() { return mountPoint; } public void setMountPoint(String mountPoint) { this.mountPoint = mountPoint; } public String toString() { return "PhysicalVolumeDTO{name = " + name + ", definitionCapacity = " + definitionCapacity + ", extConfig = " + extConfig + ", mountPoint = " + mountPoint + "}"; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/RenameDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; public class RenameDTO { private String path; private String newName; public String getPath() { return path; } public void setPath(String path) { this.path = path; } public String getNewName() { return newName; } public void setNewName(String newName) { this.newName = newName; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/StorageExpansionDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; import com.pinecone.framework.system.prototype.Pinenut; public class StorageExpansionDTO implements Pinenut { private String logicGuid; private String childGuid; public StorageExpansionDTO() { } public StorageExpansionDTO(String logicGuid, String childGuid) { this.logicGuid = logicGuid; this.childGuid = childGuid; } public String getLogicGuid() { return logicGuid; } public void setLogicGuid(String logicGuid) { this.logicGuid = logicGuid; } public String getChildGuid() { return childGuid; } public void setChildGuid(String childGuid) { this.childGuid = childGuid; } public String toString() { return "StorageExpansionDTO{logicGuid = " + logicGuid + ", childGuid = " + childGuid + "}"; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/UpdateFileNameDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; public class UpdateFileNameDTO { private String filePath; private String newFileName; public String getFilePath() { return this.filePath; } public void setFilePath(String filePath) { this.filePath = filePath; } public String getNewFileName() { return this.newFileName; } public void setNewFileName(String newFileName) { this.newFileName = newFileName; } } ================================================ FILE: 
Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/UpdateObjectByChannelDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; import org.springframework.web.multipart.MultipartFile; public class UpdateObjectByChannelDTO { private String volumeGuid; private String destDirPath; private MultipartFile object; public UpdateObjectByChannelDTO() { } public UpdateObjectByChannelDTO(String volumeGuid, String destDirPath, MultipartFile object) { this.volumeGuid = volumeGuid; this.destDirPath = destDirPath; this.object = object; } public String getVolumeGuid() { return volumeGuid; } public void setVolumeGuid(String volumeGuid) { this.volumeGuid = volumeGuid; } public String getDestDirPath() { return destDirPath; } public void setDestDirPath(String destDirPath) { this.destDirPath = destDirPath; } public MultipartFile getObject() { return object; } public void setObject(MultipartFile object) { this.object = object; } public String toString() { return "UpdateObjectByChannelDTO{volumeGuid = " + volumeGuid + ", destDirPath = " + destDirPath + ", object = " + object + "}"; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/UserLoginDTO.java ================================================ package com.walnut.sparta.uofs.console.domain.dto; public class UserLoginDTO { private String username; private String password; public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/vo/FolderContentVo.java ================================================ package com.walnut.sparta.uofs.console.domain.vo; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import java.util.List; public class FolderContentVo { private List< FileTreeNode > fileTreeNodes; public FolderContentVo() { } public FolderContentVo(List< FileTreeNode > fileTreeNodes) { this.fileTreeNodes = fileTreeNodes; } public List< FileTreeNode > getFileTreeNodes() { return fileTreeNodes; } public void setFileTreeNodes(List< FileTreeNode > fileTreeNodes) { this.fileTreeNodes = fileTreeNodes; } public String toString() { return "FolderContentVo{fileTreeNodes = " + fileTreeNodes + "}"; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/SpartaUOFSService.java ================================================ package com.walnut.sparta.uofs.console.infrastructure; import com.pinecone.framework.system.executum.Processum; import com.pinecone.framework.system.functions.Executor; import com.pinecone.framework.util.config.JSONConfig; import com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver; import com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver; import com.pinecone.hydra.servgram.Servgram; import com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver; import com.pinecone.hydra.service.kom.ServiceInstrument; import com.pinecone.hydra.service.kom.UniformServiceInstrument; import com.pinecone.hydra.storage.bucket.TitanBucketInstrument; import com.pinecone.hydra.storage.file.FileSystemConfig; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.KernelFileSystemConfig; 
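/* SpartaUOFSService (below) boots the UOFS console: it wires the KOM storage stack (file system, volume manager, bucket, version and service instruments) and registers those instances as Spring beans programmatically; see initKOMSubsystem() and initSpringBeanFactorySubsystem(). */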
import com.pinecone.hydra.storage.file.UniformObjectFileSystem; import com.pinecone.hydra.storage.version.TitanVersionManage; import com.pinecone.hydra.storage.version.VersionManage; import com.pinecone.hydra.storage.volume.KernelVolumeConfig; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.storage.volume.VolumeConfig; import com.pinecone.hydra.system.component.ComponentInitializationException; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.version.ibatis.hydranium.VersionMappingDriver; import com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.pinecone.summer.spring.Springron; import com.walnut.sparta.uofs.console.SpartaBoot; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.GenericApplicationContext; import java.io.IOException; import java.nio.file.Path; public class SpartaUOFSService extends Springron implements UOFSService { protected KOIMappingDriver koiMappingDriver; protected KOIMappingDriver koiFileMappingDriver; protected KOIMappingDriver koiBucketMappingDriver; protected KOIMappingDriver koiVersionMappingDriver; protected KOIMappingDriver koiServiceMappingDriver; protected KOMFileSystem fileSystem; protected UniformVolumeManager volumeTree; protected TitanBucketInstrument bucketInstrument; protected TitanVersionManage versionManage; protected ServiceInstrument servicesInstrument; protected void initKOMSubsystem() throws ComponentInitializationException { this.koiMappingDriver = new VolumeMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiFileMappingDriver = new FileMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiBucketMappingDriver = new BucketMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiVersionMappingDriver = new VersionMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); this.koiServiceMappingDriver = new ServiceMappingDriver( this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.parentSystem().getDispenserCenter() ); JSONConfig selfConfig = (JSONConfig) this.getConfig(); FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( "service.PrimaryUniformFileSystem" ) ); this.fileSystem = new UniformObjectFileSystem( this.koiFileMappingDriver,fileSystemConfig ); VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( "service.PrimaryUniformVolumeManager" ) ); this.volumeTree = new UniformVolumeManager( this.koiMappingDriver,volumeConfig ); this.bucketInstrument = new TitanBucketInstrument( this.koiBucketMappingDriver ); this.versionManage = new TitanVersionManage( this.koiVersionMappingDriver ); this.servicesInstrument = new UniformServiceInstrument( this.koiServiceMappingDriver ); 
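/* All five mapping drivers above are bound to the same "MySQLKingHydranium" ibatis client obtained from the parent system's middleware director; the file system and volume manager are then configured from the "service.PrimaryUniformFileSystem" and "service.PrimaryUniformVolumeManager" sections of this service's JSON config. */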
} protected void initSpringBeanFactorySubsystem() throws ComponentInitializationException { this.setPrimarySources( SpartaBoot.class ); this.setInitializer(new Executor() { @Override public void execute() throws Exception { SpartaUOFSService.this.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() { @Override public void initialize( ConfigurableApplicationContext applicationContext ) { GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext; genericApplicationContext.registerBean("primaryFileSystem", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem) fileSystem); genericApplicationContext.registerBean("primaryVolume", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree); genericApplicationContext.registerBean("primaryBucket", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument); genericApplicationContext.registerBean("primaryVersion", VersionManage.class, () -> (VersionManage) versionManage); genericApplicationContext.registerBean("primaryService", ServiceInstrument.class, () -> servicesInstrument); } }); } }); } protected void initSubsystem() throws ComponentInitializationException { this.initKOMSubsystem(); this.initSpringBeanFactorySubsystem(); } public SpartaUOFSService(String szName, Processum parent, String[] springbootArgs ) throws ComponentInitializationException { super( szName, parent, springbootArgs ); this.mSpringKernel.setPrimarySources( SpartaBoot.class ); this.initSubsystem(); } public SpartaUOFSService( String szName, Processum parent ) throws ComponentInitializationException { this( szName, parent, new String[0] ); } @Override protected void loadConfig() { this.mServgramList = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey ); Object dyServgramConf = this.mServgramList.get( this.gramName() ); if( dyServgramConf instanceof String ) { try{ this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) ); } catch ( IOException ignore ) { this.getLogger().info( "[Notice] Spring will use the default config `application.yaml`." 
); } } else { this.mServgramConf = this.mServgramList.getChild( this.gramName() ); } } @Override public Tritium parentSystem() { return (Tritium)super.parentSystem(); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSCommonConfig.java ================================================ package com.walnut.sparta.uofs.console.infrastructure; import java.util.Map; public class UOFSCommonConfig implements UOFSConfig { protected String mszPhysicalVolumeType; protected String mszSimpleVolumeType; protected String mszSpannedVolumeType; protected String mszStripedVolumeType; protected String mszVersionPrefix; public UOFSCommonConfig(){} public UOFSCommonConfig(Map< String, Object > config){ this.mszPhysicalVolumeType = (String) config.get("PhysicalVolumeType"); this.mszSimpleVolumeType = (String) config.get("SimpleVolumeType"); this.mszSpannedVolumeType = (String) config.get("SpannedVolumeType"); this.mszStripedVolumeType = (String) config.get("StripedVolumeType"); this.mszVersionPrefix = (String) config.get("VersionPrefix"); } @Override public String getPhysicalVolumeType() { return this.mszPhysicalVolumeType; } @Override public String getSimpleVolumeType() { return this.mszSimpleVolumeType; } @Override public String getSpannedVolumeType() { return this.mszSpannedVolumeType; } @Override public String getStripedVolumeType() { return this.mszStripedVolumeType; } @Override public String getVersionPrefix() { return this.mszVersionPrefix; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSConfig.java ================================================ package com.walnut.sparta.uofs.console.infrastructure; import com.pinecone.framework.system.prototype.Pinenut; public interface UOFSConfig extends Pinenut { String getPhysicalVolumeType(); String getSimpleVolumeType(); String getSpannedVolumeType(); String getStripedVolumeType(); String getVersionPrefix(); } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSConsoleContents.java ================================================ package com.walnut.sparta.uofs.console.infrastructure; import com.pinecone.framework.system.prototype.Pinenut; public class UOFSConsoleContents implements Pinenut { public static String VOLUME_TYPE_PHYSICAL = "PhysicalVolume"; public static String VOLUME_TYPE_SIMPLE = "SimpleVolume"; public static String VOLUME_TYPE_SPANNED = "SpannedVolume"; public static String VOLUME_TYPE_STRIPED = "StripedVolume"; public static String VERSION_PREFIX = "/$version"; public static String FORWARD_SLASH = "/"; public static String PERIOD = "."; } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSContentDelivery.java ================================================ package com.walnut.sparta.uofs.console.infrastructure; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.tritium.Tritium; public class UOFSContentDelivery extends Tritium implements UOFSContentDeliveryService { protected SpartaUOFSService spartaUOFSService; public UOFSContentDelivery(String[] args, CascadeSystem parent ) { this( args, null, parent ); } public UOFSContentDelivery(String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } public SpartaUOFSService 
getSpartaUOFSService(){ return this.spartaUOFSService; } @Override public void vitalize () throws Exception { this.spartaUOFSService = new SpartaUOFSService( "SpartaUOFSService", this ); this.spartaUOFSService.execute(); this.getTaskManager().add(this.spartaUOFSService); this.getTaskManager().syncWaitingTerminated(); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSContentDeliveryService.java ================================================ package com.walnut.sparta.uofs.console.infrastructure; import com.pinecone.framework.system.prototype.Pinenut; public interface UOFSContentDeliveryService extends Pinenut { } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSService.java ================================================ package com.walnut.sparta.uofs.console.infrastructure; import com.pinecone.hydra.system.component.Slf4jTraceable; public interface UOFSService extends Slf4jTraceable { } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/interceptor/JWTInterceptor.java ================================================ package com.walnut.sparta.uofs.console.interceptor; import com.alibaba.fastjson.JSONObject; import com.walnut.sparta.uofs.console.util.JWTUtil; import com.walnut.sparta.uofs.console.api.response.BasicResultResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpMethod; import org.springframework.stereotype.Component; import org.springframework.util.StringUtils; import org.springframework.web.servlet.HandlerInterceptor; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @Component public class JWTInterceptor implements HandlerInterceptor { private Logger log = LoggerFactory.getLogger( this.getClass() ); @Override public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception { String url = request.getRequestURI(); if (request.getMethod().equals(HttpMethod.OPTIONS.name())) { return true; } //log.info("Requested path: " + url); if (url.contains("login")||url.contains("register")||url.contains("send_code")||url.contains("download")){ log.info("Allow login or registration operations"); return true; } String jwt = request.getHeader("Token"); if (!StringUtils.hasLength(jwt)){ log.info("The request header Token is empty"); BasicResultResponse error = BasicResultResponse.error("Not logged in"); String jsonString = JSONObject.toJSONString(error); response.getWriter().write(jsonString); return false; } try { JWTUtil.ParseJWt(jwt); } catch (Exception e){ log.info("Token parsing failed"); BasicResultResponse error = BasicResultResponse.error("Not logged in"); String jsonString = JSONObject.toJSONString(error); response.getWriter().write(jsonString); return false; } return true; } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/service/FileService.java ================================================ package com.walnut.sparta.uofs.console.service; import com.pinecone.framework.util.id.GUID; public interface FileService { void remove(GUID fileGuid); } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/service/impl/FileServiceImpl.java ================================================ package 
com.walnut.sparta.uofs.console.service.impl; import com.pinecone.framework.util.id.GUID; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.entity.ClusterPage; import com.pinecone.hydra.storage.file.entity.FileNode; import com.pinecone.hydra.storage.file.entity.FileTreeNode; import com.pinecone.hydra.storage.file.entity.Folder; import com.pinecone.hydra.storage.file.entity.LocalCluster; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.unit.imperium.entity.TreeNode; import com.walnut.sparta.uofs.console.service.FileService; import org.springframework.stereotype.Service; import javax.annotation.Resource; import java.sql.SQLException; import java.util.List; @Service public class FileServiceImpl implements FileService { @Resource private KOMFileSystem primaryFileSystem; @Resource private UniformVolumeManager primaryVolume; @Override public void remove(GUID fileGuid){ FileTreeNode fileTreeNode = this.primaryFileSystem.get(fileGuid); if( fileTreeNode instanceof Folder){ Folder folder = (Folder) fileTreeNode; List< TreeNode > children = this.primaryFileSystem.getChildren(folder.getGuid()); for( TreeNode treeNode : children ){ this.remove( treeNode.getGuid() ); } }else if( fileTreeNode instanceof FileNode){ FileNode fileNode = (FileNode) fileTreeNode; ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() ); long fileClusterNum = clusterPage.getClusters(); for( long i = 0; i < fileClusterNum; i++ ){ LocalCluster frame = clusterPage.getLocalCluster( i ); try { this.primaryVolume.removeStorageObject( frame ); } catch (SQLException e) { throw new RuntimeException(e); } } } } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/util/JWTUtil.java ================================================ package com.walnut.sparta.uofs.console.util; import com.auth0.jwt.JWT; import com.auth0.jwt.algorithms.Algorithm; import com.auth0.jwt.interfaces.DecodedJWT; import java.util.Date; import java.util.HashMap; public class JWTUtil { private static final String SIGN = "!^&%&*!@$*%!!@(&%2ar^2t"; // Generates a JWT token for student login public static String createJWT(){ HashMap< String, Object > map = new HashMap<>(); String token = JWT.create() .withHeader(map) // Set the header claims .withExpiresAt(new Date(System.currentTimeMillis() + 3600 * 1000)) // Set the expiration time .sign(Algorithm.HMAC256(SIGN)); // Sign the token; HMAC256 is used here return token; } public static DecodedJWT ParseJWt(String jwt){ return JWT.require(Algorithm.HMAC256(SIGN)).build().verify(jwt); } } ================================================ FILE: Sparta/sparta-uofs-console/src/main/resources/UOFSIface.thrift ================================================ service UOFSIface{ void affirmFileNode(1:string filePath); } ================================================ FILE: Sparta/sparta-uofs-console/src/test/java/com/walnut/sparta/uofs/SparteUcdnServiceApplicationTests.java ================================================ package com.walnut.sparta.uofs; import org.junit.Test; import org.springframework.boot.test.context.SpringBootTest; @SpringBootTest public class SparteUcdnServiceApplicationTests { @Test public void contextLoads() { } } ================================================ FILE: Sparta/sparta-uofs-console/src/test/java/com/walnut/sparta/uofs/TestSparta.java ================================================ package com.walnut.sparta.uofs; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import 
com.pinecone.tritium.Tritium; class JesusChrist extends Tritium { public JesusChrist( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public JesusChrist( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { // Sparta sparta = new Sparta( "Sparta", this ); // // // Thread shutdowner = new Thread(()->{ // Debug.sleep( 5000 ); // sparta.terminate(); // }); // //shutdowner.start(); // // // // // sparta.setPrimarySources( SpartaBoot.class ); // // KOIMappingDriver koiMappingDriver = new VolumeMappingDriver( // sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() // ); // KOIMappingDriver koiFileMappingDriver = new FileMappingDriver( // sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() // ); // KOIMappingDriver koiBucketMappingDriver = new BucketMappingDriver( // sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() // ); // KOIMappingDriver koiVersionMappingDriver = new VersionMappingDriver( // sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() // ); // // // // KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver ); // UniformVolumeManager volumeTree = new UniformVolumeManager( koiMappingDriver ); // TitanBucketInstrument bucketInstrument = new TitanBucketInstrument( koiBucketMappingDriver ); // TitanVersionManage versionManage = new TitanVersionManage( koiVersionMappingDriver ); // // sparta.setInitializer(new Executor() { // @Override // public void execute() throws Exception { // sparta.getSpringApplication().addInitializers(new ApplicationContextInitializer() { // @Override // public void initialize( ConfigurableApplicationContext applicationContext ) { // GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext; // genericApplicationContext.registerBean("primaryFileSystem", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem); // genericApplicationContext.registerBean("primaryVolume", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree); // genericApplicationContext.registerBean("primaryBucket", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument); // genericApplicationContext.registerBean("primaryVersion", VersionManage.class, () -> (VersionManage) versionManage); // } // }); // } // }); // // // sparta.execute(); // // // // // // this.getTaskManager().add( sparta ); // this.getTaskManager().syncWaitingTerminated(); } } public class TestSparta { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) ); jesus.vitalize(); return 0; }, (Object[]) args ); } } ================================================ FILE: Sparta/sparta-uofs-service/pom.xml ================================================
<project>
    <parent>
        <artifactId>sparta</artifactId>
        <groupId>com.walnuts.sparta</groupId>
        <version>2.5.1</version>
    </parent>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.walnut.sparta.uofs.service</groupId>
    <artifactId>sparta-uofs-service</artifactId>
    <version>2.1.0</version>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>2.2.2</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
true org.springframework.boot spring-boot-starter-test test com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.pinecone.summer.springram springram 2.1.0 compile com.pinecone.tritium hydra-system-tritium 2.1.0 test com.pinecone.hydra.kom.driver.default hydra-kom-default-driver 2.1.0 test
================================================ FILE: Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/Sparta.java ================================================
package com.walnut.sparta.uofs.service; import com.pinecone.framework.system.executum.Processum; import com.pinecone.hydra.servgram.Servgram; import com.pinecone.hydra.system.component.Slf4jTraceable; import com.pinecone.summer.spring.Springron; import java.io.IOException; import java.nio.file.Path; public class Sparta extends Springron implements Slf4jTraceable { public Sparta( String szName, Processum parent, String[] springbootArgs ) { super( szName, parent, springbootArgs ); this.mSpringKernel.setPrimarySources( SpartaBoot.class ); } public Sparta( String szName, Processum parent ) { this( szName, parent, new String[0] ); } @Override protected void loadConfig() { this.mServgramList = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey ); Object dyServgramConf = this.mServgramList.get( this.gramName() ); if( dyServgramConf instanceof String ) { try{ this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) ); } catch ( IOException ignore ) { this.getLogger().info( "[Notice] Spring will use the default config `application.yaml`." ); } } else { this.mServgramConf = this.mServgramList.getChild( this.gramName() ); } } }
================================================ FILE: Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/SpartaBoot.java ================================================
package com.walnut.sparta.uofs.service; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class SpartaBoot { }
================================================ FILE: Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/api/controller/xxx.java ================================================
package com.walnut.sparta.uofs.service.api.controller; public class xxx { }
================================================ FILE: Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/api/response/BasicResultResponse.java ================================================
package com.walnut.sparta.uofs.service.api.response; import java.io.Serializable; import org.springframework.http.HttpStatus; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.unit.KeyValue; import com.pinecone.framework.util.json.JSONEncoder; public class BasicResultResponse<T> implements Pinenut, Serializable { private Integer code = HttpStatus.OK.value(); private String msg; // error message private T data; // response data public static <T> BasicResultResponse<T> success() { BasicResultResponse<T> result = new BasicResultResponse<>(); result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> successMsg( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> success( T object ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.data = object; result.code = HttpStatus.OK.value(); return result; } public static <T> BasicResultResponse<T> error( String msg ) { BasicResultResponse<T> result = new BasicResultResponse<>(); result.msg = msg; result.code = HttpStatus.INTERNAL_SERVER_ERROR.value(); return result; } /** * Get * @return code */ public Integer getCode() { return this.code; } /** * Set * @param code */ public void setCode(Integer code) { this.code = code; } /** * Get * @return msg */ public String getMsg() { return this.msg; } /** * Set * @param msg */ public void setMsg(String msg) { this.msg = msg; } /** * Get * @return data */ public T getData() { return this.data; } /** * Set * @param data */ public void setData(T data) { this.data = data; } @Override public String toJSONString() { return JSONEncoder.stringifyMapFormat( new KeyValue[]{ new KeyValue<>( "code" , this.code ), new KeyValue<>( "msg" , this.msg ), new KeyValue<>( "data" , this.data ) } ); } @Override public String toString() { return this.toJSONString(); } }
================================================ FILE: Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/infrastructure/Contents.java ================================================
package com.walnut.sparta.uofs.service.infrastructure; public class Contents { public static String LOCAL_ACCOUNT = "0000000-000000-0000-00"; }
================================================ FILE: Sparta/sparta-uofs-service/src/test/java/com/sparta/TestSparta.java ================================================
package com.sparta; import com.pinecone.Pinecone; import com.pinecone.framework.system.CascadeSystem; import com.pinecone.framework.system.functions.Executor; import com.pinecone.framework.util.Debug; import com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver; import com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver; import com.pinecone.hydra.storage.bucket.TitanBucketInstrument; import com.pinecone.hydra.storage.file.KOMFileSystem; import com.pinecone.hydra.storage.file.UniformObjectFileSystem; import com.pinecone.hydra.storage.volume.UniformVolumeManager; import com.pinecone.hydra.system.ko.driver.KOIMappingDriver; import com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver; import com.pinecone.tritium.Tritium; import com.pinecone.slime.jelly.source.ibatis.IbatisClient; import com.walnut.sparta.uofs.service.Sparta; import com.walnut.sparta.uofs.service.SpartaBoot; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.GenericApplicationContext; class JesusChrist extends Tritium { public JesusChrist( String[] args, CascadeSystem parent ) { this( args, null, parent ); } public JesusChrist( String[] args, String szName, CascadeSystem parent ){ super( args, szName, parent ); } @Override public void vitalize () throws Exception { Sparta sparta = new Sparta( "Sparta", this ); Thread shutdowner = new Thread(()->{ Debug.sleep( 5000 ); sparta.terminate(); }); //shutdowner.start(); KOIMappingDriver koiMappingDriver = new VolumeMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); KOIMappingDriver koiFileMappingDriver = new FileMappingDriver( this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); KOIMappingDriver koiBucketMappingDriver = new BucketMappingDriver( this,
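/* A minimal usage sketch for the BasicResultResponse wrapper defined above, assuming a plain Spring @RestController; the DemoController name and /demo route are hypothetical, not part of this repository:
@RestController public class DemoController {
    @GetMapping( "/demo" ) public BasicResultResponse<String> demo() {
        try { return BasicResultResponse.success( "hello" ); }
        catch ( Exception e ) { return BasicResultResponse.error( e.getMessage() ); }
    }
} */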
(IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( "MySQLKingHydranium" ), this.getDispenserCenter() ); KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver, null ); UniformVolumeManager volumeTree = new UniformVolumeManager( koiMappingDriver, null ); TitanBucketInstrument bucketInstrument = new TitanBucketInstrument( koiBucketMappingDriver ); sparta.setPrimarySources( SpartaBoot.class ); sparta.setInitializer(new Executor() { @Override public void execute() throws Exception { sparta.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() { @Override public void initialize( ConfigurableApplicationContext applicationContext ) { GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext; genericApplicationContext.registerBean("primaryFileSystem", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem); genericApplicationContext.registerBean("primaryVolume", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree); genericApplicationContext.registerBean("primaryBucket", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument); } }); } }); sparta.execute(); this.getTaskManager().add( sparta ); this.getTaskManager().syncWaitingTerminated(); } } public class TestSparta { public static void main( String[] args ) throws Exception { Pinecone.init( (Object...cfg )->{ JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) ); jesus.vitalize(); return 0; }, (Object[]) args ); } }
================================================ FILE: TaskJuggler/TaskJuggler.iml ================================================
================================================ FILE: TaskJuggler/pom.xml ================================================
com.Sauron sauron 1.0-SNAPSHOT 4.0.0 TaskJuggler 11 11 UTF-8 com.Sauron pinecone 3.3.1 org.springframework.boot spring-boot-starter-web com.Sauron Messenger 1.0-SNAPSHOT org.springframework.boot spring-boot-starter-amqp
================================================ FILE: TaskJuggler/src/main/java/com/genius/common/Heist.java ================================================
package com.genius.common; import com.genius.core.HeistCenter; import com.genius.util.SystemUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Random; /** * @author Genius * @date 2023/05/08 23:41 **/ public class Heist implements Runnable{ private Logger logger; private String heistName; private HeistCenter heistCenter; private int spoilSum; private int failureRetryTimes; public Heist(HeistCenter heistCenter){ this.heistCenter = heistCenter; this.heistName = "Heist:"+this.hashCode(); this.spoilSum = this.heistCenter.getSpoilNum() + this.heistCenter.getSpoilBase(); this.failureRetryTimes = this.heistCenter.getHeistConfig().getFailureRetryTimes(); logger = LoggerFactory.getLogger(SystemUtils.getLoggerFormatName(heistName)); } // Get the number of threads according to the task count private int getSpoil(){ int index = this.heistCenter.getNowSpoil().getAndIncrement(); if(index<=spoilSum){ logger.info("{} get the spoil[{}]",heistName,index); } return index; } private boolean handlerSpoil(int index){ logger.info("{} handler the spoil[{}]",heistName,index); return new Random().nextInt(100)+1>80; } private boolean failureRetry(int index,int retryTimes){ logger.info("{} retry the spoil[{}] retryTimes:{}",heistName,index,retryTimes); return this.handlerSpoil(index); } private boolean completeRob(int index){
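/* Note on the latch accounting used below: completeRob() counts spoilsLock down once per spoil index. HeistCenter hands out indices spoilBase..spoilBase+spoilNum inclusive, i.e. spoilNum+1 indices, which is why the latch is created as new CountDownLatch(spoilNum+1). For example, with spoilBase = 100 and spoilNum = 3 the indices are 100, 101, 102, 103 - four completions release the latch. */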
logger.info("{} complete the spoil[{}]",heistName,index); this.heistCenter.getSpoilsLock().countDown(); return true; } private void failureHandler(int index){ logger.info("{} report failure info: spoil[{}] rob fail",heistName,index); } @Override public void run() { int index = this.getSpoil(); while(index<=spoilSum){ int nowFailureRetryTimes = 0; if(!handlerSpoil(index)){ while((nowFailureRetryTimes++)this.failureRetryTimes){ failureHandler(index); } } completeRob(index); index = this.getSpoil(); } } } ================================================ FILE: TaskJuggler/src/main/java/com/genius/config/HeistConfig.java ================================================ package com.genius.config; import com.genius.constpool.RadiumConstPool; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.stereotype.Component; /** * @author Genius * @date 2023/05/09 13:06 **/ @Data @Component @AllArgsConstructor @NoArgsConstructor @ConfigurationProperties(prefix = RadiumConstPool.CONFIG_COMPONENTS_PREFIX+"heist") public class HeistConfig { private Integer failureRetryTimes; private Integer heistNum; private Long exceptionWaitTime; //线程池异常等待时间 private String robTaskName; //执行的任务 } ================================================ FILE: TaskJuggler/src/main/java/com/genius/config/MqConfig.java ================================================ package com.genius.config; import com.genius.pool.MqPool; import org.springframework.amqp.core.*; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; /** * @author Genius * @date 2023/05/12 18:57 **/ @Configuration @AutoConfigureAfter(SystemConfig.class) public class MqConfig { public static final String TASK_REPLY= String.format("task.%s.reply",SystemConfig.ServiceId); @Bean public DirectExchange nonjronTaskDirectExchange(){ return new DirectExchange(MqPool.EXCHANGE_TOPIC_NONJRON_TASK); } @Bean public Queue taskSendQueue(){ return new Queue(MqPool.MASTER_TASK_SEND_CENTER); } @Bean public Queue taskReplyQueue(){ return new Queue( "task.Nonaron-Kingpin-Prime.reply"); } @Bean public Binding bindingReplyQueue(Queue taskReplyQueue,DirectExchange nonjronTaskDirectExchange){ return BindingBuilder.bind(taskReplyQueue).to(nonjronTaskDirectExchange).with(SystemConfig.ServiceId); } } ================================================ FILE: TaskJuggler/src/main/java/com/genius/core/HeistCenter.java ================================================ package com.genius.core; import com.genius.common.Heist; import com.genius.common.UlfUMC.UlfUMCMessage; import com.genius.config.HeistConfig; import com.genius.config.SystemConfig; import com.genius.mq.Harbor; import com.genius.pool.FunctionNamePool; import com.genius.util.SystemUtils; import lombok.Data; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; /** * @author Genius * @date 2023/05/08 23:08 **/ @Data @Component 
// Responsible for scheduling and distributing the heist tasks public class HeistCenter { @Autowired private Harbor harbor; // harbor, responsible for communicating with the master node @Resource private HeistConfig heistConfig; private ExecutorService heistPool; private AtomicInteger nowSpoil; private int spoilNum; // number of spoils private int spoilBase; // spoil base offset private CountDownLatch spoilsLock; Logger logger = LoggerFactory.getLogger(SystemUtils.getLoggerFormatName("HeistCenter")); public HeistCenter(){ } public HeistCenter(HeistConfig heistConfig){ this.heistConfig = heistConfig; } private void initHeistPool(){ heistPool = Executors.newFixedThreadPool(heistConfig.getHeistNum()); } private boolean init(){ if(heistPool==null){ initHeistPool(); } getSpoil(); if(this.spoilNum<=0){ return false; } nowSpoil = new AtomicInteger(spoilBase); spoilsLock = new CountDownLatch(spoilNum+1); return true; } public void getSpoil(){ // fetch the task range from the harbor try { UlfUMCMessage msg; while((msg = harbor.getSpoil(heistConfig.getRobTaskName()))==null); if(msg.getFunction().equals(FunctionNamePool.SHUTDOWN)||msg.getFunction().equals(FunctionNamePool.ERROR)){ spoilNum = -1; } else{ int upLimit = Integer.parseInt(msg.getData().get("upLimit").toString()); int lowLimit = Integer.parseInt(msg.getData().get("lowLimit").toString()); spoilNum = upLimit-lowLimit; spoilBase = lowLimit; } } catch (Exception e) { spoilNum = -1; } } public void start() throws InterruptedException { while (init()) { logger.info("{} Robbing {}[{}~{}]", SystemConfig.ServiceId,heistConfig.getRobTaskName(),spoilBase,spoilBase+spoilNum); for ( int i = 0; i < heistConfig.getHeistNum(); i++ ) { heistPool.submit(new Heist(this)); } this.spoilsLock.await(); } } }
================================================ FILE: TaskJuggler/src/main/java/com/genius/mq/Harbor.java ================================================
package com.genius.mq; import com.genius.common.UlfUMC.*; import com.genius.pool.FunctionNamePool; import com.genius.pool.MqPool; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.amqp.rabbit.annotation.RabbitListener; import org.springframework.amqp.rabbit.core.RabbitTemplate; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.util.Map; import java.util.Objects; import java.util.concurrent.BlockingDeque; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; /** * @author Genius * @date 2023/05/12 18:08 **/ @Lazy @Component public class Harbor { @Resource RabbitTemplate mqPublisher; private BlockingDeque<UlfUMCMessage> messageQueue = new LinkedBlockingDeque<>(); private Logger logger = LoggerFactory.getLogger(""); public void stockWithGoods(String goodsName){ UlfUMCMessage message = MessageFactory.getMessageBuilder(MessageFactory.MessageBuilderType.SLAVE) .method(UlfUMCMessageType.GET) .func(FunctionNamePool.QUERY_TASK_RANGE) .data(Map.of("task", goodsName)).build(); mqPublisher.convertAndSend(MqPool.MASTER_TASK_SEND_CENTER,UlfUMCMessage.encode(message)); } //TODO need MQ confirm to optimize Message robustness @RabbitListener(queues = "task.Nonaron-Kingpin-Prime.reply") private void getSpoilFromMaster(byte[] data){ if(!Objects.isNull(data)){ try { UlfUMCMessage msg = UlfUMCMessage.decode(data); logger.info("Get instructions from the boss :{}",msg); messageQueue.add(msg); }catch (UlfUMCMessageException e){ ErrorMessageBuilder messageBuilder = (ErrorMessageBuilder) MessageFactory.getMessageBuilder(MessageFactory.MessageBuilderType.ERROR); messageQueue.add(messageBuilder.error(e.getMessage()).build());
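/* Regarding the "need MQ confirm" TODO above: one way to harden publishing, assuming Spring AMQP with spring.rabbitmq.publisher-confirm-type=correlated enabled - a sketch, not this project's confirmed approach:
mqPublisher.setConfirmCallback( ( correlationData, ack, cause ) -> {
    if ( !ack ) { logger.warn( "Publish not confirmed by broker: {}", cause ); }  // e.g. requeue or alert here
} ); */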
} } } public UlfUMCMessage getSpoil(String name) throws InterruptedException { UlfUMCMessage msg = messageQueue.poll(2L, TimeUnit.SECONDS); if(msg == null){ stockWithGoods(name); return null; } return msg; } }
================================================ FILE: TestJar.cmd ================================================
PATH=%PATH%;D:\ProgramFiles\ToolChains\Java\jdk11x64\bin; START java -Xdebug -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -jar ./Saurons/Shadow/target/shadow-1.2.7.jar --server.port=8000
================================================ FILE: Walnuts/pom.xml ================================================
sauron com.sauron 1.2.7 4.0.0 com.walnuts walnuts pom 2.5.1 sailor-stream-distribute-sdk
================================================ FILE: Walnuts/sailor-stream-distribute-sdk/pom.xml ================================================
walnuts com.walnuts 2.5.1 org.apache.maven.plugins maven-compiler-plugin 11 11 4.0.0 com.walnut.sailor.sdd sailor-stream-distribute-sdk 2.1.0 org.springframework.boot spring-boot-starter org.springframework.boot spring-boot-starter-test test org.springframework.boot spring-boot-starter-web 2.6.13 compile com.pinecone pinecone 2.5.1 compile com.pinecone.hydra.kernel hydra-framework-runtime 2.1.0 compile com.pinecone.hydra.kernel hydra-message-broadcast 2.1.0 compile com.walnut.sparta.api.uac sparta-api-uac 2.1.0 compile
================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/Dummy.java ================================================
package com.walnut.sailor.stream; public class Dummy { }
================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/FileMultiDistributionIface.java ================================================
package com.walnut.sailor.stream.fm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.stereotype.Iface; import com.walnut.sailor.stream.fm.protocol.RequestHead; @Iface("com.walnut.sailor.stream.fm.FileMultiDistributionIface") public interface FileMultiDistributionIface extends Pinenut { void startDistribution( RequestHead head, String fileName, String directionRouteToken ); void transmitFileContent( RequestHead head, SFMFileFrame fileContent ); void fileTransmitComplete( RequestHead head ); }
================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMConfig.java ================================================
package com.walnut.sailor.stream.fm; import com.pinecone.framework.system.prototype.Pinenut; public interface SFMConfig extends Pinenut { int getFileFrameSize(); long getSessionExpiredTimeMillis(); String getFileCloudDistributeTransmitTopic(); String getStorageDirectory(); String getFileServiceTransmitGroup(); }
================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMDistributionController.java ================================================
package com.walnut.sailor.stream.fm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.framework.util.StringUtils; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.sailor.stream.fm.event.SFMEventSubscriber; import com.walnut.sailor.stream.fm.protocol.RequestHead; import
com.walnut.sailor.stream.fm.session.SFMTransaction; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.file.Path; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Controller @AddressMapping( "com.walnut.sailor.stream.fm.FileMultiDistributionIface." ) public class SFMDistributionController implements Pinenut { protected Logger logger; protected SessionPhaser sessionPhaser; protected SFMSessionValidator sessionValidator; protected SFMConfig config; protected SingleStreamFileMultiDistributionService distributionService; public SFMDistributionController( SingleStreamFileMultiDistributionService service ) { this.logger = LoggerFactory.getLogger( this.getClass() ); this.distributionService = service; this.config = service.getConfig(); this.sessionPhaser = new SFMSessionPhaser(); this.sessionValidator = new SFMSessionValidator( service ); } protected String queryDirectoryPath( String directionRouteToken ) { String directoryPath = this.config.getStorageDirectory(); if ( StringUtils.isNoneEmpty( directionRouteToken ) ) { String sz = this.distributionService.queryDestinedDirectoryByToken( directionRouteToken ); if ( StringUtils.isNoneEmpty( sz ) ) { directoryPath = sz; } } return directoryPath; } protected Path formatFilePath( long sessionId, String fileName, String directionRouteToken ) { String directoryPath = this.queryDirectoryPath( directionRouteToken ); this.sessionPhaser.registerDestinationDirectory( sessionId, directoryPath ); return Path.of( directoryPath, fileName ); } protected Path formatFilePath( long sessionId, String fileName ) { String directoryPath = this.config.getStorageDirectory(); String sz = this.sessionPhaser.getDestinationDirectory( sessionId ); if ( StringUtils.isNoneEmpty( sz ) ) { directoryPath = sz; } return Path.of( directoryPath, fileName ); } @AddressMapping( "startDistribution" ) public void startDistribution( RequestHead head, String fileName, String directionRouteToken ) throws IOException { if( this.sessionPhaser.getSFMTransaction(head.getSessionId()) != null ){ this.logger.warn( "[Warning] SFMService `startDistribution` session assertion compromised." ); this.sessionPhaser.removeSession( head.getSessionId() ); } this.logger.info( "SFMService invoked `startDistribution`. " ); long sessionId = head.getSessionId(); Path desPath = this.formatFilePath( sessionId, fileName, directionRouteToken ); File newFile = new File( desPath.toString() ); if( newFile.length() != 0 ){ if ( !newFile.delete() ) { throw new IOException( "Purging destination file failed: " + fileName ); } this.logger.info( "The destination file (" + fileName + ") exists, and has been successfully eliminated. " ); } SFMTransaction transaction = new SFMTransaction(); transaction.setLastEventArrivedMills( System.currentTimeMillis() ); transaction.finishStartTransmit(); this.sessionPhaser.registerSessionTransaction( sessionId, transaction ); this.sessionPhaser.registerFileHandler( sessionId, new RandomAccessFile(newFile, "rw") ); this.logger.info( "SFMService `startDistribution` completed. " ); } @AddressMapping( "transmitFileContent" ) public void transmitFileContent( RequestHead head, SFMFileFrame fileFrame ) throws IOException { if ( this.assertTransmitTransaction( head, fileFrame.getFileName() ) ){ this.logger.warn( "[Warning] SFMService `transmitFileContent` session assertion compromised." ); return; } this.logger.info( "SFMService invoked `transmitFileContent`. " ); long sessionId = head.getSessionId(); String fileName = fileFrame.getFileName(); Path desPath = this.formatFilePath( sessionId, fileName ); String szDesPath = desPath.toString(); File file = new File( szDesPath ); RandomAccessFile randomAccessFile = this.sessionPhaser.getFileHandler(sessionId); randomAccessFile.seek( fileFrame.getOffset() ); randomAccessFile.write( fileFrame.getBytes() ); this.sessionPhaser.getSFMTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() ); String desDirectory = this.sessionPhaser.getDestinationDirectory( sessionId ); if( file.length() == fileFrame.getFileSize() ){ randomAccessFile.close(); this.sessionPhaser.removeSession( sessionId ); this.sessionValidator.fileTransmitComplete( head ); this.notifyFileTransmitCompleteEventSubscribers( sessionId, szDesPath, fileName, desDirectory ); } this.logger.info( "SFMService `transmitFileContent` completed. " ); } protected void notifyFileTransmitCompleteEventSubscribers( long sessionId, String szDesPath, String fileName, String desDirectory ) { Collection subscribers = this.distributionService.fetchFileTransmitCompleteEventSubscribers(); for ( SFMEventSubscriber subscriber : subscribers ) { subscriber.afterEventTriggered( szDesPath, fileName, desDirectory ); } } protected boolean assertTransmitTransaction( RequestHead head, String fileName ) { long sessionId = head.getSessionId(); SFMTransaction transaction = this.sessionPhaser.getSFMTransaction( sessionId ); if( transaction == null ){ this.logger.warn( "[Warning] SFMService `assertTransmitTransaction` session doesn't exist. " ); this.sessionPhaser.removeSession( sessionId ); return true; } long currentTimeMillis = System.currentTimeMillis(); if( currentTimeMillis - transaction.getLastEventArrivedMills() > this.config.getSessionExpiredTimeMillis() ){ this.logger.warn( "[Warning] SFMService `assertTransmitTransaction` session has expired. " ); this.rollBack( sessionId, fileName ); return true; } if( !transaction.isStartTransmit() ){ this.logger.warn( "[Warning] SFMService `assertTransmitTransaction` illegal transaction stage: transmission has not been started yet. " ); this.rollBack( sessionId, fileName ); return true; } return false; } protected void rollBack( long sessionId, String fileName ) { this.logger.warn( "[Warning] SFMService `rollBack`. " ); Path desPath = this.formatFilePath( sessionId, fileName ); File file = new File( desPath.toString() ); if ( file.exists() && !file.delete() ) { throw new IllegalStateException( "Purging file failed: " + fileName ); } this.sessionPhaser.removeSession( sessionId ); this.logger.warn( "[Warning] SFMService `rollBack` completed. 
" ); } } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMFileFrame.java ================================================ package com.walnut.sailor.stream.fm; import com.pinecone.framework.system.prototype.Pinenut; public class SFMFileFrame implements Pinenut { private byte[] bytes; private long fileSize; private String fileName; private long offset; private int bufferLength; public SFMFileFrame(){} public SFMFileFrame( byte[] bytes, long fileSize, String fileName, long offset, int bufferLength ){ this.bytes = bytes; this.fileSize = fileSize; this.fileName = fileName; this.offset = offset; this.bufferLength = bufferLength; } public byte[] getBytes(){ return this.bytes; } public void setBytes( byte[] bytes ){ this.bytes = bytes; } public long getFileSize(){ return this.fileSize; } public void setFileSize( long fileSize ){ this.fileSize = fileSize; } public String getFileName(){ return this.fileName; } public void setFileName( String fileName ){ this.fileName = fileName; } public long getOffset(){ return this.offset; } public void setOffset( long offset ){ this.offset = offset; } public long getBufferLength(){ return this.bufferLength; } public void setBufferLength( int bufferLength ){ this.bufferLength = bufferLength; } } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMSessionPhaser.java ================================================ package com.walnut.sailor.stream.fm; import com.walnut.sailor.stream.fm.session.SFMTransaction; import java.io.RandomAccessFile; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; public class SFMSessionPhaser implements SessionPhaser { private ConcurrentMap sessionHandlers; public SFMSessionPhaser() { this.sessionHandlers = new ConcurrentHashMap<>(); } @Override public void registerSessionTransaction( Long sessionId, SFMTransaction transaction ) { PhaseHandler handler = this.sessionHandlers.computeIfAbsent( sessionId, (k)->{ return new PhaseHandler(); } ); handler.sfmTransaction = transaction; } @Override public void registerDestinationDirectory( Long sessionId, String destinationDirectory ) { PhaseHandler handler = this.sessionHandlers.computeIfAbsent( sessionId, (k)->{ return new PhaseHandler(); } ); handler.destinationDirectory = destinationDirectory; } @Override public SFMTransaction getSFMTransaction( Long sessionId ) { PhaseHandler handler = this.sessionHandlers.get( sessionId ); if ( handler != null ) { return handler.sfmTransaction; } return null; } @Override public String getDestinationDirectory( Long sessionId ) { PhaseHandler handler = this.sessionHandlers.get( sessionId ); if ( handler != null ) { return handler.destinationDirectory; } return null; } @Override public void removeSession( Long sessionId ) { this.sessionHandlers.remove( sessionId ); } @Override public void registerFileHandler( Long sessionId, RandomAccessFile randomAccessFile ) { PhaseHandler handler = this.sessionHandlers.computeIfAbsent( sessionId, (k)->{ return new PhaseHandler(); } ); handler.fileHandler = randomAccessFile; } @Override public RandomAccessFile getFileHandler( Long sessionId ) { PhaseHandler handler = this.sessionHandlers.get( sessionId ); if ( handler != null ) { return handler.fileHandler; } return null; } public static class PhaseHandler { public SFMTransaction sfmTransaction; public RandomAccessFile fileHandler; public String destinationDirectory; 
} } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMSessionValidator.java ================================================ package com.walnut.sailor.stream.fm; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.walnut.sailor.stream.fm.protocol.RequestHead; public class SFMSessionValidator implements SessionValidator { protected BroadcastControlProducer producer; protected SessionValidator sessionValidator; protected SingleStreamFileMultiDistributionService distributionService; public SFMSessionValidator( SingleStreamFileMultiDistributionService service ) { this.producer = service.getTransmitProducer(); this.distributionService = service; this.sessionValidator = this.producer.getIface( SessionValidator.class, this.distributionService.getConfig().getFileCloudDistributeTransmitTopic() ); } @Override public void fileTransmitComplete( RequestHead head ) { this.sessionValidator.fileTransmitComplete( head ); } } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMSessionValidatorController.java ================================================ package com.walnut.sailor.stream.fm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.AddressMapping; import com.pinecone.hydra.umct.stereotype.Controller; import com.walnut.sailor.stream.fm.protocol.RequestHead; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Controller @AddressMapping( "com.walnut.sailor.stream.fm.SessionValidator." ) public class SFMSessionValidatorController implements Pinenut { protected Logger logger; public SFMSessionValidatorController(){ this.logger = LoggerFactory.getLogger( this.getClass() ); } @AddressMapping( "fileTransmitComplete" ) public void fileTransmitComplete( RequestHead head ){ this.logger.info( "FileTransmitComplete sessionId:" + head.getSessionId() ); } } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SailorFMConfig.java ================================================ package com.walnut.sailor.stream.fm; import java.util.Map; public class SailorFMConfig implements SFMConfig { protected String mszStorageDirectory; protected int mnFileFrameSize; protected long mnSessionExpiredTimeMillis; protected String mszFileCloudDistributeTransmitTopic; protected String mszFileServiceTransmitGroup; public SailorFMConfig ( Map configMap ) { this.mnFileFrameSize = ( (Number)configMap.get("fileFrameSize") ).intValue(); this.mnSessionExpiredTimeMillis = ( (Number)configMap.get("sessionExpiredTimeMillis") ).longValue(); this.mszFileCloudDistributeTransmitTopic = (String) configMap.get("fileCloudDistributeTransmitTopic"); this.mszFileServiceTransmitGroup = (String) configMap.get("fileServiceTransmitGroup"); this.mszStorageDirectory = (String) configMap.get("storageDirectory"); } @Override public int getFileFrameSize() { return this.mnFileFrameSize; } @Override public long getSessionExpiredTimeMillis() { return this.mnSessionExpiredTimeMillis; } @Override public String getFileCloudDistributeTransmitTopic() { return this.mszFileCloudDistributeTransmitTopic; } @Override public String getStorageDirectory() { return this.mszStorageDirectory; } @Override public String getFileServiceTransmitGroup() { return this.mszFileServiceTransmitGroup; } } ================================================ FILE: 
Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SailorFMDistributionService.java ================================================ package com.walnut.sailor.stream.fm; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode; import com.walnut.sailor.stream.fm.event.SFMEventSubscriber; import com.walnut.sailor.stream.fm.protocol.RequestHead; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; public class SailorFMDistributionService implements SingleStreamFileMultiDistributionService { protected UlfBroadcastControlNode transmitClient; protected BroadcastControlProducer transmitProducer; protected BroadcastControlConsumer transmitConsumer; protected SFMConfig config; protected Map directionRoute; protected List fileTransmitCompleteEventSubscribers; public SailorFMDistributionService( UlfBroadcastControlNode client, SFMConfig config ) { this.transmitClient = client; this.config = config; this.directionRoute = new ConcurrentHashMap<>(); this.fileTransmitCompleteEventSubscribers = new ArrayList<>(); } @Override public SingleStreamFileMultiDistributionService registerFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber ) { if ( this.hasStarted() ) { throw new IllegalStateException( "FileMultiDistributionService has already started." ); } this.fileTransmitCompleteEventSubscribers.add( subscriber ); return this; } @Override public SingleStreamFileMultiDistributionService deregisterFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber ) { if ( this.hasStarted() ) { throw new IllegalStateException( "FileMultiDistributionService has already started." 
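/* A minimal end-to-end usage sketch for SailorFMDistributionService (illustrative: `node` is an already-constructed UlfBroadcastControlNode and `conf` an SFMConfig; neither name comes from this repository, and the lambda assumes SFMEventSubscriber is effectively functional, Pinenut contributing only defaults):
SingleStreamFileMultiDistributionService service = new SailorFMDistributionService( node, conf );
service.registerFileTransmitCompleteEventSubscriber( ( path, fileName, directory ) -> System.out.println( "Received " + fileName + " into " + directory ) );
service.start();                                          // compiles the ifaces, registers the controller, starts consumer and producer
service.distributeFile( new File( "demo.bin" ), null );   // null token falls back to the configured storage directory
service.shutdown(); */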
); } this.fileTransmitCompleteEventSubscribers.remove( subscriber ); return this; } @Override public Collection fetchFileTransmitCompleteEventSubscribers() { return this.fileTransmitCompleteEventSubscribers; } @Override public UlfBroadcastControlNode getTransmitClient() { return this.transmitClient; } @Override public BroadcastControlConsumer getTransmitConsumer() { return this.transmitConsumer; } @Override public BroadcastControlProducer getTransmitProducer() { return this.transmitProducer; } @Override public String queryDestinedDirectoryByToken( String token ) { return this.directionRoute.get( token ); } @Override public void registerDirectionRoute( String token, String directoryPath ) { this.directionRoute.put( token, directoryPath ); } @Override public void deregisterDirectionRoute( String token ) { this.directionRoute.remove( token ); } @Override public boolean hasStarted() { return this.transmitProducer != null; } @Override public void start() throws UMBServiceException { if ( !this.hasStarted() ) { this.transmitProducer = this.transmitClient.createBroadcastControlProducer(); this.transmitConsumer = this.transmitClient.createBroadcastControlConsumer( this.config.getFileCloudDistributeTransmitTopic(), this.config.getFileServiceTransmitGroup() ); this.transmitProducer.compile( FileMultiDistributionIface.class,false ); this.transmitProducer.compile( SessionValidator.class, false ); this.transmitConsumer.registerController( new SFMDistributionController( this ) ); this.transmitConsumer.start(); this.transmitProducer.start(); } } @Override public void shutdown() { if ( this.hasStarted() ) { this.transmitConsumer.close(); this.transmitProducer.close(); this.transmitConsumer = null; this.transmitProducer = null; } } @Override public SFMConfig getConfig() { return this.config; } @Override public void distributeFile( File file, String directionRouteToken ) throws IOException { FileMultiDistributionIface distributionIface = this.transmitProducer.getIface( FileMultiDistributionIface.class, this.config.getFileCloudDistributeTransmitTopic() ); RequestHead head = RequestHead.newRequest().setSessionId(System.currentTimeMillis()); distributionIface.startDistribution( head, file.getName(), directionRouteToken ); long fileSize = file.length(); try ( FileInputStream fileInputStream = new FileInputStream(file) ) { int bufferSize = this.config.getFileFrameSize(); byte[] buffer = new byte[ bufferSize ]; int bytesRead; long currentPosition = 0; while ( (bytesRead = fileInputStream.read(buffer)) != -1 ) { byte[] dataChunk = bytesRead == bufferSize ? 
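/* Framing note: each SFMFileFrame carries (bytes, fileSize, fileName, offset, length); the receiver seeks to `offset`, writes the chunk, and treats the transfer as complete once the on-disk length equals fileSize. Only the last frame can be short, hence the Arrays.copyOf below. For example, a 2_621_440-byte file with a 1 MiB (1_048_576) frame size yields frames at offsets 0, 1_048_576 and 2_097_152, the last one 524_288 bytes long. */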
buffer : Arrays.copyOf(buffer, bytesRead); distributionIface.transmitFileContent(head, new SFMFileFrame( dataChunk, fileSize, file.getName(), currentPosition, bytesRead) ); currentPosition += bytesRead; } } } @Override public void distributeFile( String szFileName, String originalDirectory, String directionRouteToken ) throws IOException { Path targetPath = Path.of( originalDirectory, szFileName ); File file = new File( targetPath.toString() ); this.distributeFile( file, directionRouteToken ); } /*@Override public void fileDistributionJar(File file, String topic) throws IOException { if (isJarFile(file)) { stopCurrentJarProcess(); cleanExistingBackup(); // clean up the old backup File backupFile = createBackup(file); // create a new backup this.fileDistribution(backupFile, topic); this.currentJarFile = backupFile; startJarProcess(this.currentJarFile); } else { this.fileDistribution(file, topic); } } private File createBackup(File jarFile) throws IOException { File backupDir = new File(BACKUP_DIR); if (!backupDir.exists() && !backupDir.mkdirs()) { throw new IOException("Cannot create backup directory: " + BACKUP_DIR); } String baseName = jarFile.getName().replaceFirst("\\.jar$", ""); File backupFile = new File(backupDir, baseName + BACKUP_SUFFIX); // overwrite try ( InputStream in = new FileInputStream(jarFile); OutputStream out = new FileOutputStream(backupFile, false)) { byte[] buffer = new byte[1024 * 1024]; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } System.out.println("Backup file updated: " + backupFile.getAbsolutePath()); return backupFile; } } private void cleanExistingBackup() { if (currentJarFile != null && currentJarFile.exists()) { try { Files.delete(currentJarFile.toPath()); System.out.println("Old file cleaned: " + currentJarFile.getName()); } catch (IOException e) { System.err.println("Failed to clean old file: " + e.getMessage()); } } } private boolean isJarFile(File file) { return file != null && file.isFile() && file.getName().toLowerCase().endsWith(".jar"); } private void stopCurrentJarProcess() { if (currentJarProcess != null) { currentJarProcess.destroyForcibly(); try { if (!currentJarProcess.waitFor(10, TimeUnit.SECONDS)) { System.err.println("Warning: process termination timed out"); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); System.err.println("Process termination was interrupted"); } currentJarProcess = null; } } private void startJarProcess(File jarFile) throws IOException { if (!jarFile.exists()) { throw new FileNotFoundException("JAR file does not exist: " + jarFile.getAbsolutePath()); } String javaPath = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java"; ProcessBuilder processBuilder = new ProcessBuilder(javaPath, "-jar", jarFile.getAbsolutePath()); processBuilder.redirectErrorStream(true); try { this.currentJarProcess = processBuilder.start(); new Thread(() -> readStream(currentJarProcess.getInputStream())).start(); System.out.println("Latest version started: " + jarFile.getName()); } catch (IOException e) { throw new IOException("Process start failed: " + e.getMessage(), e); } } private void readStream(InputStream inputStream) { try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) { String line; while ((line = reader.readLine()) != null) { System.out.println("[JAR output] " + line); } } catch (IOException e) { System.err.println("Output read error: " + e.getMessage()); } }*/ }
================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SessionPhaser.java ================================================
package com.walnut.sailor.stream.fm; import
com.pinecone.framework.system.prototype.Pinenut; import com.walnut.sailor.stream.fm.session.SFMTransaction; import java.io.RandomAccessFile; public interface SessionPhaser extends Pinenut { void registerSessionTransaction( Long sessionId, SFMTransaction SFMTransaction ); SFMTransaction getSFMTransaction( Long sessionId ); void registerDestinationDirectory( Long sessionId, String destinationDirectory ); String getDestinationDirectory( Long sessionId ); void removeSession( Long sessionId ); void registerFileHandler( Long sessionId, RandomAccessFile randomAccessFile ); RandomAccessFile getFileHandler( Long sessionId ); } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SessionValidator.java ================================================ package com.walnut.sailor.stream.fm; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umct.stereotype.Iface; import com.walnut.sailor.stream.fm.protocol.RequestHead; @Iface public interface SessionValidator extends Pinenut { void fileTransmitComplete( RequestHead head ); } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SingleStreamFileMultiDistributionService.java ================================================ package com.walnut.sailor.stream.fm; import java.io.File; import java.io.IOException; import java.util.Collection; import com.pinecone.framework.system.prototype.Pinenut; import com.pinecone.hydra.umb.UMBServiceException; import com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer; import com.pinecone.hydra.umb.broadcast.BroadcastControlProducer; import com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode; import com.walnut.sailor.stream.fm.event.SFMEventSubscriber; public interface SingleStreamFileMultiDistributionService extends Pinenut { void distributeFile( File file, String directionRouteToken ) throws IOException; void distributeFile( String szFileName, String originalDirectory, String directionRouteToken ) throws IOException; boolean hasStarted(); void start() throws UMBServiceException; void shutdown(); SFMConfig getConfig(); UlfBroadcastControlNode getTransmitClient() ; BroadcastControlConsumer getTransmitConsumer() ; BroadcastControlProducer getTransmitProducer() ; String queryDestinedDirectoryByToken( String token ); void registerDirectionRoute( String token, String directoryPath ); void deregisterDirectionRoute( String token ); SingleStreamFileMultiDistributionService registerFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber ); SingleStreamFileMultiDistributionService deregisterFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber ); Collection fetchFileTransmitCompleteEventSubscribers(); } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/event/SFMEventSubscriber.java ================================================ package com.walnut.sailor.stream.fm.event; import com.pinecone.framework.system.prototype.Pinenut; public interface SFMEventSubscriber extends Pinenut { void afterEventTriggered( String path, String fileName, String directoryPath ) ; } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/protocol/RequestHead.java ================================================ package com.walnut.sailor.stream.fm.protocol; public class 
RequestHead { protected long sessionId; public RequestHead setSessionId(long sessionId ) { this.sessionId = sessionId; return this; } public long getSessionId() { return this.sessionId; } public static RequestHead newRequest() { return new RequestHead(); } } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/protocol/UFMCFileMeta64.java ================================================ package com.walnut.sailor.stream.fm.protocol; import com.pinecone.framework.system.prototype.Pinenut; public class UFMCFileMeta64 implements Pinenut { protected String sourceName; public UFMCFileMeta64(){} public UFMCFileMeta64( String sourceName ){ this.sourceName = sourceName; } public String getSourceName(){ return this.sourceName; } public void setSourceName( String sourceName ){ this.sourceName = sourceName; } } ================================================ FILE: Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/session/SFMTransaction.java ================================================ package com.walnut.sailor.stream.fm.session; import java.util.concurrent.atomic.AtomicBoolean; import com.pinecone.framework.system.prototype.Pinenut; public class SFMTransaction implements Pinenut { protected AtomicBoolean startTransmit; protected AtomicBoolean transmitFileContent; protected AtomicBoolean fileDistributionComplete; protected long lastEventArrivedMills; public SFMTransaction() { this.startTransmit = new AtomicBoolean(false); this.transmitFileContent = new AtomicBoolean(false); this.fileDistributionComplete = new AtomicBoolean(false); } public long getLastEventArrivedMills() { return this.lastEventArrivedMills; } public void setLastEventArrivedMills( long lastEventArrivedMills ) { this.lastEventArrivedMills = lastEventArrivedMills; } public boolean finishStartTransmit() { return this.startTransmit.compareAndSet(false, true); } public boolean finishTransmitFileContent() { return this.transmitFileContent.compareAndSet(false, true); } public boolean finishFileDistributionComplete() { return this.fileDistributionComplete.compareAndSet(false, true); } public boolean isStartTransmit() { return this.startTransmit.get(); } public boolean isTransmitFileContent() { return this.transmitFileContent.get(); } public boolean isFileDistributionComplete() { return this.fileDistributionComplete.get(); } } ================================================ FILE: gitignore.txt ================================================ # Compiled class file *.class # Log file *.log # BlueJ files *.ctxt # Mobile Tools for Java (J2ME) .mtj.tmp/ # Package Files # *.jar *.war *.nar *.ear *.zip *.tar.gz *.rar # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml hs_err_pid* replay_pid* ================================================ FILE: pom.xml ================================================ 4.0.0 com.sauron sauron pom 1.2.7 pinecones hydra saurons sparta walnuts odin archcraft redqueen skynet org.springframework.boot spring-boot-starter-parent 2.4.1 11 11 UTF-8 2020.0.3 3.0.1 8.0.23 2.1.1 2.2.5.RELEASE 2.3.9.RELEASE 2.3.9.RELEASE 2.3.9.RELEASE 3.4.1 6.1.6.RELEASE 2.9.0 1.2.75 org.springframework.cloud spring-cloud-dependencies ${spring-cloud.version} pom import org.springframework.cloud spring-cloud-starter-bootstrap ${spring-cloud-start.version} pom import com.alibaba.cloud spring-cloud-alibaba-dependencies ${nacos.version} pom import mysql mysql-connector-java ${mysql.version} 
org.springframework.boot spring-boot-starter-data-redis spring-data-redis org.springframework.data lettuce-core io.lettuce ${redis.version} org.springframework.boot spring-boot-starter-test ${boot.version} org.mybatis.spring.boot mybatis-spring-boot-starter ${mybatis.version} com.baomidou mybatis-plus-boot-starter ${mybatis-plus.version} org.springframework.boot spring-boot-starter-amqp 2.3.9.RELEASE io.lettuce lettuce-core ${io-lettcue.version} org.apache.commons commons-pool2 ${commons-pool2.version} com.alibaba fastjson ${fastjson.version} org.springframework.cloud spring-cloud-context 3.0.3 org.springframework.cloud spring-cloud-commons 3.0.3 org.projectlombok lombok org.aspectj aspectjweaver com.google.protobuf protobuf-java 4.28.2
================================================ FILE: prompt/base_front_standard.md ================================================
# Coding Standard
This Skill applies to vue; the core rules are:
1. Chinese-only presentation: all titles, descriptions and categories use fluent Chinese; the parenthesized "Chinese (English)" annotation form is strictly forbidden.
2. Standalone interface architecture: abandon the traditional "breadcrumb navigation" and "modal dialogs"; adopt a full-screen sectioned layout or a sidebar-linked single-page application structure.
3. Every skill module is an independent visual area; clicking it shows details directly via smooth scrolling or view switching, never a popup dialog.
================================================ FILE: prompt/coding_standard.md ================================================
# Coding Standard
This Skill applies to C/C++, Java and ECMAScript; the core style rules are:
1. Never omit any this, whether for function calls, member variable access, etc.
2. Never omit any brackets, e.g.:
```java
switch ( exp ) { case 1: { doSomething(); break; } default: { doSomethingElse(); } }
```
3. Except for generic brackets (), all other brackets must have spaces around them, e.g.:
3.1. The parentheses of control-flow statements such as if, for, while and switch must have spaces around them, e.g.:
```java
if ( 2 * ( condition[ "key" ] + 4 ) ) { doSomething(); }
```
4. When a single code line does not exceed 140 characters, do not wrap it (string scenarios excepted).
5. For C/C++ and Java, use the following Hungarian notation (unless the context disables it):
5.1. Member variables (primitive types stack the type prefix): mMember, mszString, mnNumber, mbFlag
5.2. Strings: szString; const char*: lpszString
5.3. Any number (integer, decimal): nNumber
5.4. Logical (bool): bFlag
5.5. Structs and objects: DataStruct dataStruct, mDataStruct (as a member); the full name suffices, no extra prefix stacking
5.6. Pointers: pDataStruct, lpDataStruct
5.7. Temporary variables need no Hungarian notation
6. Abstract classes: under the template pattern, ArchClass (Archetypic Class) is mandatory; in other scenarios use AbstractClass.
7. Alignment, e.g.:
```
public class MyClass { private int mMemberVariable = XXX; private String mszStringVariable = XX ; } function() { var i = 1234; var sz = "Hello World"; }
```
8. Vocabulary:
8.1 Default interface implementations use Generic, unless explicitly given otherwise.
8.2 For getOrCreate semantics, uniformly use affirm, e.g. affirmPath( String szPath )
8.3 For subclass-to-base mapping implementations, uniformly use evince, e.g.:
```java
public interface ElementNode extends TaskTreeNode { default AppElement evinceAppElement() { return null; } default TaskElement evinceTaskElement() { return null; } } public interface TaskElement extends ElementNode { @Override default TaskElement evinceTaskElement() { return this; } }
```
================================================ FILE: prompt/mysql_table_standard.md ================================================
# MySQL Table Creation & SQL Coding Standard
This Skill applies to MySQL; the core style rules are:
## Goal
Establish a unified, auditable, extensible MySQL DDL style suitable for high concurrency and evolution toward sharding.
## 1. Standard structure template (mandatory)
```sql
CREATE TABLE `{{table_name}}` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT 'Primary key ID',
  -- business fields go here
  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Creation time',
  `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'Update time',
  PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC;
```
### 2. Time field rules (mandatory)
Every business table must contain:
```sql
create_time
update_time
```
### 3. Storage engine rules
Must be:
```
ENGINE=InnoDB
```
### 4. Character set rules
Use utf8 by default; large content text fields use utf8mb4.
### 5. Every column must carry a COMMENT
Uncommented columns are forbidden.
### 6. SQL syntax
6.1 SELECT * is forbidden.
6.2 Single-letter table aliases are not allowed.
6.3 Table aliases may use abbreviations, not full names; moreover, non-join queries need no aliases or table-name prefixes.
6.4 Keywords are uppercase.
6.5 Conflicting keys are marked with the '\`' character, e.g. '`key`'. e.g.
```sql
SELECT `key` FROM table WHERE id > 1234;
```
```sql
SELECT t1.`key`, t2.`k2` FROM table as t1 LEFT JOIN table2 as t2 ON t1.x = t2.x WHERE t1.id > 1234;
```
### 7. Ibatis
7.1 Params do not use Hungarian notation, e.g. @Param("guid") GUID guid
7.2 For GUIDs use GUID directly, e.g. @Param("guid") GUID guid
7.3 Special characters in XML are written as:
================================================ FILE: system/setup/CenterMessagram.json5 ================================================
{ "Engine" : "com.pinecone.tritium.messagron.Messagron", "Enable" : true, "ExpressFactory" : "com.pinecone.framework.util.lang.GenericDynamicFactory", "Expresses" : { "WolfMCExpress": { "Engine": "com.pinecone.hydra.umct.WolfMCExpress" } } }
================================================ FILE: system/setup/ServersScope.json5 ================================================
{ "CentralCluster": { "Kingpin": { // Intranet out-bound master "Name" : "BeanServerKingpin", "NickName" : "SerKingpin", "LocalDomain" : "B-ServerKingpin", "WideDomain" : "", "Location" : "LocalNetwork", "Enable" : true, "Station" : "Master", "DevArchitecture" : [ "CPU", "MEM", "SSD", "HDD" ] }, "Paladin": [ // Intranet sub-net master { "Name" : "BeanServerPaladinMasterTask", "NickName" : "SerPaladinMT", "LocalDomain" : "B-ServerPaladin", "WideDomain" : "", "Location" : "LocalNetwork", "Enable" : true, "Station" : "MasterTask", "DevArchitecture" : [ "CPU", "MEM", "SSD", "HDD" ] }, { "Name" : "BeanServerPaladinMasterProcess", "NickName" : "SerPaladinMP", "LocalDomain" : "Bean-PC-PH317", "WideDomain" : "", "Location" : "LocalNetwork", "Enable" : true, "Station" : "MasterProcess", "DevArchitecture" : [ "CPU", "MEM", "GPU", "NVMe", "SSD" ] } ], "Minion": [ { "Name" : "BeanServerNutRoot", "NickName" : "SerNutRoot", "LocalDomain" : "B-ServerNutRoot", "WideDomain" : "root.nutgit.com", "Location" : "WideNetwork", "Enable" : true, "Station" : "MasterOutBoundRouter", "DevArchitecture" : [ "NET" ] }, { "Name" : "BeanServerNutNode1", "NickName" : "SerNutNode1", "LocalDomain" : "B-ServerNutNode1", "WideDomain" : "node1.nutgit.com", "Location" : "WideNetwork", "Enable" : true, "Station" : "AuxiliaryOutBoundRouter", "DevArchitecture" : [ "NET" ] } ], "Slave": [ ] }, "EdgeChains": { } }
================================================ FILE: system/setup/SpringBootApplication.json5 ================================================
{ "server": { "port": 8080, "servlet": { "context-path": "/" } }, "spring": { "servlet": { "multipart": { "max-file-size": "4096MB", "max-request-size": "4096MB" } }, "datasource": { // "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true", // "username": "root", // "password": "123456", "url": "jdbc:mysql://b-serverkingpin:33062/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true", "username": "root", "password": "", "driver-class-name": "com.mysql.cj.jdbc.Driver", }, "mybatis":{ "mapper-locations": "classpath:mapper/*.xml" }, "redis": { "host": "localhost", "port": 6379, "password": "Genius123", "lettuce": { "pool": { "max-active": 8, "max-idle": 8, "min-idle": 0, "max-wait": 100, "time-between-eviction-runs": "10s" } } }, "rabbitmq": { "host": "node1.nutgit.com", // host name "port": 13394, // port "virtual-host": "/wolf", // virtual host "username": "test", // username "password": "test" // password }, "sparta": { "datasource": "mysql" } } }
================================================ FILE: system/setup/StorageSystem.json5 ================================================
{ "PathScope" : { "Reinterpret": { "ARBOmnium" :
"\\\\${SerKingpin}\\ARBOmnium", "OmniumEnderChest" : "${ARBOmnium}/EnderChest", "OmniumFacility" : "${OmniumEnderChest}/Facility", "OmniumMegaH" : "${OmniumEnderChest}/MegaH", "OmniumKingpin0" : "${OmniumEnderChest}/Kingpin0", "OmniumKingStream0" : "${OmniumEnderChest}/SauronL3Stream", "PaladinHive" : "\\\\${SerPaladinMT}\\PaladinHive", "EnderPaladin" : "${PaladinHive}/EnderChest", "PalSauotion0" : "${EnderPaladin}/Sauotion0", "PalSauotion1" : "${EnderPaladin}/Sauotion1", "PalSaurmion2" : "${EnderPaladin}/Saurmion2", "PalSauotion3" : "${EnderPaladin}/Sauotion3", "PalSaurtion4" : "${EnderPaladin}/Saurtion4", "PalSauotion5" : "${EnderPaladin}/Sauotion5", "PalSauegaion0" : "${EnderPaladin}/Sauegaion0", "PalSauegaion1" : "${EnderPaladin}/Sauegaion1", "SystemConfRoot" : "./system/setup" }, }, "Protocols" : { "files" : { "Provide": "" }, // Default local filesystem. "http" : { "Provide": "org.apache.commons.vfs2.provider.http5.Http5FileProvider" }, "https" : { "Provide": "org.apache.commons.vfs2.provider.http5s.Http5sFileProvider" }, "smb" : { "Provide": "" }, // Using windows. "webdav" : { "Provide": "org.apache.commons.vfs2.provider.webdav.WebdavFileProvider" }, "hdfs" : { "Provide": "" }, // TODO }, "CacheStrategy" : "ON_CALL", "FilesCache" : "org.apache.commons.vfs2.cache.SoftRefFilesCache", } ================================================ FILE: system/setup/config.json5 ================================================ { "System" : { "MinionName" : "ShadowKingpin", "ServiceID" : "Shadow-Kingpin-Prime", "ServiceArch" : "Master", // Master, Paladin, Minion, Slave "MasterQuery" : false, "DebugMode" : true, // For, C/C++ Edition, Java todo. "Tracer" : { "ConsoleTrace" : true, "InfoTracer" : "./system/logs/${pid}_SysTrace.log", "ErrTracer" : "./system/logs/${pid}_SysError.log", }, "Middleware" : { "RDBs": { "Configs": { "Engine": "com.pinecone.tritium.ally.rdb.GenericIbatisClient", "Enable": true, "JDBC": { "Driver" : "com.mysql.cj.jdbc.Driver", "ExURL" : "autoReconnect=true" }, "Ibatis": { "Environment" : "development", "DataSource" : "org.apache.ibatis.datasource.pooled.PooledDataSource", "TransactionFactory": "org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory", "PooledConfig" : { "InitialSize": 0, "MaxActive" : 20, "MaxIdle" : 20, "MinIdle" : 1, "MaxWait" : 60000 }, "DataAccessObject" : { "Scanner" : "com.pinecone.slime.jelly.source.ibatis.IbatisDAOScanner", "ScanScopes" : [] } } }, "Databases": { "MySQLKingSystem": { "dbType" : "mysql", "host" : "b-serverkingpin", "username" : "root", "password" : "", "database" : "hydranium", "port" : 3306, "charset" : "utf8", "tablePrefix": "nona_", "Enable" : true }, "MySQLKingHydranium": { "dbType" : "mysql", "host" : "b-serverkingpin", "username" : "root", "password" : "", "database" : "hydranium", "port" : 33062, "charset" : "utf8", "tablePrefix": "nona_", "Enable" : true }, "MySQLKingData0": { "dbType" : "mysql", "host" : "b-serverkingpin", "username" : "root", "password" : "", "database" : "nonaron", "port" : 33062, "charset" : "utf8", "tablePrefix": "nona_", "Enable" : true } } }, "Indexables": { "Configs": { }, "Databases": { "RedisKingpin": { "host" : "b-serverkingpin", "password" : "", "port" : 6379, "Enable" : true } } }, "Messagers": { "Configs": { "Enable" : true, "IsRecipient" : false, // Is recipient or server. "AutoStartInMasterMode" : false // For client or messenger. 
}, "Messagers": { "CenterMessagram" : "./system/setup/CenterMessagram.json5", "RabbitMQKingpin" : { "host" : "b-serverkingpin", "username" : "undefined", "password" : "", "port" : 5672, "vhost" : "/wolf", "frame_max" : 131072, "channel_max" : 0, "heartbeat" : 0, "Enable" : false }, "WolfKing" : { // For RPC-Server "host" : "0.0.0.0", "port" : 5777, "username" : "undefined", // TODO, UMCs "password" : "", // TODO, UMCs "SocketTimeout" : 800, "KeepAliveTimeout" : 3, //3600, "MaximumConnections" : 1e6, "ExtraHeadCoder" : "com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder", "DefaultExtraEncode" : "JSONString", // JSONString / Binary "Engine" : "com.pinecone.hydra.umc.wolf.server.WolfMCServer", "MessageHandler" : "WolfMCExpress", "IsRecipient" : true, "Enable" : false }, "WolfMCKingpin" : { // For RPC-Client "host" : "localhost", "port" : 5777, "SocketTimeout" : 800, "KeepAliveTimeout" : 3, //3600, // 0 to close keep-alive "ParallelChannels" : 5, "ExtraHeadCoder" : "com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder", "DefaultExtraEncode" : "JSONString", // JSONString / Binary "Engine" : "com.pinecone.hydra.umc.wolf.client.WolfMCClient", "MessageHandler" : "WolfMCExpress", "AutoReconnect" : true, "EnableHeartbeat" : false, "HeartbeatInterval" : 2000, "Enable" : true } } } }, "Servers" : "./system/setup/ServersScope.json5", "StorageSystem" : "./system/setup/StorageSystem.json5", "WolfKingOFS" : { // TODO, OFS "ParentServiceID" : "", // "" is the root server service(Kingpin), else is the paladin server service. "EnableWolfUMService" : true, "Port" : 7577, "KeepAliveTimeout" : 3600, // 0 to close keep-alive "MaximumClients" : 10000, "MaxSoloQueue" : 100, // Inner solo task-thread maximum MessageQueue length. "UIOFS" : { // Ulfhedinn Unified Interface OSS file system "EnableDirectPost" : true, // Enable any-size file frame, e.g. Tiny 4K file (1MB <). "FileFrameSize" : 1048576, // Multiple files of FileFrame size 1 MB "FilePagedPoolSize" : 104857600, // FileBufferPagedPool:: Buffered file caches size(100 MB) in resident memory waiting for store in disk. "ForceBufferedWrite" : false // Any disk-IO must store in resident memory first waiting for write. } }, "SystemDaemon" : { "SurveillanceTime": 1000 }, "SystemCabinet" : { "MicroSystems": { //"Crusade": "./system/setup/cabinet/Crusade.json5" } } }, "MasterOrchestrator": { "Orchestration" : { "Name": "ServgramOrchestrator", "Type": "Parallel", // Enum: { Sequential, Parallel, Loop } // Servgram-Classes scanning package-scopes "ServgramScopes": [ "com.sauron.heist.heistron" ], "Transactions": [ { "Name": "Heist", "Type": "Sequential" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/, "Primary": true } ] }, "Servgrams": { "Heist": "./system/setup/heist.json5", "Sparta": "./system/setup/SpringBootApplication.json5", "SpartaAccountService": "./system/setup/sparta/AccountServiceSpring.json5", "SpartaUCDNService": "./system/setup/sparta/SpartaUCDNService.json5", } } } ================================================ FILE: system/setup/heist.json5 ================================================ { "ExertAffinity" : "SerKingpin", // Which server to execute, if is in joint-mission which must located at ONLY SerKingpin. 
"TraceLifecycle" : true, "EnableCmdCall" : true, // Designate with startup command: --heist==Heist "Orchestration" : { "Name": "HeistronOrchestrator", "Type": "Parallel", // Enum: { Sequential, Parallel, Loop } "DirectlyLoad" : { "Prefix": [], "Suffix": [ "Heist" ] }, "ServgramScopes": [ "com.sauron.shadow.heists", "com.sauron.zeron.chronicle", "com.sauron.crusade.heists", "com.sauron.zeron.heists" ], "Transactions": [ //{ "Name": "heist_intel_personage_news", "Type": "Parallel" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "FuturesDelivery", "Type": "Parallel" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "TradingChronicle", "Type": "Parallel" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "FestivalEvent", "Type": "Parallel" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "FiscalEvent", "Type": "Parallel" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "Chronicle", "Type": "Parallel" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "Prometheus", "Type": "Parallel" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, { "Name": "Void", "Type": "Sequential" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "Void", "Type": "Sequential" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, //{ "Name": "Void", "Type": "Sequential" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ } ] }, "ConfigScope" : { "LocalConfigs": { "PathScopes": [ "${SystemConfRoot}/heists/", "${SystemConfRoot}/chronicle/" ], "FileExtends": [ "json5", "jplus", "json" ] } }, "TemplatedConfig" : { "HeistArch" : "Radium", "HeistType" : "Templated", "HeistURL" : "", "StorageDriver" : "${OmniumKingpin0}", "IndexPath" : "${this.DriverPath}Sauron\\${ProjectName}\\index\\", "SpoilPath" : "${this.DriverPath}Sauron\\${ProjectName}\\Pages\\", "FragBase" : 0, "FragRange" : 100, "WorkingPath" : ".\\Sauron\\Heist\\${ProjectName}\\", "InfoTracer" : "heistInfo.log", "ErrTracer" : "heistErr.log", "SysTracer" : "heistSys.log", "DyingMsgFile" : "dyingMsg.json5", // Enable full-status tombstone dying msg. Empty string to close. "MasterConfQuery" : false, // Enable Task::ConfQuery from master server, else use next arguments. "TaskFrom" : 0, "TaskTo" : 0, "MaximumThread" : 5, "ReaverTasks" : 10000, "EnableRawIdQuery" : false, // 'True' to open the MutualIndexFile. "MutualIndexFile" : "index_list.json5", "FromDeathPoint" : true, "Metier" : "Stalker", // Reaver(To loot), Stalker(To search index), Embezzler(To profile and store spoils) "FailureConf" : { "FailedFileSize" : 2000, "FileRetrieveTime" : 3 // 1 for no retrieve retrying. 
}, "ExParentHeist" : "", "Children" : { }, "Orchestration" : { "Type": "Parallel", // Enum: { Sequential, Parallel, Loop } "ServgramScopes": [ ], "Transactions": [ ] } }, "Heists" : { "Void" : "${SystemConfRoot}/heists/Void.json5", "Urukhai" : "${SystemConfRoot}/heists/UrukhaiHeists.json5", "Wikipedia" : "${SystemConfRoot}/heists/Wikipedia.json5", "IMDB" : { "HeistType" : "Templated", "HeistURL" : "https://www.imdb.com", "IndexPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\IMDB\\index\\", "SpoilPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\IMDB\\Pages\\", "FragBase" : 10000, "FragRange" : 1000000, "WorkingPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Sauron\\Heist\\IMDB\\", "TaskFrom" : 0, "TaskTo" : 11064506, // 11064506 "MaximumThread" : 5, "ReaverTasks" : 1000, "FromDeathPoint" : true, "Metier" : "Stalker", "IndexSniffer" : { "Type" : "NextPageBased", "IndexApiHref" : "/search/title/?release_date=1970-01-01,2023-01-01&count=250", // IMDB Only support Video since 1970. To update use last date. "NextSelector" : ".lister-page-next.next-page", "NextPageKeyWord" : "Next »" } }, "DouBan" : "${SystemConfRoot}/heists/DouBan.json5", "NeteaseMusic" : "${SystemConfRoot}/heists/NeteaseMusic.json5", "DownCC" : "${SystemConfRoot}/heists/DownCC.json5", "GeoNames" : "${SystemConfRoot}/heists/GeoNames.json5", "ArtStation" : "${SystemConfRoot}/heists/ArtStation.json5", "MobyGames" : "${SystemConfRoot}/heists/MobyGames.json5", "AZLyrics" : { "HeistType" : "Templated", "HeistURL" : "https://www.azlyrics.com", "IndexPath" : "${OmniumFacility}\\Heist\\AZLyrics\\index\\", "SpoilPath" : "${OmniumFacility}\\Heist\\AZLyrics\\pages\\", "WorkingPath" : "${OmniumFacility}\\Sauron\\Heist\\AZLyrics\\", "TaskFrom" : 0, "TaskTo" : 500000, // 2491995 "MaximumThread" : 5, "Metier" : "Stalker", "FailureConf" : { "FailedFileSize" : 10000, "FileRetrieveTime" : 1 // 1 for no retrieve retrying. 
}, "BrowserSimConf" : { "HeadConfigGroup" : [ { "MaxHTTPPackageSize" : 1024, "AgentName" : "Mozilla/5.0", "UserAgentHead" : "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" } ], "GroupConfusion" : true, "EnableRandomDelay" : true, "RandomDelayMin" : 25000, "RandomDelayMax" : 30000 }, "IndexSniffer" : { "Type" : "NextPageBased", "IndexApiHref" : "/wiki/Special:AllPages", "NextSelector" : ".mw-allpages-nav", "NextPageKeyWord" : "Next page" } }, "LyricsTranslate" : { "HeistType" : "Sophisticate", "HeistURL" : "https://lyricstranslate.com", "IndexPath" : "${OmniumFacility}\\Heist\\LyricsTranslate\\index\\", "SpoilPath" : "${OmniumKingpin0}/Sauron/Heist/LyricsTranslate/Pages/", "FragBase" : 10000, "FragRange" : 1000000, "WorkingPath" : "${OmniumFacility}\\Sauron\\Heist\\LyricsTranslate\\", "TaskFrom" : 0, "TaskTo" : 2355032, //2355032 "MaximumThread" : 10, "ReaverTasks" : 1000, "MutualIndexFile" : "MutualSongIndex.json5", "FromDeathPoint" : true, //"Metier" : "Stalker", "Metier" : "Reaver", "ArtistsAPIHref" : "/en/artists", "IndexSniffer" : { "Type" : "NextPageBased", "IndexApiHref" : "", "NextSelector" : ".pager-next", "NextPageKeyWord" : "next", // Defaulted "StorageFmt" : "index_${mutualId}.html", "NextHrefFmt" : "${DomainHref}${this}" } }, "LatinIsSimple" : { "HeistType" : "Templated", "HeistURL" : "https://www.latin-is-simple.com", "IndexPath" : "${OmniumFacility}\\Heist\\LatinIsSimple\\index\\", "SpoilPath" : "${OmniumFacility}\\Heist\\LatinIsSimple\\Pages\\", "FragBase" : 1000, "FragRange" : 100000, "WorkingPath" : "${OmniumFacility}\\Sauron\\Heist\\LatinIsSimple\\", "TaskFrom" : 0, "TaskTo" : 49980, "MaximumThread" : 5, "ReaverTasks" : 1000, "FromDeathPoint" : true, "Metier" : "Reaver", //"Metier" : "Stalker", //"Metier" : "Embezzler", "CategoryIdxMap" : [ "Nouns", "Verbs", "Adjectives", "Adverbs", "OtherWords", "Phrases" ], "ArtistsAPIHref" : "/en/artists", "IndexSniffer" : { "Type" : "NextPageBased", "IndexApiHref" : "", "NextSelector" : ".pager-next", "NextPageKeyWord" : "next", // Defaulted "StorageFmt" : "index_${mutualId}.html", "NextHrefFmt" : "${DomainHref}${this}" } }, "Prometheus" : "${SystemConfRoot}/heists/Prometheus.json5", "FiscalEvent" : "${SystemConfRoot}/heists/FiscalEvent.json5", "FestivalEvent" : "${SystemConfRoot}/heists/FestivalEvent.json5", "TradingChronicle" : "${SystemConfRoot}/heists/TradingChronicle.json5", "FuturesDelivery" : "${SystemConfRoot}/heists/FuturesDelivery.json5" }, "Components" : { "HttpBrowser" : { "HeadConfigGroup" : [ { "AgentName" : "Mozilla/5.0", "UserAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" }, { "AgentName" : "Baiduspider", "UserAgent" : "Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" } ], "AgentConfusion" : true, "SystemProxy" : { "host": "127.0.0.1", "port": 7890 }, "ProxyGroup" : [ { "host": "127.0.0.1", "port": 7890 } ], //"ProxyStrategy" : "NoProxy", "ProxyStrategy" : "SystemOnly", //"ProxyStrategy" : "ProxyGroup", "EnableRandomDelay" : false, "RandomDelayMin" : 9200, "RandomDelayMax" : 10500, "SocketTimeout" : 60000, "Charset" : "UTF-8" } } } ================================================ FILE: system/setup/heists/Apesk.json5 ================================================ { "HeistArch" : "Radium", "HeistType" : "Templated", "HeistURL" : "https://apesk.com", //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages 
as inlet) // "SubHref" : "/p/result_for_gzh.asp?rid=", // "IndexPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/mbti/index/", // "SpoilPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/mbti/pages/", // "TaskFrom" : 20419496, // "TaskTo" : 50274406, "SubHref" : "/p/result_for_gzh.asp?rid=", "IndexPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/big5/index/", "SpoilPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/big5/pages/", "WorkingPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/Apesk/", "MaximumThread" : 1, "FragBase" : 10000, "FragRange" : 1000000, "FailureConf" : { "FailedFileSize" : 1000, "FileRetrieveTime" : 1 }, "HttpBrowser" : { "HeadConfigGroup" : [ { "AgentName" : "Mozilla/5.0", "UserAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" }, { "AgentName" : "Baiduspider", "UserAgent" : "Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" } ], "AgentConfusion" : true, "SystemProxy" : { "host": "127.0.0.1", "port": 7890 }, "ProxyGroup" : [ { "host": "127.0.0.1", "port": 7890 } ], //"ProxyStrategy" : "NoProxy", "ProxyStrategy" : "SystemOnly", //"ProxyStrategy" : "ProxyGroup", "EnableRandomDelay" : false, "RandomDelayMin" : 1000, "RandomDelayMax" : 2000, "SocketTimeout" : 20000, "Charset" : "UTF-8" }, "FromDeathPoint" : true, //"Metier" : "Stalker", "Metier" : "Reaver", //"Metier" : "Embezzler", "QueryCookie" : "__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH", "InletSitemap" : "sitemap-index.xml" } ================================================ FILE: system/setup/heists/ArtStation.json5 ================================================ { "HeistType" : "Templated", "HeistURL" : "https://www.artstation.com", //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages as inlet) "IndexPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/ArtStation/inlet/index/", "SpoilPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/ArtStation/inlet/pages/", "WorkingPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/ArtStation/", "TaskFrom" : 0000000, "TaskTo" : 1383854, "MaximumThread" : 5, "FragBase" : 10000, "FragRange" : 1000000, "FailureConf" : { "FailedFileSize" : 1000, "FileRetrieveTime" : 1 }, "HttpBrowser" : { "HeadConfigGroup" : [ { "AgentName" : "Mozilla/5.0", "UserAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" }, { "AgentName" : "Baiduspider", "UserAgent" : "Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" } ], "AgentConfusion" : true, "SystemProxy" : { "host": "127.0.0.1", "port": 7890 }, "ProxyGroup" : [ { "host": "127.0.0.1", "port": 7890 } ], //"ProxyStrategy" : "NoProxy", "ProxyStrategy" : "SystemOnly", //"ProxyStrategy" : "ProxyGroup", "EnableRandomDelay" : false, "RandomDelayMin" : 1000, "RandomDelayMax" : 2000, "SocketTimeout" : 20000, "Charset" : "UTF-8" }, "FromDeathPoint" : true, "Metier" : "Stalker", //"Metier" : "Reaver", //"Metier" : "Embezzler", "QueryCookie" : 
"__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH", "SiteMaps": { "sitemap_inlet": { "href": "https://www.artstation.com/sitemap.xml" }, } } ================================================ FILE: system/setup/heists/Chronicle.json5 ================================================ { "HeistType" : "Period", "ChronicPerAcc" : 750, // ms "ChronicPeriods" : [ "0 0 0 * * *", "0 0 3 * * *", "0 0 6 * * *", "0 0 9 * * *", "0 0 12 * * *", "0 0 15 * * *", "0 0 18 * * *", "0 0 21 * * *" //,"* * * * * *" ], "PrimaryRDB" : "MySQLKingData0", "NewsDataTable" : "nona_news_index_chronic", "RaiderMarshaling" : { "DirectlyLoad" : { "Prefix": [], "Suffix": [ "Raider", "Clerk" ] }, "RaiderScopes": [ "com.sauron.zeron.heists" ] }, "FromDeathPoint" : true, //"Metier" : "Stalker", "Metier" : "Reaver", //"Metier" : "Embezzler", "Children" : { "Newstron": { "RaiderMarshaling" : { "UsingSedation" : true, "UsingUniformFeast" : true, "Raiders" : { "SimpleAjaxBased" : { "Tasks": [ { "ObjectName": "JinRiTouTiaoTops", "Api": "https://www.toutiao.com/hot-event/hot-board/?origin=toutiao_pc" }, { "ObjectName": "BaiduIndex" , "Api": "https://index.baidu.com/Interface/homePage/wiseConfig" }, { "ObjectName": "WeiboHot" , "Api": "https://weibo.com/ajax/side/hotSearch" }, { "ObjectName": "BaiduTiebaHot" , "Api": "https://tieba.baidu.com/hottopic/browse/topicList" } ] }, "Zhihu" : { "HotlineApi": "https://www.zhihu.com/api/v4/creators/rank/hot?domain=0", //https://www.zhihu.com/api/v4/creators/rank/hot?domain=0&limit=20&offset=20&period=hour "Topstory" : "https://www.zhihu.com/api/v3/feed/topstory/hot-lists/total?limit=100", "TopSearch" : "https://www.zhihu.com/api/v4/creators/rank/hot?domain=0&period=hour", "Global" : { "IndexFrom" : 0, "IndexTo" : 100, "IndexStep" : 100 } }, "Baidu" : { // "ChronicPeriods" : [ // "0 0 0 * * *", "* * * * * *" // ], "TopHref": "https://top.baidu.com/board?tab=realtime" }, "BaiduTieba" : { "TopHref": "https://search.prod.di.api.cnn.io/content?q=&size=%d&from=0&page=1&sort=newest&request_id=%s" }, "Google" : { "NewsHref": "https://www.google.com/search?q=news&source=lnms&tbm=nws&start=", "TopN": 5 }, "CNN" : { "NewsHref": "https://search.prod.di.api.cnn.io/content?q=&size=%d&from=0&page=1&sort=newest&request_id=%s", "TopN": 50, "request_id": "pdx-search" }, "Personage" : { "ChronicPeriods" : [ "0/30 * * * * *" ] } }, "MarshalingList" : [ "SimpleAjaxBased", "Zhihu", "Baidu", "Google", "CNN", "Personage" ] }, "Orchestration" : { "Type": "Parallel", // Enum: { Sequential, Parallel, Loop } "Transactions": [ //{ "Name": "Jesus", "Type": "Sequential" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, ] }, "HttpBrowser" : { } } }, "Orchestration" : { "Name": "ChronicleOrchestrator", "Type": "Parallel", // Enum: { Sequential, Parallel, Loop } "Transactions": [ { "Name": "Newstron", "Type": "Parallel" }, //{ "Name": "Newstron", "Type": "Parallel" } ] }, "HttpBrowser" : { } } ================================================ FILE: system/setup/heists/DeviantArt.json5 ================================================ { "HeistArch" : "Radium", "HeistType" : "Templated", "HeistURL" : "https://www.deviantart.com", // https://www.deviantart.com/sitemap-index.xml.gz //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages as inlet) "IndexPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/DeviantArt/inlet/index/", 
"SpoilPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/DeviantArt/inlet/pages/", "WorkingPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/DeviantArt/", "TaskFrom" : 0000000, "TaskTo" : 1383854, "MaximumThread" : 5, "FragBase" : 10000, "FragRange" : 1000000, "FailureConf" : { "FailedFileSize" : 1000, "FileRetrieveTime" : 1 }, "HttpBrowser" : { "HeadConfigGroup" : [ { "AgentName" : "Mozilla/5.0", "UserAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" }, { "AgentName" : "Baiduspider", "UserAgent" : "Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" } ], "AgentConfusion" : true, "SystemProxy" : { "host": "127.0.0.1", "port": 7890 }, "ProxyGroup" : [ { "host": "127.0.0.1", "port": 7890 } ], //"ProxyStrategy" : "NoProxy", "ProxyStrategy" : "SystemOnly", //"ProxyStrategy" : "ProxyGroup", "EnableRandomDelay" : false, "RandomDelayMin" : 1000, "RandomDelayMax" : 2000, "SocketTimeout" : 20000, "Charset" : "UTF-8" }, "FromDeathPoint" : true, "Metier" : "Stalker", //"Metier" : "Reaver", //"Metier" : "Embezzler", "QueryCookie" : "__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH", "InletSitemap" : "sitemap-index.xml" } ================================================ FILE: system/setup/heists/DouBan.json5 ================================================ { "HeistType" : "Templated", "HeistURL" : "https://www.douban.com", "HeistTopicName" : "book",//"music", // Null using id-index. "IndexPath" : "${OmniumKingpin0}/Sauron/Heist/DouBan/index/", "SpoilPath" : "${OmniumKingpin0}/Sauron/Heist/DouBan/Pages/", //"SpoilPath" : "${OmniumKingStream0}/Sauron/Heist/DouBan/Pages/", "WorkingPath" : "${ARBOmnium}/Sauron/Heist/DouBan/", "TaskFrom" : 4900000, "TaskTo" : 5811459, //1002823, // book 5811459 ilmen 38091 location 37372 movie 7131301 music 1002823 www 420893077 8700 435014842 //"TaskFrom" : 0, //"TaskTo" : 100, //1002823, // book 5811459 ilmen 38091 location 37372 movie 7131301 music 1002823 www 420893077 8700 435014842 "MaximumThread" : 1, // Do not too fast, fuck douban ! "ReaverTasks" : 1000, "FailureConf" : { "FailedFileSize" : 14000, "FileRetrieveTime" : 1 }, "FromDeathPoint" : true, "Metier" : "Reaver", //"Metier" : "Embezzler", "BrowserSimConf" : { "HeadConfigGroup" : [ // { // "MaxHTTPPackageSize" : 1024, // "AgentName" : "Mozilla/5.0", // "UserAgentHead" : "User-Agent: Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" // }, { "MaxHTTPPackageSize" : 1024, "AgentName" : "Mozilla/5.0", "UserAgentHead" : "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" } ], "GroupConfusion" : true, "EnableRandomDelay" : true, "RandomDelayMin" : 9200, "RandomDelayMax" : 10500 }, "EdgeGuerrillaHeist" : { "EnableGuerrilla" : false, "KingpinHost" : "127.0.0.1", "KingpinPort" : 9901 }, "SitemapApiHref" : "sitemap_index.xml", "MapIndexPath" : "${OmniumFacility}/Heist/DouBan/", "UsingBuffIdxQuery" : true, "BuffIdxQuerySize" : 10000, // -1 for SELECT ALL. 
}

================================================
FILE: system/setup/heists/DownloadCNet.json5
================================================
{
    "HeistType" : "Templated",
    "HeistURL" : "https://download.cnet.com",
    //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages as inlet)
    "IndexPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion3/Sauron/Heist/DownloadCNet/index/",
    "SpoilPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion3/Sauron/Heist/DownloadCNet/pages/",
    "WorkingPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/DownloadCNet/",
    "TaskFrom" : 0000000,
    "TaskTo" : 1383854,
    "MaximumThread" : 5,
    "FragBase" : 10000,
    "FragRange" : 1000000,
    "FailureConf" : { "FailedFileSize" : 1000, "FileRetrieveTime" : 1 },
    "HttpBrowser" : {
        "HeadConfigGroup" : [
            { "AgentName" : "Mozilla/5.0", "UserAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" },
            { "AgentName" : "Baiduspider", "UserAgent" : "Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" }
        ],
        "AgentConfusion" : true,
        "SystemProxy" : { "host": "127.0.0.1", "port": 7890 },
        "ProxyGroup" : [ { "host": "127.0.0.1", "port": 7890 } ],
        //"ProxyStrategy" : "NoProxy",
        "ProxyStrategy" : "SystemOnly",
        //"ProxyStrategy" : "ProxyGroup",
        "EnableRandomDelay" : false,
        "RandomDelayMin" : 1000,
        "RandomDelayMax" : 2000,
        "SocketTimeout" : 20000,
        "Charset" : "UTF-8"
    },
    "FromDeathPoint" : true,
    //"Metier" : "Stalker",
    "Metier" : "Reaver",
    //"Metier" : "Embezzler",
    "SiteMaps": {
        "products": { "href": "https://download.cnet.com/sitemaps/products_index.xml" },
    }
}

================================================
FILE: system/setup/heists/NeteaseMusic.json5
================================================
{
    "HeistType" : "Templated",
    "HeistURL" : "https://music.163.com",
    //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages as inlet)
    "SubPathNode" : "Pages/",
    "WorkingMode" : "Expansion",
    //"SubPathNode" : "${this.WorkingMode}/",
    "IndexPath" : "${OmniumKingpin0}/Sauron/Heist/NeteaseMusic/index/",
    //"SpoilPath" : "I:/Sauron/Heist/NeteaseMusic/${this.SubPathNode}",
    "SpoilPath" : "${OmniumKingpin0}/Sauron/Heist/NeteaseMusic/Pages/",
    "WorkingPath" : "${ARBOmnium}/Sauron/Heist/NeteaseMusic/",
    "TaskFrom" : 1000000, // 1872 -> Netease's id range is indefinite; this value is the minimum inlet point.
    "TaskTo" : 10000000, // 13959689 !!! -> exhaustively sweep all possible artists; this value is an overestimated compact upper bound.
    "MaximumThread" : 4,
    "ReaverTasks" : 1000,
    "FailureConf" : {
        "FailedFileSize" : 14000, // For Netease this size metric is abandoned; keyword validation is used instead.
        "FileRetrieveTime" : 1
    },
    "FromDeathPoint" : true,
    //"Metier" : "Stalker",
    "Metier" : "Reaver",
    //"Metier" : "Embezzler",
    "BrowserSimConf" : {
        "HeadConfigGroup" : [
            { "MaxHTTPPackageSize" : 64, "AgentName" : "Mozilla/5.0", "UserAgentHead" : "User-Agent: Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" },
            { "MaxHTTPPackageSize" : 64, "AgentName" : "Mozilla/5.0", "UserAgentHead" : "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" }
        ],
        "GroupConfusion" : true,
        "EnableRandomDelay" : false,
        "RandomDelayMin" : 9200,
        "RandomDelayMax" : 10500,
        "CompressHTTPCon" : false, // Read only compressed stream content; note this disables persistent connections.
} } ================================================ FILE: system/setup/heists/PubChem.json5 ================================================ { "HeistType" : "Templated", "HeistURL" : "https://pubchem.ncbi.nlm.nih.gov", //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages as inlet) "IndexPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/PubChem/index/", "SpoilPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion4/Sauron/Heist/PubChem/pages/", "WorkingPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/PubChem/", "TaskFrom" : 00111000, "TaskTo" : 01000000, //20501203 "MaximumThread" : 8, "FragBase" : 10000, "FragRange" : 1000000, "FailureConf" : { "FailedFileSize" : 1000, "FileRetrieveTime" : 1 }, "HttpBrowser" : { "HeadConfigGroup" : [ { "AgentName" : "Mozilla/5.0", "UserAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" }, { "AgentName" : "Baiduspider", "UserAgent" : "Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" } ], "AgentConfusion" : true, "SystemProxy" : { "host": "127.0.0.1", "port": 7890 }, "ProxyGroup" : [ { "host": "127.0.0.1", "port": 7890 } ], //"ProxyStrategy" : "NoProxy", "ProxyStrategy" : "SystemOnly", //"ProxyStrategy" : "ProxyGroup", "EnableRandomDelay" : true, "RandomDelayMin" : 1000, "RandomDelayMax" : 2000, "SocketTimeout" : 20000, "Charset" : "UTF-8" }, "FromDeathPoint" : true, //"Metier" : "Stalker", "Metier" : "Reaver", //"Metier" : "Embezzler", "SiteMaps": { annotation: { "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=annotation/sitemap_index.xml" }, // "assay": { // "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=assay/sitemap_index.xml" // }, //// "source": { //// "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=source/sitemap.xml" //// }, // "gene": { // "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=gene/sitemap_index.xml" // }, // "protein": { // "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=protein/sitemap_index.xml" // }, // "patent": { // "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=patent/sitemap_index.xml" // }, //// "concept": { //// "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=concept/sitemap_1.xml" //// }, // "cell": { // "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=cell/sitemap_index.xml" // }, // "taxonomy": { // "href": "https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=taxonomy/sitemap_index.xml" // } } } ================================================ FILE: system/setup/heists/Steam.json5 ================================================ { "HeistArch" : "Radium", "HeistType" : "Templated", "HeistURL" : "https://store.steampowered.com", //"https://store.steampowered.com/search/?ndl=1&ignore_preferences=1&page=", //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages as inlet) "IndexPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/Steam/inlet/index/", "SpoilPath" : "\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/Steam/inlet/pages/", "WorkingPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/Steam/", "TaskFrom" : 0000000, "TaskTo" : 0006169, "MaximumThread" : 8, "FragBase" : 10000, "FragRange" : 1000000, "FailureConf" : { "FailedFileSize" : 1000, "FileRetrieveTime" : 1 }, "HttpBrowser" : { "HeadConfigGroup" : [ { "AgentName" : "Mozilla/5.0", 
"UserAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\n" }, { "AgentName" : "Baiduspider", "UserAgent" : "Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\n" } ], "AgentConfusion" : true, "SystemProxy" : { "host": "127.0.0.1", "port": 7890 }, "ProxyGroup" : [ { "host": "127.0.0.1", "port": 7890 } ], //"ProxyStrategy" : "NoProxy", "ProxyStrategy" : "SystemOnly", //"ProxyStrategy" : "ProxyGroup", "EnableRandomDelay" : false, "RandomDelayMin" : 1000, "RandomDelayMax" : 2000, "SocketTimeout" : 20000, "Charset" : "UTF-8" }, "FromDeathPoint" : true, //"Metier" : "Stalker", "Metier" : "Reaver", //"Metier" : "Embezzler", "QueryCookie" : "__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH", "ExtendMode" : "Instance", "InstanceExtend" : "FetchInletList" } ================================================ FILE: system/setup/heists/UrukhaiHeists.json5 ================================================ /** Bean Nuts Hazelnut Sauron Nonabyte(Nonaron) Heistum-Urukhai-heists Model Configs Templated Reprogrammable Auto-Crawler Sub-system [ ReaverSystem ->override-> OrcsSystem ] Nomenclature: for those Explicit-Index or Explicit-Inlet-Terminator naming with corporeal-fantasy-figure. [ Troll, Orc, Ghoul, Hellhound ] for those Conundrum-Index or Conundrum-Inlet-Terminator naming with incorporeal-fantasy-figure. [ Wraith, Ghast ] this->Archetypes: { "Troll" => [SimpletonIndex] DOM-Page-Index-Based Auto-Massive-Crawler [DataUnitRange: 1 GB ~ 1 TB], "Orc" => [SimpletonIndex] Sitemap-Index-Based Auto-Massive-Crawler [DataUnitRange: 1 GB ~ 1 TB], "Ghoul" => [SavageSniffer ] Recursive Whole-Site-Links-Parse Savage-Sniffer-Crawler [DataUnitRange: 100 GB ≤], "Cerberus" => [PursuitSniffer] Full-Text-Index-Based Pursuit-Sniffer-Crawler [DataUnitRange: Unlimited] } Templated: this->Archetypes: [ DictionaryWebsites, WikiLikesWebsites, MovieWebsites, NewsWebsites ] **/ { "HeistType" : "Templated", //"HeistURL" : "", // @Override "IndexPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\Urukhai\\index\\", "SpoilPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\MegaH\\Sauron\\Urukhai\\pages\\", "FragBase" : 10000, "FragRange" : 1000000, "WorkingPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Sauron\\Heist\\Urukhai\\", "InfoTracer" : "heistInfo.log", "ErrTracer" : "heistErr.log", "SysTracer" : "heistSys.log", "DyingMsgFile" : "dyingMsg.json5", // Enable full-status tombstone dying msg. Empty string to close. 
"TaskFrom" : 0, "TaskTo" : 1000000, // 16906325 "MaximumThread" : 5, "ReaverTasks" : 10000, "FailureConf" : { "FailedFileSize" : 2000, "FileRetrieveTime" : 3 }, "FromDeathPoint" : true, "Metier" : "Reaver", "SnifferMode" : "IndexSniffer", "IndexSniffer" : { "Type" : "NextPageBased", "IndexApiHref" : "wiki/Special:AllPages", "NextPageClassName" : ".mw-allpages-nav", "NextPageKeyWord" : "Next page", // Defaulted "StorageFmt" : "index_${id}.html", "NextHrefFmt" : "${DomainHref}${this}" }, /*"IndexSniffer" : { "Type" : "DOMPageIdBased", "IndexApiHref" : "wiki/Special:AllPages", "PageIdFrom" : 0, "PageIdTo" : 10000, },*/ /*"IndexSniffer" : { "Type" : "SitemapBased", "SitemapApiHref" : "sitemap_index.xml", "MapIndexPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\DouBan\\" },*/ "ExtendMode" : "Instance", //"InstanceExtend" : "WikipediaCN", "InstanceExtend" : "LatinIsSimple", //"InstanceExtend" : "AZLyrics", "Children" : { "LatinIsSimple" : { "HeistURL" : "https://www.latin-is-simple.com", "IndexPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\LatinIsSimple\\index\\", "SpoilPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\LatinIsSimple\\pages\\", "WorkingPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Sauron\\Heist\\LatinIsSimple\\", "TaskFrom" : 0, "TaskTo" : 500000, // 2491995 "MaximumThread" : 5, "ReaverTasks" : 10000, "Metier" : "Embezzler", //"Stalker", "SnifferMode" : "GraphCerberus", "IndexSniffer" : { "IndexApiHref" : "/en/vocabulary/noun/", "NextSelector" : ".next", "NextPageKeyWord" : "Next", "NextHrefFmt" : "${DomainHref}${IndexApiHref}${this}" }, "GraphCerberus" : { "DomainHref" : "https://www.latin-is-simple.com", "DirectionPath" : [ { "StratumName" : "MajorStratum", "SiblingLayers" : [ { "LayerName" : "EnumAllTypeWords", "DomainHref" : "https://www.latin-is-simple.com", "IndexMajorHref" : "/en/vocabulary/", "LayerType" : "SnifferLayer", "StoragePathFmt" : "", "NextHrefFmt" : "https:${this}", "NextLayerSelector" : "section:nth-child(7) p:nth-child(2) a", "NextLayerFetchType" : "BFS", "NextLayers" : { "LayerName" : "FetchStorageLayer", "DomainHref" : "https://www.latin-is-simple.com", "IndexMajorHref" : "", "LayerType" : "FetchStorageLayer", "CategoryIdxMap" : [ "Nouns", "Verbs", "Adjectives", "Adverbs", "OtherWords", "Phrases", "Groups" ], "StoragePathFmt" : "${parent.IndexPath}${LayerID}_${this.CategoryIdxMap[LayerID]}/${PageID}.html", "NextHrefFmt" : "${DomainHref}${IterMajorHref}${this}", "NextHrefKeyWord" : "Next", "NextLayerSelector" : ".next a", "NextLayerFetchType" : "ChainIterUntil", "NextLayers" : null } } ] } ] }, "DOMCentaur" : { "Templated": [ [{ // Make array "WordGroup" : [ "$primary td:nth-child(1)" ], "EnglishGroup" : [ "$array td:nth-child(1)" ] }], ] } } } } ================================================ FILE: system/setup/heists/Void.json5 ================================================ { "HeistType" : "Templated", "HeistURL" : "https://pubchem.ncbi.nlm.nih.gov", //"WorkingMode" : "", // Exhaust all possible inlet pages (Artist pages as inlet) "IndexPath" : "${OmniumKingpin0}/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/PubChem/index/", "SpoilPath" : "${OmniumKingpin0}/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/PubChem/pages/", "WorkingPath" : "\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/PubChem/", "TaskFrom" : 0, "TaskTo" : 1, "MaximumThread" : 5, "FragBase" : 10000, "FragRange" : 1000000, "FailureConf" : { "FailedFileSize" : 1000, "FileRetrieveTime" : 1, }, 
"FromDeathPoint" : true, //"Metier" : "Stalker", "Metier" : "Reaver", //"Metier" : "Embezzler", "Children" : { "Jesus": { "IndexPath" : "Hello hi, I am Jesucristo", "parentk" : "${super.k}", "TaskFrom" : 777, "TaskTo" : 778, "Orchestration" : { "Type": "Parallel", // Enum: { Sequential, Parallel, Loop } "Transactions": [ //{ "Name": "Jesus", "Type": "Sequential" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }, ] }, "HttpBrowser" : { "Charset" : "UTF-10" } }, "Satan": { "IndexPath" : "Hello hi, I am Satanas", "TaskFrom" : 666, "TaskTo" : 667, }, "Rick": { "IndexPath" : "Hello hi, I am Rick", "TaskFrom" : 137, "TaskTo" : 138, "Children" : { "Sauron": { "IndexPath" : "Hello hi, I am Sauron", "TaskFrom" : 999, "TaskTo" : 1000, }, "Cthulhu": { "IndexPath" : "Hello hi, I am Cthulhu", "TaskFrom" : 1024, "TaskTo" : 1025, }, "Absolute": { "IndexPath" : "Hello hi, I am Elder Brain", "TaskFrom" : 2048, "TaskTo" : 2049, } }, "Orchestration" : { "Type": "Parallel", "Transactions": [ { "Name": "Sauron", "Type": "Sequential" }, { "Name": "Child" , "Type": "ParallelActions", "Transactions": [ { "Name": "Cthulhu", "Type": "Parallel" }, { "Name": "Absolute", "Type": "Parallel" } ] }, ] } } }, "Orchestration" : { "Name": "VoidOrchestrator", "Type": "Parallel", // Enum: { Sequential, Parallel, Loop } "Transactions": [ /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ { "Name": "Jesus", "Type": "Sequential" }, { "Name": "Satan", "Type": "Sequential" }, { "Name": "Rick" , "Type": "Sequential" } ] }, "HttpBrowser" : { "RandomDelayMin" : 7418, "Charset" : "UTF-9" } } ================================================ FILE: system/setup/heists/Wikipedia.json5 ================================================ { "HeistType" : "Templated", "HeistURL" : "https://en.wikipedia.org", "IndexPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\Wiki\\index\\", "SpoilPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\MegaH\\Sauron\\Wiki\\Pages\\", //"Z:\\ARBFacility\\Wiki\\Pages\\", "FragBase" : 10000, "FragRange" : 1000000, "WorkingPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Sauron\\Heist\\Wiki\\", "TaskFrom" : 0, "TaskTo" : 1000000, // 16906325 "MaximumThread" : 5, "ReaverTasks" : 10000, "FromDeathPoint" : true, "Metier" : "Reaver", "IndexSniffer" : { "Type" : "NextPageBased", "IndexApiHref" : "/wiki/Special:AllPages", "NextSelector" : ".mw-allpages-nav", "NextPageKeyWord" : "Next page" }, "ExtendMode" : "Instance", //"InstanceExtend" : "WikipediaCN", //"InstanceExtend" : "YiXueCom", "InstanceExtend" : "Wiktionary", //"InstanceExtend" : "Wikipedia", "Children" : { "Wikipedia" : { "Metier" : "Embezzler" }, "WikipediaCN" : { "HeistURL" : "https://zh.wikipedia.org", "IndexPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\WikiCN\\index\\", "SpoilPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Kingpin0\\Sauron\\Heist\\WikiCN\\pages\\", "WorkingPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Sauron\\Heist\\WikiCN\\", "TaskFrom" : 0, "TaskTo" : 500000, // 2491995 "MaximumThread" : 5, "ReaverTasks" : 10000, "Metier" : "Reaver", "IndexSniffer" : { "IndexApiHref" : "/wiki/Special:%E6%89%80%E6%9C%89%E9%A1%B5%E9%9D%A2", "NextSelector" : ".mw-allpages-nav", "NextPageKeyWord" : "下一页" } }, "Wiktionary" : { "HeistURL" : "https://en.wiktionary.org", "IndexPath" : "${OmniumKingStream0}\\Sauron\\Heist\\Wiktionary\\index\\", "SpoilPath" : "${OmniumKingStream0}\\Sauron\\Heist\\Wiktionary\\pages\\", 
"WorkingPath" : "${OmniumFacility}\\Sauron\\Heist\\Wiktionary\\", "TaskFrom" : 0, "TaskTo" : 7268746, // 2491995 "MaximumThread" : 8, "ReaverTasks" : 1000000, //"SQLPath" : "${OmniumKingpin0}\\Sauron\\Heist\\", "SQLPath" : "${OmniumFacility}\\SQLs/", //"SQLPath" : "E:\\", "Metier" : "Embezzler", //"Metier" : "Reaver", "WordTypeProperties" : { "Noun": "Noun", "Proper noun": "ProperNoun", "Adjective": "Adjective", "Adverb": "Adverb", "Verb": "Verb", "Conjugation": "Conjugation", "Interjection": "Interjection", "Article": "Article", "Preposition": "Preposition", "Abbreviations": "Abbreviations", "Abbreviation": "Abbreviation", "Determiner": "Determiner", "Particle": "Particle", "Letter": "Letter", "Pronoun": "Pronoun", "Auxiliary": "Auxiliary", "Contraction": "Contraction", "Numeral": "Numeral", "Proverb": "Proverb", "Participle": "Participle", "Conjunction": "Conjunction", "Phrase": "Phrase", "Number": "Number", "Postposition": "PostPosition", "Symbol": "Symbol", "Suffix": "Suffix", "Root": "Root", "Prefix": "Prefix", "Han character": "HanCharacter", "Decimal fractions": "DecimalFractions", "Affix": "Affix","Stem": "Stem", "Preverb": "Preverb", "Infix": "Infix","Interfix": "Interfix", "Romanization": "Romanization", "Proverbs": "Proverbs", "Abstract nouns": "AbstractNouns", "Concrete nouns": "ConcreteNouns" }, "IndexSniffer" : { "IndexApiHref" : "/wiki/Special:AllPages", "NextSelector" : ".mw-allpages-nav", "NextPageKeyWord" : "Next page" } }, "YiXueCom" : { "HeistURL" : "https://www.yixue.com", "IndexPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\YiXueCom\\index\\", "SpoilPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Heist\\YiXueCom\\Pages\\", "WorkingPath" : "\\\\B-SERVERKINGPIN\\ARBOmnium\\EnderChest\\Facility\\Sauron\\Heist\\YiXueCom\\", "TaskFrom" : 0, "TaskTo" : 500000, "MaximumThread" : 5, "ReaverTasks" : 10000, "Metier" : "Stalker", "IndexSniffer" : { "IndexApiHref" : "/%E7%89%B9%E6%AE%8A:%E6%89%80%E6%9C%89%E9%A1%B5%E9%9D%A2", "NextSelector" : ".mw-allpages-nav", "NextPageKeyWord" : "下一页" } } } } ================================================ FILE: system/setup/lords/odin.json5 ================================================ { "Name" : "KernelOdinLord", "MainClass": "com.walnut.odin.system.Odin", "LifecycleWithPrimarySystem" : false, "metaDependent": { "atlasDatabase" : "MySQLKingHydranium", "taskInstrument" : "MySQLKingHydranium", "controlRPCDriver" : "TaskWolfKing", "processManager" : "__SystemTaskManager__", }, "kernelConfig": { "instanceTitleTimeFormat": "yyyy_MM_dd_HH_mm_ss", "defaultDateTimeFormat": "yyyy-MM-dd HH:mm:ss", "scheduleScanThreadCount": 8, "scheduleScanIdWindow": 1000 }, "scheduler": { "partitionName": "__DEFAULT__", "globalDispatcher": { "__DEFAULT__": { "name": "__DEFAULT__", "globalConcurrentInstance": 100000, "quota": { "L0": { "priority": 50, "maximumRatio": 0.3, "minimumRatio": 0.1, }, "L1": { "priority": 40, "maximumRatio": 0.2, "minimumRatio": 0.1, }, "L2": { "priority": 30, "maximumRatio": 0.2, "minimumRatio": 0.1, }, "L3": { "priority": 20, "maximumRatio": 0.15, "minimumRatio": 0.05, }, "L4": { "priority": 10, "maximumRatio": 0.1, "minimumRatio": 0.05, }, "L5": { "priority": 0, "maximumRatio": 0.05, "minimumRatio": 0.01, }, "unlimited": { "priority": 500, "maximumRatio": -1.0, "minimumRatio": -1.0, "maximumCnt": -1, "minimumCnt": -1, }, "default": { "priority": -1, "maximumRatio": -1.0, "minimumRatio": -1.0, "maximumCnt": 1000, "minimumCnt": 1000, }, } } }, }, } ================================================ 
FILE: system/setup/lords/redqueen.json5
================================================
{ "Name" : "KernelRedQueenLord", "MainClass": "com.acorn.redqueen.RedQueen", "LifecycleWithPrimarySystem" : true }

================================================
FILE: system/setup/lords/skynet.json5
================================================
{ "Name" : "KernelSkynetLord", "MainClass": "com.acorn.skynet.Skynet", "LifecycleWithPrimarySystem" : true }

================================================
FILE: system/setup/sparta/AccountServiceSpring.json5
================================================
{
    "server": { "port": 8081, "servlet": { "context-path": "/" } },
    "spring": {
        "servlet": { "multipart": { "max-file-size": "4096MB", "max-request-size": "4096MB" } },
        "datasource": {
            // "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true",
            // "username": "root",
            // "password": "123456",
            "url": "jdbc:mysql://b-serverkingpin:33062/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true",
            "username": "root",
            "password": "",
            "driver-class-name": "com.mysql.cj.jdbc.Driver"
        },
        "mybatis":{ "mapper-locations": "classpath:mapper/*.xml" },
        "sparta": { "datasource": "mysql" }
    }
}

================================================
FILE: system/setup/sparta/SpartaUCDNService.json5
================================================
{
    "server": { "port": 8082, "servlet": { "context-path": "/" } },
    "spring": {
        "servlet": { "multipart": { "max-file-size": "4096MB", "max-request-size": "4096MB" } },
        "datasource": {
            // "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true",
            // "username": "root",
            // "password": "123456",
            "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8",
            "username": "root",
            "password": "123456",
            "driver-class-name": "com.mysql.cj.jdbc.Driver"
        },
        "mybatis":{ "mapper-locations": "classpath:mapper/*.xml" },
        "sparta": { "datasource": "mysql", }
    },
    "service": {
        "LocalUploadTemporaryWorkingDirectory": "D:/文件系统/temp",
        "TemporaryFileExtends": ".temp",
        "PrimaryUniformFileSystem": { "DefaultVolumeGuid":'1b18a5e-0002af-0000-3c', "DefaultTempFilePath": 'D:/文件系统/temp/' },
        "PrimaryUniformVolumeManager": { "DefaultVolumeGuid":'1b18a5e-0002af-0000-3c', "DefaultTempFilePath": 'D:/文件系统/temp/' },
        "ClusterFileSynchronizationConfig": {
            "fileFrameSize": 972800,
            "batchTransmitMemberThreshold": 10,
            "sessionExpiredTimeMillis": 7200000,
            "fileCloudDistributeTransmitTopic": "ucdn-file-cloud-distribute-transmit-topic",
            "fileCloudDistributeEventTopic": "ucdn-file-cloud-distribute-event-topic",
            "fileServiceTransmitGroup": "UCDNFileServiceTransmitGroup",
            "temporaryFileExtends": ".temp",
            "majorTemporaryClusterFileDirectory": "D:/文件系统/temp",
            "localMasterTemporaryClusterFileDirectory": "D:/文件系统/frameTemp" // In production, set this equal to `majorTemporaryClusterFileDirectory`.
        }
    }
}

================================================
FILE: system/setup/sparta/SpartaUISService.json5
================================================
{
    "server": { "port": 8080, "servlet": { "context-path": "/" } },
    "spring": {
        "servlet": { "multipart": { "max-file-size": "4096MB", "max-request-size": "4096MB" } },
        "datasource": {
            // "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true",
            // "username": "root",
            // "password": "123456",
            "url":
"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8", "username": "root", "password": "123456", "driver-class-name": "com.mysql.cj.jdbc.Driver", //德鲁伊连接池配置 "type": "com.alibaba.druid.pool.DruidDataSource", "druid": { "initial-size": 5, "min-idle": 5, "max-active": 20, "max-wait": 60000, "time-between-eviction-runs-millis": 60000, "min-evictable-idle-time-millis": 300000 } }, "mybatis":{ "mapper-locations": "classpath:mapper/*.xml" }, "sparta": { "datasource": "mysql", } }, "service": { } } ================================================ FILE: system/setup/sparta/SpartaUOFSService.json5 ================================================ { "server": { "port": 8080, "servlet": { "context-path": "/" } }, "spring": { "servlet": { "multipart": { "max-file-size": "4096MB", "max-request-size": "4096MB" } }, "datasource": { // "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true", // "username": "root", // "password": "123456", "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8", "username": "root", "password": "123456", "driver-class-name": "com.mysql.cj.jdbc.Driver" }, "mybatis":{ "mapper-locations": "classpath:mapper/*.xml" }, "sparta": { "datasource": "mysql", } }, "service": { "LocalUploadTemporaryWorkingDirectory": "D:/文件系统/temp", "TemporaryFileExtends": ".temp", "PrimaryUniformFileSystem": { "DefaultVolumeGuid":'1b18a5e-0002af-0000-3c', "DefaultTempFilePath": 'D:/文件系统/temp/' }, "PrimaryUniformVolumeManager": { "DefaultVolumeGuid":'1b18a5e-0002af-0000-3c', "DefaultTempFilePath": 'D:/文件系统/temp/' }, "ClusterFileSynchronizationConfig": { "fileFrameSize": 972800, "batchTransmitMemberThreshold": 10, "sessionExpiredTimeMillis": 7200000, "fileCloudDistributeTransmitTopic": "ucdn-file-cloud-distribute-transmit-topic", "fileCloudDistributeEventTopic": "ucdn-file-cloud-distribute-event-topic", "fileServiceTransmitGroup": "UCDNFileServiceTransmitGroup", "temporaryFileExtends": ".temp", "majorTemporaryClusterFileDirectory": "D:/文件系统/temp", "localMasterTemporaryClusterFileDirectory": "D:/文件系统/frameTemp" // Online, replace this as equals to the `majorTemporaryClusterFileDirectory` } } } ================================================ FILE: system/setup/sparta/SpartaUTASKService.json5 ================================================ { "server": { "port": 5080, "servlet": { "context-path": "/" } }, "spring": { "servlet": { "multipart": { "max-file-size": "4096MB", "max-request-size": "4096MB" } }, "datasource": { // "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true", // "username": "root", // "password": "123456", "url": "jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8", "username": "root", "password": "$IDCWolf19310918", "driver-class-name": "com.mysql.cj.jdbc.Driver", //德鲁伊连接池配置 "type": "com.alibaba.druid.pool.DruidDataSource", "druid": { "initial-size": 5, "min-idle": 5, "max-active": 20, "max-wait": 60000, "time-between-eviction-runs-millis": 60000, "min-evictable-idle-time-millis": 300000 } }, "mybatis":{ "mapper-locations": "classpath:mapper/*.xml" }, "sparta": { "datasource": "mysql", } }, "service": { } }