Repository: refactorfirst/RefactorFirst Branch: main Commit: 559631f04db0 Files: 187 Total size: 1.1 MB Directory structure: gitextract_nn926aoz/ ├── .github/ │ ├── FUNDING.yml │ └── workflows/ │ ├── codesee-arch-diagram.yml │ ├── maven-pr.yml │ ├── maven.yml │ └── release.yml ├── .gitignore ├── CITATIONS.md ├── LICENSE ├── README.md ├── change-proneness-ranker/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── git/ │ │ ├── ChangePronenessRanker.java │ │ ├── GitLogReader.java │ │ └── ScmLogInfo.java │ └── test/ │ └── java/ │ └── org/ │ └── hjug/ │ └── git/ │ ├── ChangePronenessRankerTest.java │ └── GitLogReaderTest.java ├── cli/ │ ├── .gitignore │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── org/ │ └── hjug/ │ └── refactorfirst/ │ ├── Main.java │ ├── ReportCommand.java │ └── ReportType.java ├── codebase-graph-builder/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── graphbuilder/ │ │ ├── CodebaseGraphDTO.java │ │ ├── DependencyCollector.java │ │ ├── GraphBuilderConfig.java │ │ ├── GraphDependencyCollector.java │ │ ├── JavaGraphBuilder.java │ │ └── visitor/ │ │ ├── BaseCodebaseVisitor.java │ │ ├── BaseTypeProcessor.java │ │ ├── FqnCapturingProcessor.java │ │ ├── JavaClassDeclarationVisitor.java │ │ ├── JavaFqnCapturingVisitor.java │ │ ├── JavaMethodDeclarationVisitor.java │ │ ├── JavaVariableTypeVisitor.java │ │ ├── JavaVisitor.java │ │ └── TypeDependencyExtractor.java │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── graphbuilder/ │ │ ├── JavaGraphBuilderTest.java │ │ └── visitor/ │ │ ├── JavaClassDeclarationVisitorTest.java │ │ ├── JavaFqnCapturingVisitorTest.java │ │ ├── JavaInitializerBlockVisitorTest.java │ │ ├── JavaLambdaVisitorTest.java │ │ ├── JavaMethodDeclarationVisitorTest.java │ │ ├── JavaMethodInvocationVisitorTest.java │ │ ├── JavaNewClassVisitorFullTest.java │ │ ├── JavaNewClassVisitorTest.java │ │ ├── JavaVariableTypeVisitorTest.java │ │ ├── 
JavaVisitorTest.java │ │ └── testclasses/ │ │ ├── A.java │ │ ├── B.java │ │ ├── C.java │ │ ├── D.java │ │ ├── E.java │ │ ├── F.java │ │ ├── G.java │ │ ├── H.java │ │ ├── MyAnnotation.java │ │ ├── MyOtherAnnotation.java │ │ ├── initializers/ │ │ │ ├── ComplexInitializerClass.java │ │ │ └── InitializerBlockTestClass.java │ │ ├── lambda/ │ │ │ ├── DataProcessor.java │ │ │ ├── HelperClass.java │ │ │ ├── LambdaTestClass.java │ │ │ └── NestedLambdaTestClass.java │ │ ├── methodInvocation/ │ │ │ ├── A.java │ │ │ ├── B.java │ │ │ ├── C.java │ │ │ └── D.java │ │ └── newClass/ │ │ ├── A.java │ │ ├── B.java │ │ └── C.java │ └── resources/ │ └── javaSrcDirectory/ │ └── com/ │ └── ideacrest/ │ └── parser/ │ └── testclasses/ │ ├── A.java │ ├── B.java │ ├── C.java │ ├── D.java │ └── E.java ├── cost-benefit-calculator/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── cbc/ │ │ ├── CostBenefitCalculator.java │ │ ├── CycleNode.java │ │ ├── CycleRanker.java │ │ ├── RankedCycle.java │ │ └── RankedDisharmony.java │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── cbc/ │ │ └── CostBenefitCalculatorTest.java │ └── resources/ │ ├── hudson/ │ │ └── model/ │ │ └── User.java │ └── org/ │ └── apache/ │ └── myfaces/ │ └── tobago/ │ └── facelets/ │ ├── AttributeHandler.java │ ├── AttributeHandler2.java │ └── AttributeHandlerAndSorter.java ├── coverage/ │ └── pom.xml ├── effort-ranker/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── metrics/ │ │ ├── CBOClass.java │ │ ├── Disharmony.java │ │ ├── GodClass.java │ │ ├── GodClassRanker.java │ │ └── rules/ │ │ └── CBORule.java │ └── test/ │ └── java/ │ └── org/ │ └── hjug/ │ └── metrics/ │ ├── CBOClassParsingTest.java │ ├── GodClassParsingTest.java │ └── GodClassRankerTest.java ├── graph-algorithms/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── org/ │ │ └── hjug/ │ │ ├── dsm/ │ │ │ ├── CircularReferenceChecker.java │ │ │ ├── DSM.java │ │ │ ├── 
EdgeRemovalCalculator.java │ │ │ ├── EdgeToRemoveInfo.java │ │ │ ├── OptimalBackEdgeRemover.java │ │ │ ├── SparseGraphCircularReferenceChecker.java │ │ │ └── SparseIntDWGEdgeRemovalCalculator.java │ │ └── feedback/ │ │ ├── SuperTypeToken.java │ │ ├── arc/ │ │ │ ├── EdgeInfo.java │ │ │ ├── EdgeInfoCalculator.java │ │ │ ├── approximate/ │ │ │ │ ├── FeedbackArcSetResult.java │ │ │ │ └── FeedbackArcSetSolver.java │ │ │ ├── exact/ │ │ │ │ ├── FeedbackArcSetResult.java │ │ │ │ └── MinimumFeedbackArcSetSolver.java │ │ │ └── pageRank/ │ │ │ ├── DIAGRAM.md │ │ │ ├── LineDigraph.java │ │ │ └── PageRankFAS.java │ │ └── vertex/ │ │ ├── approximate/ │ │ │ ├── FeedbackVertexSetResult.java │ │ │ └── FeedbackVertexSetSolver.java │ │ └── kernelized/ │ │ ├── DIAGRAM.md │ │ ├── DirectedFeedbackVertexSetResult.java │ │ ├── DirectedFeedbackVertexSetSolver.java │ │ ├── EnhancedParameterComputer.java │ │ ├── FeedbackVertexSetComputer.java │ │ ├── ModulatorComputer.java │ │ ├── ParameterComputer.java │ │ └── TreewidthComputer.java │ └── test/ │ └── java/ │ └── org/ │ └── hjug/ │ ├── dsm/ │ │ ├── CircularReferenceCheckerTests.java │ │ ├── DSMTest.java │ │ ├── EdgeRemovalCalculatorTest.java │ │ └── OptimalBackEdgeRemoverTest.java │ └── feedback/ │ ├── SuperTypeTokenTest.java │ ├── arc/ │ │ ├── approximate/ │ │ │ ├── FeedbackArcSetBenchmarkTest.java │ │ │ ├── FeedbackArcSetExample.java │ │ │ └── FeedbackArcSetSolverTest.java │ │ ├── exact/ │ │ │ ├── MinimumFeedbackArcSetBenchmarkTest.java │ │ │ ├── MinimumFeedbackArcSetExample.java │ │ │ └── MinimumFeedbackArcSetSolverTest.java │ │ └── pageRank/ │ │ ├── PageRankFASExample.java │ │ └── PageRankFASTest.java │ └── vertex/ │ ├── approximate/ │ │ ├── FeedbackVertexSetBenchmarkTest.java │ │ ├── FeedbackVertexSetExample.java │ │ └── FeedbackVertexSetSolverTest.java │ └── kernelized/ │ ├── DirectedFeedbackVertexSetBenchmarkTest.java │ ├── DirectedFeedbackVertexSetExample.java │ ├── DirectedFeedbackVertexSetSolverTest.java │ ├── 
ModulatorComputerTest.java │ ├── ParameterComputerExample.java │ └── ParameterComputerTest.java ├── graph-data-generator/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── gdg/ │ │ └── GraphDataGenerator.java │ └── test/ │ └── java/ │ └── org/ │ └── hjug/ │ └── gdg/ │ └── GraphDataGeneratorTest.java ├── jreleaser.yml ├── lombok.config ├── pom.xml ├── refactor-first-gradle-plugin/ │ ├── build.gradle │ ├── gradle/ │ │ └── wrapper/ │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties │ ├── gradlew │ ├── gradlew.bat │ ├── pom.xml │ ├── settings.gradle │ └── src/ │ └── main/ │ └── java/ │ └── org/ │ └── hjug/ │ └── gradlereport/ │ └── RefactorFirstPlugin.java ├── refactor-first-maven-plugin/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── org/ │ └── hjug/ │ └── mavenreport/ │ ├── RefactorFirstHtmlReport.java │ ├── RefactorFirstMavenCsvReport.java │ ├── RefactorFirstMavenJsonReport.java │ ├── RefactorFirstMavenReport.java │ └── RefactorFirstSimpleHtmlReport.java ├── report/ │ ├── .gitignore │ ├── pom.xml │ └── src/ │ ├── main/ │ │ └── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── refactorfirst/ │ │ └── report/ │ │ ├── CsvReport.java │ │ ├── HtmlReport.java │ │ ├── ReportWriter.java │ │ ├── SimpleHtmlReport.java │ │ └── json/ │ │ ├── JsonReport.java │ │ ├── JsonReportDisharmonyEntry.java │ │ └── JsonReportExecutor.java │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── hjug/ │ │ └── refactorfirst/ │ │ └── report/ │ │ ├── HtmlReportTest.java │ │ └── SimpleHtmlReportTest.java │ └── resources/ │ ├── highlight.html │ ├── sigmaPlayground.html │ └── spriteText.html ├── spring-petclinic-rest-report.html └── test-resources/ ├── pom.xml └── src/ └── main/ └── resources/ ├── AttributeHandler.java ├── AttributeHandler2.java ├── AttributeHandlerAndSorter.java ├── AttributeHandlerJavaEleven.java ├── Attributes.java └── Console.java ================================================ FILE CONTENTS ================================================ 
================================================ FILE: .github/FUNDING.yml ================================================ github: jimbethancourt #open_collective: RefactorFirst #ko_fi: jimbethancourt #liberapay: jimbethancourt #patreon: jimbethancourt ================================================ FILE: .github/workflows/codesee-arch-diagram.yml ================================================ # This workflow was added by CodeSee. Learn more at https://codesee.io/ # This is v2.0 of this workflow file on: push: branches: - main pull_request_target: types: [opened, synchronize, reopened] name: CodeSee permissions: read-all jobs: codesee: runs-on: ubuntu-latest continue-on-error: true name: Analyze the repo with CodeSee steps: - uses: Codesee-io/codesee-action@v2 with: codesee-token: ${{ secrets.CODESEE_ARCH_DIAG_API_TOKEN }} ================================================ FILE: .github/workflows/maven-pr.yml ================================================ # This workflow will build a Java project with Maven # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven name: Java CI with Maven (PR) on: pull_request: branches: [ main ] jobs: build: runs-on: ubuntu-latest steps: - name: Check out Git repository uses: actions/checkout@v4 - name: Set up JDK 11 uses: actions/setup-java@v4 with: java-version: 11 distribution: 'zulu' - name: Build With Maven run: mvn -B verify # Comment "Build With Maven" and uncomment the below when you want a snapshot build to be deployed # *********Don't forget to switch to Java 1.8 as well******** # - name: Publish Maven snapshot # uses: samuelmeuli/action-maven-publish@v1 # with: # gpg_private_key: ${{ secrets.gpg_private_key }} # gpg_passphrase: ${{ secrets.gpg_passphrase }} # nexus_username: ${{ secrets.nexus_username }} # nexus_password: ${{ secrets.nexus_password }} # maven_profiles: snapshot-release ================================================ FILE: 
.github/workflows/maven.yml ================================================ # This workflow will build a Java project with Maven # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven name: Java CI with Maven on: push: branches: [ main ] jobs: build: runs-on: ubuntu-latest steps: - name: Check out Git repository uses: actions/checkout@v4 - name: Set up JDK 11 uses: actions/setup-java@v4 with: java-version: 11 distribution: 'zulu' - name: Build With Maven env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any run: mvn -B verify # Comment "Build With Maven" and uncomment the below when you want a snapshot build to be deployed # *********Don't forget to switch to Java 1.8 as well******** # - name: Publish Maven snapshot # uses: samuelmeuli/action-maven-publish@v1 # with: # gpg_private_key: ${{ secrets.gpg_private_key }} # gpg_passphrase: ${{ secrets.gpg_passphrase }} # nexus_username: ${{ secrets.nexus_username }} # nexus_password: ${{ secrets.nexus_password }} # maven_profiles: snapshot-release ================================================ FILE: .github/workflows/release.yml ================================================ # Based on https://github.com/jagodevreede/semver-check/blob/c9353fa86eb9ae8f6b309057748672a6c1e0f435/.github/workflows/release.yml # From https://foojay.io/today/how-to-publish-a-java-maven-project-to-maven-central-using-jreleaser-and-github-actions-2025-guide/ # If this doesn't work, try https://jreleaser.org/guide/latest/continuous-integration/github-actions.html name: Release on: workflow_dispatch: inputs: version: description: 'Release version' required: true nextVersion: description: 'Next version after release (-SNAPSHOT will be added automatically)' required: true jobs: build: permissions: contents: write runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up JDK 11 uses: 
actions/setup-java@v3 with: java-version: '11' distribution: 'temurin' cache: maven - name: Set release version run: mvn --no-transfer-progress --batch-mode versions:set -DnewVersion=${{ github.event.inputs.version }} - name: Commit & Push changes uses: actions-js/push@master with: github_token: ${{ secrets.JRELEASER_GITHUB_TOKEN }} message: "build: Releasing version ${{ github.event.inputs.version }}" - name: Stage release run: mvn --no-transfer-progress --batch-mode -Ppublish clean deploy -DaltDeploymentRepository=local::default::file://`pwd`/target/staging-deploy - name: Run JReleaser uses: jreleaser/release-action@v2 with: setup-java: false version: 1.20.0 env: JRELEASER_PROJECT_VERSION: ${{ github.event.inputs.version }} JRELEASER_GITHUB_TOKEN: ${{ secrets.JRELEASER_GITHUB_TOKEN }} JRELEASER_GPG_PASSPHRASE: ${{ secrets.JRELEASER_GPG_PASSPHRASE }} JRELEASER_GPG_PUBLIC_KEY: ${{ secrets.JRELEASER_GPG_PUBLIC_KEY }} JRELEASER_GPG_SECRET_KEY: ${{ secrets.JRELEASER_GPG_SECRET_KEY }} JRELEASER_DEPLOY_MAVEN_MAVENCENTRAL_RELEASE_DEPLOY_USERNAME: ${{ secrets.JRELEASER_MAVENCENTRAL_SONATYPE_USERNAME }} JRELEASER_DEPLOY_MAVEN_MAVENCENTRAL_RELEASE_DEPLOY_PASSWORD: ${{ secrets.JRELEASER_MAVENCENTRAL_SONATYPE_PASSWORD }} - name: Set release version run: mvn --no-transfer-progress --batch-mode versions:set -DnewVersion=${{ github.event.inputs.nextVersion }}-SNAPSHOT - name: Commit & Push changes uses: actions-js/push@master with: github_token: ${{ secrets.JRELEASER_GITHUB_TOKEN }} message: "build: Setting SNAPSHOT version ${{ github.event.inputs.nextVersion }}-SNAPSHOT" tags: false - name: JReleaser release output if: always() uses: actions/upload-artifact@v4 with: name: jreleaser-release path: | out/jreleaser/trace.log out/jreleaser/output.properties ================================================ FILE: .gitignore ================================================ # Created by https://www.toptal.com/developers/gitignore/api/java,maven,intellij,eclipse # Edit at 
https://www.toptal.com/developers/gitignore?templates=java,maven,intellij,eclipse ### Eclipse ### .metadata bin/ tmp/ *.tmp *.bak *.swp *~.nib local.properties .settings/ .loadpath .recommenders # External tool builders .externalToolBuilders/ # Locally stored "Eclipse launch configurations" *.launch # PyDev specific (Python IDE for Eclipse) *.pydevproject # CDT-specific (C/C++ Development Tooling) .cproject # CDT- autotools .autotools # Java annotation processor (APT) .factorypath # PDT-specific (PHP Development Tools) .buildpath # sbteclipse plugin .target # Tern plugin .tern-project # TeXlipse plugin .texlipse # STS (Spring Tool Suite) .springBeans # Code Recommenders .recommenders/ # Annotation Processing .apt_generated/ .apt_generated_test/ # Scala IDE specific (Scala & Java development for Eclipse) .cache-main .scala_dependencies .worksheet # Uncomment this line if you wish to ignore the project description file. # Typically, this file would be tracked if it contains build/dependency configurations: #.project ### Eclipse Patch ### # Spring Boot Tooling .sts4-cache/ ### Intellij ### # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 # User-specific stuff .idea/**/workspace.xml .idea/**/tasks.xml .idea/**/usage.statistics.xml .idea/**/dictionaries .idea/**/shelf # Generated files .idea/**/contentModel.xml # Sensitive or high-churn files .idea/**/dataSources/ .idea/**/dataSources.ids .idea/**/dataSources.local.xml .idea/**/sqlDataSources.xml .idea/**/dynamic.xml .idea/**/uiDesigner.xml .idea/**/dbnavigator.xml # Gradle .idea/**/gradle.xml .idea/**/libraries build/ .gradle/ # Gradle and Maven with auto-import # When using Gradle or Maven with auto-import, you should exclude module files, # since they will be recreated, and may cause churn. Uncomment if using # auto-import. 
# .idea/artifacts # .idea/compiler.xml # .idea/jarRepositories.xml # .idea/modules.xml # .idea/*.iml # .idea/modules # *.iml # *.ipr # CMake cmake-build-*/ # Mongo Explorer plugin .idea/**/mongoSettings.xml # File-based project format *.iws # IntelliJ out/ # mpeltonen/sbt-idea plugin .idea_modules/ # JIRA plugin atlassian-ide-plugin.xml # Cursive Clojure plugin .idea/replstate.xml # Crashlytics plugin (for Android Studio and IntelliJ) com_crashlytics_export_strings.xml crashlytics.properties crashlytics-build.properties fabric.properties # Editor-based Rest Client .idea/httpRequests # Android studio 3.1+ serialized cache file .idea/caches/build_file_checksums.ser .idea/* .idea ### Intellij Patch ### # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 *.iml # modules.xml # .idea/misc.xml # *.ipr # Sonarlint plugin # https://plugins.jetbrains.com/plugin/7973-sonarlint .idea/**/sonarlint/ # SonarQube Plugin # https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin .idea/**/sonarIssues.xml # Markdown Navigator plugin # https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced .idea/**/markdown-navigator.xml .idea/**/markdown-navigator-enh.xml .idea/**/markdown-navigator/ # Cache file creation bug # See https://youtrack.jetbrains.com/issue/JBR-2257 .idea/$CACHE_FILE$ # CodeStream plugin # https://plugins.jetbrains.com/plugin/12206-codestream .idea/codestream.xml ### Java ### # Compiled class file *.class # Log file *.log # BlueJ files *.ctxt # Mobile Tools for Java (J2ME) .mtj.tmp/ # Package Files # *.war *.nar *.ear *.zip *.tar.gz *.rar # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml hs_err_pid* ### Maven ### target/ pom.xml.tag pom.xml.releaseBackup pom.xml.versionsBackup pom.xml.next release.properties dependency-reduced-pom.xml buildNumber.properties .mvn/timing.properties # https://github.com/takari/maven-wrapper#usage-without-binary-jar 
.mvn/wrapper/maven-wrapper.jar # End of https://www.toptal.com/developers/gitignore/api/java,maven,intellij,eclipse ./jreleaser-cli.jar ================================================ FILE: CITATIONS.md ================================================ # Research Citations ## Directed Feedback Arc Set **Title:** Computing a Feedback Arc Set Using PageRank **Authors:** Geladaris, Lionakis, and Tollis **Year:** 2023 **Publication:** International Symposium on Graph Drawing and Network Visualization **Links:** https://arxiv.org/abs/2208.09234 https://doi.org/10.1007/978-3-031-22203-0_14 **Summary:** The new technique produces solutions that are better than the ones produced by the best previously known heuristics, often reducing the FAS size by more than 50%. ## Directed Feedback Vertex Set **Title:** Wannabe Bounded Treewidth Graphs Admit a Polynomial Kernel for Directed Feedback Vertex Set **Authors:** Authors: Daniel Lokshtanov, Maadapuzhi-Sridharan Ramanujan, Saket Saurabh, Roohani Sharma, Meirav Zehavi **Year:** 2025 **Publication:** ACM Transactions on Computation Theory, Volume 17, Issue 1 **Links:** https://doi.org/10.1145/3711669 ## Tech Debt Prioritization **Title:** Prioritizing Design Debt Investment Opportunities **Authors:** Nico Zazworka, Carolyn Seaman, and Forrest Shull **Year:** 2011 **Publication:** MTD '11: Proceedings of the 2nd Workshop on Managing Technical Debt **Links:** https://dl.acm.org/doi/10.1145/1985362.1985372 ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: README.md ================================================ # RefactorFirst This tool for Java codebases will help you identify what you should refactor first: - God Classes - Highly Coupled classes - Class Cycles (with cycle images!) It scans your Git repository generates a single page application by runing: - Cycle analysis on your source code using the [OpenRewrite](https://github.com/openrewrite/rewrite) Java parser and [JGraphT](https://jgrapht.org/) - What-if analysis to identify the most optimal relationships in a class cycle to remove - PMD's God Class Rule - PMD's Coupling Between Objects Code map viewers are powered by [3D Force Graph](https://vasturiano.github.io/3d-force-graph), [sigma.js](https://www.sigmajs.org/), and [GraphViz DOT](https://graphviz.org/docs/layouts/dot/)
If there are more than 4000 classes + relationships, a simplified 3D viewer will be used to avoid slowdowns. Features will be toggleable in the 3D UI in a future release. ## Decomposing and Removing Cycles Cycle analysis is performed with cutting-edge [Directed Feedback Vertex Set](https://dl.acm.org/doi/10.1145/3711669) and [Directed Feedback Arc Set](https://arxiv.org/abs/2208.09234) algorithms to identify the optimal classes and relationships between classes for removal to get rid of cycles in your codebase. These algorithms are powerful and will push your CPU to its limits for large codebases, though it does play nice and shouldn't slow your computer down. These graph algorithms can be used outside RefactorFirst. See [DIAGRAM.md](./graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/DIAGRAM.md) for the flow of the vertex kernelized algorithm. See [DIAGRAM.md](./graph-algorithms/src/main/java/org/hjug/feedback/arc/pageRank/DIAGRAM.md) for more details on the arc kernelized algorithm. ### How to understand the Relationship Removal Priority table The Relationship Removal Priority table shows the most optimal relationships to remove from your codebase to remove all cycles. The table is sorted by the number of cycles that a relationship exists in and then the change proneness of the classes in the relationship. - Classes that should be broken apart / removed from the codebase are bold. - If only one class is bold, the shared functionality should be moved to the non-bold class. - If neither class or both classes are bold, examine both classes and reassess the responsibilities of the classes and refactor to remove the relationship. Take a look at the [Spring Petclinic REST project sample report](https://rawcdn.githack.com/refactorfirst/RefactorFirst/c46d26211a91ffbe08d4089e04a85ff31eb093c0/spring-petclinic-rest-report.html)! 
The graphs generated in the report will look similar to this one: ![image info](./RefactorFirst_Sample_Report.png) ## Please Note: Java 11 (or newer) required to run RefactorFirst **Java 21 codebase analysis is supported!** The change to require Java 11 is needed to address vulnerability CVE-2023-4759 in JGit Please use a recent JDK release of the Java version you are using. If you use an old JDK release of your chosen Java version, you may encounter issues during analysis. ## There are several ways to run the analysis on your codebase: ### From The Command Line As an HTML Report Run the following command from the root of your project (the source code does not need to be built): ```bash mvn org.hjug.refactorfirst.plugin:refactor-first-maven-plugin:0.8.0:htmlReport ``` View the report at ```target/site/refactor-first-report.html``` ### [As Part of GitHub Actions Output](https://github.blog/news-insights/product-news/supercharging-github-actions-with-job-summaries/) This will generate a simplified HTML report (no graphs or images) as the output of a GitHub Action step ```bash mvn -B clean test \ org.hjug.refactorfirst.plugin:refactor-first-maven-plugin:0.8.0:simpleHtmlReport \ && echo "$(cat target/site/refactor-first-report.html)" >> $GITHUB_STEP_SUMMARY ``` ### As Part of a Build Add the following to your project in the build section. **showDetails** will show God Class metrics and rankings in the generated table. ```xml ... org.hjug.refactorfirst.plugin refactor-first-maven-plugin 0.8.0 false ... ``` ### As a Maven Report Add the following to your project in the reports section. A RefactorFirst report will show up in the site report when you run ```mvn site``` ```xml ... org.hjug.refactorfirst.plugin refactor-first-maven-plugin 0.8.0 ... ``` ## Configuraiton Options Care has been taken to use sensible defaults, though if you wish to override these defaults you can specify the following parameters. Specify with -D if running on the command line. e.g. 
```-DbackEdgeAnalysisCount=0 -DanalyzeCycles=false``` or in the configuration section (as in the above examples) if including in a Maven build. |Option|Action| Default | |------|------|-----------------------------------------------------------| |showDetails|Shows God Class metrics| false | |backEdgeAnalysisCount|Number of back edges in a cycle to analyze.
If total number of back edges is greater than the value specified, it analyzes the number of minimum weight edges specified.
**If 0 is specified, all back edges will be analyzed**| 50 | |analyzeCycles|Analyzes the 10 largest cycles (will be configurable in the future)| true | |minifyHtml|Minifies the generated HTML report. Only available on ```htmlReport``` and ```simpleHtmlReport``` goals. May cause issues with large reports.| false | |excludeTests|Exclude test classes from analysis| true | |testSrcDirectory|Excludes classes containing this pattern from analysis| ```src/test``` and ```src\test``` | |projectName|The name of your project to be displayed on the report| Your Maven project name | |projectVersion|The version of your project to be displayed on the report| Your Maven project version | |outputDirectory|The location the project report will be written| ```${projectDir}/target/site/refactor-first-report.html``` ### Seeing Errors? If you see an error similar to ``` Execution default-site of goal org.apache.maven.plugins:maven-site-plugin:3.3:site failed: A required class was missing while executing org.apache.maven.plugins:maven-site-plugin:3.3:site: org/apache/maven/doxia/siterenderer/DocumentContent ``` you will need to add the following to your pom.xml: ```xml org.apache.maven.plugins maven-site-plugin 3.12.1 org.apache.maven.plugins maven-project-info-reports-plugin 3.4.5 ``` ## But I'm using Gradle / my project layout isn't typical! I would like to create a Gradle plugin and (possibly) support non-conventional projects in the future, but in the meantime you can create a dummy POM file in the same directory as your .git directory: ```xml 4.0.0 com.mycompany.app my-app 1.0-SNAPSHOT ``` and then (assuming Maven is installed) run ```bash mvn org.hjug.refactorfirst.plugin:refactor-first-maven-plugin:0.8.0:htmlReport ``` ## Viewing the Report View the report at ```target/site/refactor-first-report.html``` Once the plugin finishes executing (it may take a while for a large / old codebase), open the file **target/site/refactor-first-report.html** in the root of the project. 
It will contain a graph similar to the one above, and a table that lists God classes in the recommended order that they should be refactored. The classes in the top left of the graph are the easiest to refactor while also having the biggest positive impact to team productivity. If highly coupled classes are detected, a graph and table listing Highly Coupled Classes in will be generated. ## I have the report. Now What??? Work with your Product Owner to prioritize the technical debt that has been identified. It may help to explain it as hidden negative value that is slowing team porductivity. If you have IntelliJ Ultimate, you can install the [Method Reference Diagram](https://plugins.jetbrains.com/plugin/7996-java-method-reference-diagram) plugin to help you determine how the identified God classes and Highly Coupled classes can be refactored. ## Additional Details This plugin will work on both single module and multi-module Maven projects that have a typical Maven project layout. This tool is based on the paper **[Prioritizing Design Debt Investment Opportunities](https://dl.acm.org/doi/10.1145/1985362.1985372)** by Nico Zazworka, Carolyn Seaman, and Forrest Shull. The presentation based on the paper is available at https://resources.sei.cmu.edu/asset_files/Presentation/2011_017_001_516911.pdf ## Limitations * My time. This is a passion project and is developed in my spare time. ## Feedback and Collaboration Welcome There is still much to be done. Your feedback and collaboration would be greatly appreciated in the form of feature requests, bug submissions, and PRs. If you find this plugin useful, please star this repository and share with your friends & colleagues and on social media. ## Future Plans * Improve class cycle analysis * Add a Gradle plugin. * Incorporate Unit Test coverage metrics to quickly identify the safety of refactoring classes. * Incorporate bug counts per class to the Impact (Y-Axis) calculation. 
* Incorporate more disharmonies from Object Oriented Metrics In Practice (Lanza and Marinescu, 2004). ## Note: If you are a user of Version 0.1.0 or 0.1.1, you may notice that the list of God classes found by the plugin has changed. This is due to changes in PMD. # Thank You! Enjoy! ================================================ FILE: change-proneness-ranker/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.changepronenessranker change-proneness-ranker RefactorFirst Change Proneness Ranker ${project.basedir}/src/test/resources org.eclipse.jgit org.eclipse.jgit org.hjug.refactorfirst.testresources test-resources ================================================ FILE: change-proneness-ranker/src/main/java/org/hjug/git/ChangePronenessRanker.java ================================================ package org.hjug.git; import java.io.IOException; import java.util.*; import lombok.extern.slf4j.Slf4j; import org.eclipse.jgit.api.errors.GitAPIException; @Slf4j public class ChangePronenessRanker { private final TreeMap changeCountsByTimeStamps = new TreeMap<>(); private final Map cachedScmLogInfos = new HashMap<>(); public ChangePronenessRanker(GitLogReader repositoryLogReader) { try { log.info("Capturing change count based on commit timestamps"); changeCountsByTimeStamps.putAll(repositoryLogReader.captureChangeCountByCommitTimestamp()); } catch (IOException | GitAPIException e) { log.error("Error reading from repository: {}", e.getMessage()); } } public void rankChangeProneness(List scmLogInfos) { for (ScmLogInfo scmLogInfo : scmLogInfos) { if (!cachedScmLogInfos.containsKey(scmLogInfo.getPath())) { int commitsInRepositorySinceCreation = changeCountsByTimeStamps.tailMap(scmLogInfo.getEarliestCommit()).values().stream() .mapToInt(i -> i) .sum(); scmLogInfo.setChangeProneness((float) scmLogInfo.getCommitCount() / commitsInRepositorySinceCreation); 
cachedScmLogInfos.put(scmLogInfo.getPath(), scmLogInfo); } else { scmLogInfo.setChangeProneness( cachedScmLogInfos.get(scmLogInfo.getPath()).getChangeProneness()); } } scmLogInfos.sort(Comparator.comparing(ScmLogInfo::getChangeProneness)); int rank = 0; for (ScmLogInfo scmLogInfo : scmLogInfos) { scmLogInfo.setChangePronenessRank(++rank); } } } ================================================ FILE: change-proneness-ranker/src/main/java/org/hjug/git/GitLogReader.java ================================================ package org.hjug.git; import java.io.*; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.stream.IntStream; import lombok.extern.slf4j.Slf4j; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.diff.DiffEntry; import org.eclipse.jgit.diff.DiffFormatter; import org.eclipse.jgit.lib.*; import org.eclipse.jgit.revwalk.*; import org.eclipse.jgit.storage.file.FileRepositoryBuilder; import org.eclipse.jgit.treewalk.CanonicalTreeParser; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.util.io.NullOutputStream; @Slf4j public class GitLogReader implements AutoCloseable { static final String JAVA_FILE_TYPE = ".java"; private Repository gitRepository; private Git git; public GitLogReader() {} public GitLogReader(File basedir) throws IOException { FileRepositoryBuilder repositoryBuilder = new FileRepositoryBuilder().findGitDir(basedir); String gitIndexFileEnvVariable = System.getenv("GIT_INDEX_FILE"); if (Objects.nonNull(gitIndexFileEnvVariable) && !gitIndexFileEnvVariable.trim().isEmpty()) { log.debug("Setting Index File based on Env Variable GIT_INDEX_FILE {}", gitIndexFileEnvVariable); repositoryBuilder = repositoryBuilder.setIndexFile(new File(gitIndexFileEnvVariable)); } git = Git.open(repositoryBuilder.getGitDir()); gitRepository = git.getRepository(); } GitLogReader(Git git) { this.git = git; gitRepository = 
git.getRepository(); } @Override public void close() throws Exception { git.close(); } public File getGitDir(File basedir) { FileRepositoryBuilder repositoryBuilder = new FileRepositoryBuilder().findGitDir(basedir); return repositoryBuilder.getGitDir(); } // log --follow implementation may be worth adopting in the future // https://github.com/spearce/jgit/blob/master/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/RevWalkTextBuiltin.java /** * Returns the number of commits and earliest commit for a given path * TODO: Move to a different class??? * * @param path * @return a LogInfo object * @throws GitAPIException */ public ScmLogInfo fileLog(String path) throws GitAPIException, IOException { ObjectId branchId = gitRepository.resolve("HEAD"); Iterable revCommits = git.log().add(branchId).addPath(path).call(); int commitCount = 0; int earliestCommit = Integer.MAX_VALUE; int mostRecentCommit = 0; for (RevCommit revCommit : revCommits) { int commitTime = revCommit.getCommitTime(); if (commitCount == 0) { mostRecentCommit = commitTime; } if (commitTime < earliestCommit) { earliestCommit = commitTime; } commitCount++; } if (commitCount == 0) { return new ScmLogInfo(path, null, earliestCommit, earliestCommit, commitCount); } return new ScmLogInfo(path, null, earliestCommit, mostRecentCommit, commitCount); } // based on https://stackoverflow.com/questions/27361538/how-to-show-changes-between-commits-with-jgit public TreeMap captureChangeCountByCommitTimestamp() throws IOException, GitAPIException { TreeMap changesByCommitTimestamp = new TreeMap<>(); ObjectId branchId = gitRepository.resolve("HEAD"); List commitList = new ArrayList<>(); git.log().add(branchId).call().forEach(commitList::add); if (commitList.isEmpty()) { return changesByCommitTimestamp; } // Handle first / initial commit changesByCommitTimestamp.putAll(walkFirstCommit(commitList.get(commitList.size() - 1))); if (commitList.size() < 2) { return changesByCommitTimestamp; } // Process adjacent commit pairs in 
parallel; each pair is independent ConcurrentMap concurrentResults = new ConcurrentHashMap<>(); IntStream.range(0, commitList.size() - 1).parallel().forEach(i -> { RevCommit newer = commitList.get(i); RevCommit older = commitList.get(i + 1); try { int count = 0; for (DiffEntry entry : getDiffEntries(newer, older)) { if (entry.getNewPath().endsWith(JAVA_FILE_TYPE) || entry.getOldPath().endsWith(JAVA_FILE_TYPE)) { count++; } } if (count > 0) { concurrentResults.put(newer.getCommitTime(), count); } } catch (IOException e) { log.error("Error getting diff entries: {}", e.getMessage()); } }); changesByCommitTimestamp.putAll(concurrentResults); return changesByCommitTimestamp; } private List getDiffEntries(RevCommit newCommit, RevCommit oldCommit) throws IOException { try (ObjectReader reader = gitRepository.newObjectReader(); DiffFormatter df = new DiffFormatter(NullOutputStream.INSTANCE)) { df.setRepository(gitRepository); CanonicalTreeParser oldTreeIter = new CanonicalTreeParser(); oldTreeIter.reset(reader, newCommit.getTree()); CanonicalTreeParser newTreeIter = new CanonicalTreeParser(); newTreeIter.reset(reader, oldCommit.getTree()); return df.scan(oldTreeIter, newTreeIter); } } Map walkFirstCommit(RevCommit firstCommit) throws IOException { Map changesByCommitTimestamp = new TreeMap<>(); int firstCommitCount = 0; ObjectId treeId = firstCommit.getTree(); try (TreeWalk treeWalk = new TreeWalk(gitRepository)) { treeWalk.setRecursive(false); treeWalk.reset(treeId); while (treeWalk.next()) { if (treeWalk.isSubtree()) { treeWalk.enterSubtree(); } else { if (treeWalk.getPathString().endsWith(JAVA_FILE_TYPE)) { firstCommitCount++; } } } } if (firstCommitCount > 0) { changesByCommitTimestamp.put(firstCommit.getCommitTime(), firstCommitCount); } return changesByCommitTimestamp; } } ================================================ FILE: change-proneness-ranker/src/main/java/org/hjug/git/ScmLogInfo.java ================================================ package org.hjug.git; 
import lombok.Data;

/**
 * Git log statistics for a single source file: commit span, commit count,
 * and the change-proneness score/rank assigned later by ChangePronenessRanker.
 */
@Data
public class ScmLogInfo {
    // Repository-relative path of the file
    private String path;
    private String className;
    // Commit timestamps as epoch seconds (values of JGit RevCommit.getCommitTime())
    private int earliestCommit;
    private int mostRecentCommit;
    private int commitCount;
    // Set by ChangePronenessRanker.rankChangeProneness(), not by the constructor
    private float changeProneness;
    private int changePronenessRank;

    /**
     * Creates log info for a file; change proneness and rank start at 0 and are
     * filled in during ranking.
     */
    public ScmLogInfo(String path, String className, int earliestCommit, int mostRecentCommit, int commitCount) {
        this.path = path;
        this.className = className;
        this.earliestCommit = earliestCommit;
        this.mostRecentCommit = mostRecentCommit;
        this.commitCount = commitCount;
    }
}
package org.hjug.git;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.*;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

// Unit tests for ChangePronenessRanker using a mocked GitLogReader,
// so no real Git repository is required.
class ChangePronenessRankerTest {

    private ChangePronenessRanker changePronenessRanker;
    private GitLogReader repositoryLogReader;

    @BeforeEach
    public void setUp() {
        repositoryLogReader = mock(GitLogReader.class);
    }

    // TODO: this should probably be a cucumber test
    @Test
    void testChangePronenessCalculation() throws IOException, GitAPIException {
        // One commit for the file of interest, at the earliest timestamp
        ScmLogInfo scmLogInfo = new ScmLogInfo("path", null, 1595275997, 0, 1);

        // Repository-wide change counts keyed by commit timestamp (epoch seconds):
        // 1 change at the file's earliest commit, then 3 + 3 changes afterwards
        TreeMap commitsWithChangeCounts = new TreeMap<>();
        commitsWithChangeCounts.put(scmLogInfo.getEarliestCommit(), scmLogInfo.getCommitCount());
        commitsWithChangeCounts.put(scmLogInfo.getEarliestCommit() + 5 * 60, 3);
        commitsWithChangeCounts.put(scmLogInfo.getEarliestCommit() + 10 * 60, 3);

        when(repositoryLogReader.captureChangeCountByCommitTimestamp()).thenReturn(commitsWithChangeCounts);

        changePronenessRanker = new ChangePronenessRanker(repositoryLogReader);

        List scmLogInfos = new ArrayList<>();
        scmLogInfos.add(scmLogInfo);
        changePronenessRanker.rankChangeProneness(scmLogInfos);

        // 1 commit of a class we're interested in, 6 commits of other files after it
        Assertions.assertEquals((float) 1 / 7, scmLogInfo.getChangeProneness(), 0.1);
    }

    @Test
    void testRankChangeProneness() throws IOException, GitAPIException {
        // more recent commit
        ScmLogInfo newerCommit = new ScmLogInfo("file1", null, 1595275997, 0, 1);

        TreeMap commitsWithChangeCounts = new TreeMap<>();
        commitsWithChangeCounts.put(newerCommit.getEarliestCommit(), newerCommit.getCommitCount());
        commitsWithChangeCounts.put(newerCommit.getEarliestCommit() + 5 * 60, 3);
        commitsWithChangeCounts.put(newerCommit.getEarliestCommit() + 10 * 60, 3);

        // older commit: more repository changes have happened since its creation,
        // so its change proneness ratio is smaller... but its rank is computed
        // relative to the newer commit below
        ScmLogInfo olderCommit = new ScmLogInfo("file2", null, 1595175997, 0, 1);
        commitsWithChangeCounts.put(olderCommit.getEarliestCommit(), olderCommit.getCommitCount());
        commitsWithChangeCounts.put(olderCommit.getEarliestCommit() + 5 * 60, 5);
        commitsWithChangeCounts.put(olderCommit.getEarliestCommit() + 10 * 60, 5);

        when(repositoryLogReader.captureChangeCountByCommitTimestamp()).thenReturn(commitsWithChangeCounts);

        changePronenessRanker = new ChangePronenessRanker(repositoryLogReader);

        List scmLogInfos = new ArrayList<>();
        scmLogInfos.add(newerCommit);
        scmLogInfos.add(olderCommit);
        changePronenessRanker.rankChangeProneness(scmLogInfos);

        // ranks higher since fewer commits since initial commit
        Assertions.assertEquals(2, newerCommit.getChangePronenessRank());
        // ranks lower since there have been more commits since initial commit
        Assertions.assertEquals(1, olderCommit.getChangePronenessRank());
    }
}
org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; public class GitLogReaderTest { // Borrowed bits and pieces from // https://gist.github.com/rherrmann/0c682ea327862cb6847704acf90b1d5d @TempDir public File tempFolder; private Git git; private Repository repository; @BeforeEach public void setUp() throws GitAPIException { git = Git.init().setDirectory(tempFolder).call(); repository = git.getRepository(); } @AfterEach public void tearDown() { repository.close(); } @Test void testFileLog() throws IOException, GitAPIException, InterruptedException { // This path works when referencing the full Tobago repository // String filePath = "tobago-core/src/main/java/org/apache/myfaces/tobago/facelets/AttributeHandler.java"; GitLogReader gitLogReader = new GitLogReader(git); String attributeHandler = "AttributeHandler.java"; InputStream resourceAsStream = getClass().getClassLoader().getResourceAsStream(attributeHandler); writeFile(attributeHandler, convertInputStreamToString(resourceAsStream)); git.add().addFilepattern(".").call(); RevCommit firstCommit = git.commit().setMessage("message").call(); // Sleeping for one second to guarantee commits have different time stamps Thread.sleep(1000); // write contents of updated file to original file InputStream resourceAsStream2 = getClass().getClassLoader().getResourceAsStream("AttributeHandler2.java"); writeFile(attributeHandler, convertInputStreamToString(resourceAsStream2)); git.add().addFilepattern(".").call(); RevCommit secondCommit = git.commit().setMessage("message").call(); ScmLogInfo scmLogInfo = gitLogReader.fileLog(attributeHandler); Assertions.assertEquals(2, scmLogInfo.getCommitCount()); Assertions.assertEquals(firstCommit.getCommitTime(), scmLogInfo.getEarliestCommit()); 
Assertions.assertEquals(secondCommit.getCommitTime(), scmLogInfo.getMostRecentCommit()); } @Test void testWalkFirstCommit() throws IOException, GitAPIException { GitLogReader gitLogReader = new GitLogReader(git); String attributeHandler = "AttributeHandler.java"; InputStream resourceAsStream = getClass().getClassLoader().getResourceAsStream(attributeHandler); writeFile(attributeHandler, convertInputStreamToString(resourceAsStream)); git.add().addFilepattern(".").call(); RevCommit commit = git.commit().setMessage("message").call(); Map result = gitLogReader.walkFirstCommit(commit); Assertions.assertTrue(result.containsKey(commit.getCommitTime())); Assertions.assertEquals(1, result.get(commit.getCommitTime()).intValue()); } @Test void testCaptureChangCountByCommitTimestamp() throws Exception { GitLogReader gitLogReader = new GitLogReader(git); String attributeHandler = "AttributeHandler.java"; InputStream resourceAsStream = getClass().getClassLoader().getResourceAsStream(attributeHandler); writeFile(attributeHandler, convertInputStreamToString(resourceAsStream)); git.add().addFilepattern(".").call(); RevCommit firstCommit = git.commit().setMessage("message").call(); // Sleeping for one second to guarantee commits have different time stamps Thread.sleep(1000); // write contents of updated file to original file InputStream resourceAsStream2 = getClass().getClassLoader().getResourceAsStream("AttributeHandler2.java"); writeFile(attributeHandler, convertInputStreamToString(resourceAsStream2)); InputStream resourceAsStream3 = getClass().getClassLoader().getResourceAsStream("Attributes.java"); writeFile("Attributes.java", convertInputStreamToString(resourceAsStream3)); git.add().addFilepattern(".").call(); RevCommit secondCommit = git.commit().setMessage("message").call(); Map commitCounts = gitLogReader.captureChangeCountByCommitTimestamp(); Assertions.assertEquals(1, commitCounts.get(firstCommit.getCommitTime()).intValue()); Assertions.assertEquals( 2, 
commitCounts.get(secondCommit.getCommitTime()).intValue()); } private void writeFile(String name, String content) throws IOException { File file = new File(git.getRepository().getWorkTree(), name); try (FileOutputStream outputStream = new FileOutputStream(file)) { outputStream.write(content.getBytes(UTF_8)); } } private String convertInputStreamToString(InputStream inputStream) throws IOException { ByteArrayOutputStream result = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int length; while ((length = inputStream.read(buffer)) != -1) { result.write(buffer, 0, length); } return result.toString("UTF-8"); } } ================================================ FILE: cli/.gitignore ================================================ target/ !.mvn/wrapper/maven-wrapper.jar !**/src/main/**/target/ !**/src/test/**/target/ ### IntelliJ IDEA ### .idea/modules.xml .idea/jarRepositories.xml .idea/compiler.xml .idea/libraries/ *.iws *.iml *.ipr ### Eclipse ### .apt_generated .classpath .factorypath .project .settings .springBeans .sts4-cache ### NetBeans ### /nbproject/private/ /nbbuild/ /dist/ /nbdist/ /.nb-gradle/ build/ !**/src/main/**/build/ !**/src/test/**/build/ ### VS Code ### .vscode/ ### Mac OS ### .DS_Store ================================================ FILE: cli/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT jar org.hjug.refactorfirst.report cli RefactorFirst CLI 11 11 UTF-8 info.picocli picocli 4.7.4 org.hjug.refactorfirst.report report org.slf4j slf4j-simple com.google.guava guava org.apache.maven maven-core org.apache.maven.plugins maven-shade-plugin 3.5.0 package shade org.hjug.refactorfirst.Main *:* META-INF/*.SF META-INF/*.DSA META-INF/*.RSA org.skife.maven really-executable-jar-maven-plugin 2.1.1 rf package really-executable-jar ================================================ FILE: cli/src/main/java/org/hjug/refactorfirst/Main.java 
================================================ package org.hjug.refactorfirst; import picocli.CommandLine; public class Main { public static void main(String[] args) { int exitCode = new CommandLine(new ReportCommand()) .setCaseInsensitiveEnumValuesAllowed(true) .execute(args); System.exit(exitCode); } } ================================================ FILE: cli/src/main/java/org/hjug/refactorfirst/ReportCommand.java ================================================ package org.hjug.refactorfirst; import static picocli.CommandLine.Option; import java.io.File; import java.io.FileReader; import java.util.concurrent.Callable; import lombok.extern.slf4j.Slf4j; import org.apache.maven.model.Model; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.apache.maven.project.MavenProject; import org.hjug.refactorfirst.report.CsvReport; import org.hjug.refactorfirst.report.HtmlReport; import org.hjug.refactorfirst.report.SimpleHtmlReport; import org.hjug.refactorfirst.report.json.JsonReportExecutor; import picocli.CommandLine.Command; @Command(mixinStandardHelpOptions = true, description = "Generate a report") @Slf4j public class ReportCommand implements Callable { @Option( names = {"-d", "--details"}, defaultValue = "false", description = "Show detailed report") private boolean showDetails; @Option( names = {"-eac", "--edge-analysis-count"}, defaultValue = "50", description = "Back Edge Analysis Count") protected int backEdgeAnalysisCount; @Option( names = {"-c", "--analyze-cycles"}, defaultValue = "true", description = "Analyze Cycles") private boolean analyzeCycles; @Option( names = {"-m", "--minify-html"}, defaultValue = "false", description = "Minify HTML output") private boolean minifiyHtml; @Option( names = {"-xt", "--exclude-tests"}, defaultValue = "true", description = "Exclude tests from analysis") private boolean excludeTests; /** * The test source directory containing test class sources. 
*/ @Option( names = {"-tsd", "--output"}, description = "Test source directory. Defaults to test/src or test\\src based on your OS. Default is intentionally generic.") private String testSourceDirectory; @Option( names = {"-p", "--project"}, description = "Project name") private String projectName; @Option( names = {"-v", "--version"}, description = "Project version") private String projectVersion; @Option( names = {"-o", "--output"}, defaultValue = ".", description = "Output directory") private String outputDirectory; @Option( names = {"-b", "--base-dir"}, defaultValue = ".", description = "Base directory of the project") private File baseDir; @Option( names = {"-t", "--type"}, description = "Report type: ${COMPLETION-CANDIDATES}", defaultValue = "HTML") private ReportType reportType; @Override public Integer call() { // TODO: add support for inferring arguments from gradle properties inferArgumentsFromMavenProject(); populateDefaultArguments(); switch (reportType) { case SIMPLE_HTML: SimpleHtmlReport simpleHtmlReport = new SimpleHtmlReport(); simpleHtmlReport.execute( backEdgeAnalysisCount, analyzeCycles, showDetails, minifiyHtml, excludeTests, testSourceDirectory, projectName, projectVersion, baseDir, outputDirectory); return 0; case HTML: HtmlReport htmlReport = new HtmlReport(); htmlReport.execute( backEdgeAnalysisCount, analyzeCycles, showDetails, minifiyHtml, excludeTests, testSourceDirectory, projectName, projectVersion, baseDir, outputDirectory); return 0; case JSON: JsonReportExecutor jsonReportExecutor = new JsonReportExecutor(); jsonReportExecutor.execute(baseDir, outputDirectory); return 0; case CSV: CsvReport csvReport = new CsvReport(); csvReport.execute(showDetails, projectName, projectVersion, outputDirectory, baseDir); return 0; } return 0; } private void populateDefaultArguments() { if (projectName == null || projectName.isEmpty()) { projectName = "my-project"; } if (projectVersion == null || projectVersion.isEmpty()) { projectVersion = "0.0.0"; 
} } private void inferArgumentsFromMavenProject() { if (baseDir.isDirectory()) { File[] potentialPomFiles = baseDir.listFiles(f -> f.getName().equals("pom.xml")); File pomFile = null; if (potentialPomFiles != null && potentialPomFiles.length > 0) { pomFile = potentialPomFiles[0]; } if (pomFile != null) { Model model; FileReader reader; MavenXpp3Reader mavenreader = new MavenXpp3Reader(); try { reader = new FileReader(pomFile); model = mavenreader.read(reader); model.setPomFile(pomFile); } catch (Exception ex) { log.info("Unable to infer arguments from pom file"); return; } MavenProject project = new MavenProject(model); // only override project name and version if they are not set if (projectName == null || projectName.isEmpty()) { projectName = project.getName(); } if (projectVersion == null || projectVersion.isEmpty()) { projectVersion = project.getVersion(); } } } } } ================================================ FILE: cli/src/main/java/org/hjug/refactorfirst/ReportType.java ================================================ package org.hjug.refactorfirst; public enum ReportType { SIMPLE_HTML, HTML, JSON, CSV; } ================================================ FILE: codebase-graph-builder/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.codebasegraphbuilder codebase-graph-builder RefactorFirst Codebase Graph Builder org.openrewrite.recipe rewrite-recipe-bom 3.4.0 pom import org.slf4j slf4j-api org.jgrapht jgrapht-core org.openrewrite rewrite-java-21 org.openrewrite rewrite-java-17 org.openrewrite rewrite-java-11 org.openrewrite rewrite-java io.quarkus.gizmo gizmo 1.9.0 org.openrewrite rewrite-core ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/CodebaseGraphDTO.java ================================================ package org.hjug.graphbuilder; import java.util.Map; import lombok.Data; import 
package org.hjug.graphbuilder;

/**
 * Callback interface for recording dependency information discovered while
 * analyzing a codebase: class-to-class references, package-to-package
 * references, class source-file locations, and the set of codebase packages.
 * Implementations (e.g. GraphDependencyCollector) decide how the data is stored.
 */
public interface DependencyCollector {

    /**
     * Records a dependency from one class to another
     *
     * @param fromClassFqn The fully qualified name of the class that depends on another
     * @param toClassFqn The fully qualified name of the class being depended upon
     */
    void addClassDependency(String fromClassFqn, String toClassFqn);

    /**
     * Records a dependency from one package to another
     *
     * @param fromPackageName The package that depends on another
     * @param toPackageName The package being depended upon
     */
    void addPackageDependency(String fromPackageName, String toPackageName);

    /**
     * Records the source file location for a class
     *
     * @param classFqn The fully qualified name of the class
     * @param sourceFilePath The path to the source file containing the class
     */
    void recordClassLocation(String classFqn, String sourceFilePath);

    /**
     * Registers a package as being part of the codebase
     *
     * @param packageName The package name to register
     */
    void registerPackage(String packageName);
}
} } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/GraphDependencyCollector.java ================================================ package org.hjug.graphbuilder; import java.util.HashSet; import java.util.Set; import lombok.Getter; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; public class GraphDependencyCollector implements DependencyCollector { @Getter private final Graph classReferencesGraph; @Getter private final Graph packageReferencesGraph; @Getter private final Set packagesInCodebase = new HashSet<>(); public GraphDependencyCollector( Graph classReferencesGraph, Graph packageReferencesGraph) { this.classReferencesGraph = classReferencesGraph; this.packageReferencesGraph = packageReferencesGraph; } @Override public void addClassDependency(String fromClassFqn, String toClassFqn) { if (fromClassFqn.equals(toClassFqn)) { return; } classReferencesGraph.addVertex(fromClassFqn); classReferencesGraph.addVertex(toClassFqn); if (!classReferencesGraph.containsEdge(fromClassFqn, toClassFqn)) { classReferencesGraph.addEdge(fromClassFqn, toClassFqn); } else { DefaultWeightedEdge edge = classReferencesGraph.getEdge(fromClassFqn, toClassFqn); classReferencesGraph.setEdgeWeight(edge, classReferencesGraph.getEdgeWeight(edge) + 1); } } @Override public void addPackageDependency(String fromPackageName, String toPackageName) { if (fromPackageName.equals(toPackageName)) { return; } packageReferencesGraph.addVertex(fromPackageName); packageReferencesGraph.addVertex(toPackageName); if (!packageReferencesGraph.containsEdge(fromPackageName, toPackageName)) { packageReferencesGraph.addEdge(fromPackageName, toPackageName); } else { DefaultWeightedEdge edge = packageReferencesGraph.getEdge(fromPackageName, toPackageName); packageReferencesGraph.setEdgeWeight(edge, packageReferencesGraph.getEdgeWeight(edge) + 1); } } @Override public void recordClassLocation(String classFqn, String sourceFilePath) 
{ // This will be handled by JavaVisitor which maintains the mapping } @Override public void registerPackage(String packageName) { packagesInCodebase.add(packageName); } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/JavaGraphBuilder.java ================================================ package org.hjug.graphbuilder; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; import org.hjug.graphbuilder.visitor.JavaMethodDeclarationVisitor; import org.hjug.graphbuilder.visitor.JavaVariableTypeVisitor; import org.hjug.graphbuilder.visitor.JavaVisitor; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedWeightedGraph; import org.jgrapht.graph.DefaultWeightedEdge; import org.openrewrite.ExecutionContext; import org.openrewrite.InMemoryExecutionContext; import org.openrewrite.java.JavaParser; @Slf4j public class JavaGraphBuilder { /** * Given a java source directory, return a CodebaseGraphDTO using default configuration * * @param srcDirectory The source directory to analyze * @param excludeTests Whether to exclude test files * @param testSourceDirectory The test source directory pattern to exclude * @return CodebaseGraphDTO * @throws IOException */ public CodebaseGraphDTO getCodebaseGraphDTO(String srcDirectory, boolean excludeTests, String testSourceDirectory) throws IOException { GraphBuilderConfig config = GraphBuilderConfig.builder() .excludeTests(excludeTests) .testSourceDirectory(testSourceDirectory) .build(); return getCodebaseGraphDTO(srcDirectory, config); } /** * Given a java source directory and configuration, return a CodebaseGraphDTO * * @param srcDirectory The source directory to analyze * @param config The configuration for the graph builder * @return CodebaseGraphDTO * @throws 
IOException */ public CodebaseGraphDTO getCodebaseGraphDTO(String srcDirectory, GraphBuilderConfig config) throws IOException { if (srcDirectory == null || srcDirectory.isEmpty()) { throw new IllegalArgumentException("Source directory cannot be null or empty"); } return processWithOpenRewrite(srcDirectory, config); } private CodebaseGraphDTO processWithOpenRewrite(String srcDir, GraphBuilderConfig config) throws IOException { File srcDirectory = new File(srcDir); JavaParser javaParser = JavaParser.fromJavaVersion().build(); ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace); final Graph classReferencesGraph = new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class); final Graph packageReferencesGraph = new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class); final GraphDependencyCollector dependencyCollector = new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph); final JavaVisitor javaVisitor = new JavaVisitor<>(dependencyCollector); final JavaVariableTypeVisitor javaVariableTypeVisitor = new JavaVariableTypeVisitor<>(dependencyCollector); final JavaMethodDeclarationVisitor javaMethodDeclarationVisitor = new JavaMethodDeclarationVisitor<>(dependencyCollector); try (Stream pathStream = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) { List list; if (config.isExcludeTests()) { list = pathStream .filter(file -> !file.toString().contains(config.getTestSourceDirectory())) .collect(Collectors.toList()); } else { list = pathStream.collect(Collectors.toList()); } javaParser .parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx) .forEach(cu -> { javaVisitor.visit(cu, ctx); javaVariableTypeVisitor.visit(cu, ctx); javaMethodDeclarationVisitor.visit(cu, ctx); }); } removeClassesNotInCodebase(dependencyCollector.getPackagesInCodebase(), classReferencesGraph); return new CodebaseGraphDTO( classReferencesGraph, packageReferencesGraph, javaVisitor.getClassToSourceFilePathMapping()); } // remove node if 
package not in codebase void removeClassesNotInCodebase( Set packagesInCodebase, Graph classReferencesGraph) { // collect nodes to remove Set classesToRemove = new HashSet<>(); for (String classFqn : classReferencesGraph.vertexSet()) { if (!packagesInCodebase.contains(getPackage(classFqn))) { classesToRemove.add(classFqn); } } classReferencesGraph.removeAllVertices(classesToRemove); } String getPackage(String fqn) { // handle no package if (!fqn.contains(".")) { return ""; } int lastIndex = fqn.lastIndexOf("."); return fqn.substring(0, lastIndex); } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/BaseCodebaseVisitor.java ================================================ package org.hjug.graphbuilder.visitor; import lombok.Getter; import org.hjug.graphbuilder.DependencyCollector; import org.openrewrite.java.JavaIsoVisitor; @Getter public abstract class BaseCodebaseVisitor

extends JavaIsoVisitor

{ protected final DependencyCollector dependencyCollector; protected BaseCodebaseVisitor(DependencyCollector dependencyCollector) { this.dependencyCollector = dependencyCollector; } protected abstract String getCurrentOwnerFqn(); } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/BaseTypeProcessor.java ================================================ package org.hjug.graphbuilder.visitor; import lombok.extern.slf4j.Slf4j; import org.hjug.graphbuilder.DependencyCollector; import org.openrewrite.Cursor; import org.openrewrite.java.service.AnnotationService; import org.openrewrite.java.tree.Expression; import org.openrewrite.java.tree.J; import org.openrewrite.java.tree.JavaType; import org.openrewrite.java.tree.TypeTree; @Slf4j public abstract class BaseTypeProcessor { private final TypeDependencyExtractor typeDependencyExtractor = new TypeDependencyExtractor(); protected abstract DependencyCollector getDependencyCollector(); protected void processType(String ownerFqn, JavaType javaType) { if (javaType == null || javaType instanceof JavaType.Unknown) { return; } for (String dependency : typeDependencyExtractor.extractDependencies(javaType)) { getDependencyCollector().addClassDependency(ownerFqn, dependency); } } protected void processAnnotation(String ownerFqn, J.Annotation annotation, Cursor cursor) { if (annotation.getType() instanceof JavaType.Unknown) { return; } JavaType.Class type = (JavaType.Class) annotation.getType(); if (null != type) { String annotationFqn = type.getFullyQualifiedName(); log.debug("Variable Annotation FQN: {}", annotationFqn); getDependencyCollector().addClassDependency(ownerFqn, annotationFqn); if (null != annotation.getArguments()) { for (Expression argument : annotation.getArguments()) { processType(ownerFqn, argument.getType()); } } } } protected void processTypeParameter(String ownerFqn, J.TypeParameter typeParameter, Cursor cursor) { if (null != 
typeParameter.getBounds()) { for (TypeTree bound : typeParameter.getBounds()) { processType(ownerFqn, bound.getType()); } } if (!typeParameter.getAnnotations().isEmpty()) { for (J.Annotation annotation : typeParameter.getAnnotations()) { processAnnotation(ownerFqn, annotation, cursor); } } } protected void processAnnotations(String ownerFqn, Cursor cursor) { AnnotationService annotationService = new AnnotationService(); for (J.Annotation annotation : annotationService.getAllAnnotations(cursor)) { processAnnotation(ownerFqn, annotation, cursor); } } protected String getPackageFromFqn(String fqn) { if (!fqn.contains(".")) { return ""; } int lastIndex = fqn.lastIndexOf("."); return fqn.substring(0, lastIndex); } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/FqnCapturingProcessor.java ================================================ package org.hjug.graphbuilder.visitor; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.openrewrite.java.tree.J; public interface FqnCapturingProcessor { default J.ClassDeclaration captureClassDeclarations( J.ClassDeclaration classDecl, Map> fqns) { // get class fqn (including "$") String fqn = classDecl.getType().getFullyQualifiedName(); String currentPackage = getPackage(fqn); String className = getClassName(fqn); Map classesInPackage = fqns.getOrDefault(currentPackage, new HashMap<>()); if (className.contains("$")) { String normalizedClassName = className.replace('$', '.'); List parts = Arrays.asList(normalizedClassName.split("\\.")); for (int i = 0; i < parts.size(); i++) { String key = String.join(".", parts.subList(i, parts.size())); classesInPackage.put(key, currentPackage + "." 
+ normalizedClassName); } } else { classesInPackage.put(className, fqn); } fqns.put(currentPackage, classesInPackage); return classDecl; } default String getPackage(String fqn) { // handle no package if (!fqn.contains(".")) { return ""; } int lastIndex = fqn.lastIndexOf("."); return fqn.substring(0, lastIndex); } /** * * @param fqn * @return Class name (including "$") after last period in FQN */ default String getClassName(String fqn) { // handle no package if (!fqn.contains(".")) { return fqn; } int lastIndex = fqn.lastIndexOf("."); return fqn.substring(lastIndex + 1); } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/JavaClassDeclarationVisitor.java ================================================ package org.hjug.graphbuilder.visitor; import java.util.List; import lombok.extern.slf4j.Slf4j; import org.hjug.graphbuilder.DependencyCollector; import org.openrewrite.java.tree.*; @Slf4j public class JavaClassDeclarationVisitor

extends BaseCodebaseVisitor

{ private final BaseTypeProcessor typeProcessor; private String currentOwnerFqn; public JavaClassDeclarationVisitor(DependencyCollector dependencyCollector) { super(dependencyCollector); this.typeProcessor = new BaseTypeProcessor() { @Override protected DependencyCollector getDependencyCollector() { return dependencyCollector; } }; } @Override public J.ClassDeclaration visitClassDeclaration(J.ClassDeclaration classDecl, P p) { JavaType.FullyQualified type = classDecl.getType(); if (type == null) { log.warn("ClassDeclaration has null type, skipping: {}", classDecl.getSimpleName()); return classDecl; } String owningFqn = type.getFullyQualifiedName(); String previousOwner = currentOwnerFqn; currentOwnerFqn = owningFqn; try { typeProcessor.processType(owningFqn, type); TypeTree extendsTypeTree = classDecl.getExtends(); if (null != extendsTypeTree) { typeProcessor.processType(owningFqn, extendsTypeTree.getType()); } List implementsTypeTree = classDecl.getImplements(); if (null != implementsTypeTree) { for (TypeTree typeTree : implementsTypeTree) { typeProcessor.processType(owningFqn, typeTree.getType()); } } for (J.Annotation leadingAnnotation : classDecl.getLeadingAnnotations()) { typeProcessor.processAnnotation(owningFqn, leadingAnnotation, getCursor()); } if (null != classDecl.getTypeParameters()) { for (J.TypeParameter typeParameter : classDecl.getTypeParameters()) { typeProcessor.processTypeParameter(owningFqn, typeParameter, getCursor()); } } return super.visitClassDeclaration(classDecl, p); } finally { currentOwnerFqn = previousOwner; } } @Override public J.MethodInvocation visitMethodInvocation(J.MethodInvocation method, P p) { J.MethodInvocation methodInvocation = super.visitMethodInvocation(method, p); if (currentOwnerFqn == null) { return methodInvocation; } JavaType.Method methodType = methodInvocation.getMethodType(); if (null != methodType && null != methodType.getDeclaringType()) { typeProcessor.processType(currentOwnerFqn, methodType.getDeclaringType()); 
} if (null != methodInvocation.getTypeParameters() && !methodInvocation.getTypeParameters().isEmpty()) { for (Expression typeParameter : methodInvocation.getTypeParameters()) { typeProcessor.processType(currentOwnerFqn, typeParameter.getType()); } } return methodInvocation; } @Override public J.NewClass visitNewClass(J.NewClass newClass, P p) { J.NewClass result = super.visitNewClass(newClass, p); if (currentOwnerFqn != null) { typeProcessor.processType(currentOwnerFqn, newClass.getType()); } return result; } @Override public J.Lambda visitLambda(J.Lambda lambda, P p) { if (currentOwnerFqn != null && lambda.getType() != null) { typeProcessor.processType(currentOwnerFqn, lambda.getType()); } // Recursively visit the lambda body to capture method invocations and type references // The super.visitLambda call will traverse into the lambda's body and parameters return super.visitLambda(lambda, p); } @Override public J.If visitIf(J.If iff, P p) { return super.visitIf(iff, p); } @Override public J.ForLoop visitForLoop(J.ForLoop forLoop, P p) { return super.visitForLoop(forLoop, p); } @Override public J.ForEachLoop visitForEachLoop(J.ForEachLoop forEachLoop, P p) { return super.visitForEachLoop(forEachLoop, p); } @Override public J.WhileLoop visitWhileLoop(J.WhileLoop whileLoop, P p) { return super.visitWhileLoop(whileLoop, p); } @Override public J.DoWhileLoop visitDoWhileLoop(J.DoWhileLoop doWhileLoop, P p) { return super.visitDoWhileLoop(doWhileLoop, p); } @Override public J.Switch visitSwitch(J.Switch switchStatement, P p) { return super.visitSwitch(switchStatement, p); } @Override public J.Try visitTry(J.Try tryStatement, P p) { J.Try result = super.visitTry(tryStatement, p); if (currentOwnerFqn != null && tryStatement.getCatches() != null) { for (J.Try.Catch catchClause : tryStatement.getCatches()) { if (catchClause.getParameter().getTree() instanceof J.VariableDeclarations) { J.VariableDeclarations varDecl = (J.VariableDeclarations) 
catchClause.getParameter().getTree(); if (varDecl.getTypeExpression() != null) { typeProcessor.processType( currentOwnerFqn, varDecl.getTypeExpression().getType()); } } } } return result; } @Override public J.InstanceOf visitInstanceOf(J.InstanceOf instanceOf, P p) { J.InstanceOf result = super.visitInstanceOf(instanceOf, p); if (currentOwnerFqn != null && instanceOf.getClazz() != null && instanceOf.getClazz() instanceof TypeTree) { typeProcessor.processType(currentOwnerFqn, ((TypeTree) instanceOf.getClazz()).getType()); } return result; } @Override public J.TypeCast visitTypeCast(J.TypeCast typeCast, P p) { J.TypeCast result = super.visitTypeCast(typeCast, p); if (currentOwnerFqn != null && typeCast.getClazz() != null) { typeProcessor.processType( currentOwnerFqn, typeCast.getClazz().getTree().getType()); } return result; } @Override public J.MemberReference visitMemberReference(J.MemberReference memberRef, P p) { J.MemberReference result = super.visitMemberReference(memberRef, p); if (currentOwnerFqn != null && memberRef.getType() != null) { typeProcessor.processType(currentOwnerFqn, memberRef.getType()); } return result; } @Override public J.NewArray visitNewArray(J.NewArray newArray, P p) { J.NewArray result = super.visitNewArray(newArray, p); if (currentOwnerFqn != null && newArray.getType() != null) { typeProcessor.processType(currentOwnerFqn, newArray.getType()); } return result; } @Override protected String getCurrentOwnerFqn() { return currentOwnerFqn; } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/JavaFqnCapturingVisitor.java ================================================ package org.hjug.graphbuilder.visitor; import java.util.*; import lombok.Getter; import org.openrewrite.java.JavaIsoVisitor; import org.openrewrite.java.tree.J; /** * Captures Fully Qualified Names (FQN) of classes as they will be imported in import statements. 
* fqns map that is populated by this visitor is used to resolve Generic types. * * @param

*/ @Getter public class JavaFqnCapturingVisitor

extends JavaIsoVisitor

{ // consider using ConcurrentHashMap to scale performance // package -> name, FQN private final Map> fqnMap = new HashMap<>(); private final Set fqns = new HashSet<>(); @Override public J.ClassDeclaration visitClassDeclaration(J.ClassDeclaration classDecl, P p) { captureClassDeclarations(classDecl, fqnMap); return classDecl; } J.ClassDeclaration captureClassDeclarations(J.ClassDeclaration classDecl, Map> fqnMap) { String fqn = classDecl.getType().getFullyQualifiedName(); fqns.add(fqn); return classDecl; } String getPackage(String fqn) { // handle no package if (!fqn.contains(".")) { return ""; } int lastIndex = fqn.lastIndexOf("."); return fqn.substring(0, lastIndex); } /** * * @param fqn * @return Class name (including "$") after last period in FQN */ String getClassName(String fqn) { // handle no package if (!fqn.contains(".")) { return fqn; } int lastIndex = fqn.lastIndexOf("."); return fqn.substring(lastIndex + 1); } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/JavaMethodDeclarationVisitor.java ================================================ package org.hjug.graphbuilder.visitor; import java.util.List; import lombok.extern.slf4j.Slf4j; import org.hjug.graphbuilder.DependencyCollector; import org.openrewrite.java.tree.J; import org.openrewrite.java.tree.JavaType; import org.openrewrite.java.tree.NameTree; import org.openrewrite.java.tree.TypeTree; @Slf4j public class JavaMethodDeclarationVisitor

extends BaseCodebaseVisitor

{ private final BaseTypeProcessor typeProcessor; public JavaMethodDeclarationVisitor(DependencyCollector dependencyCollector) { super(dependencyCollector); this.typeProcessor = new BaseTypeProcessor() { @Override protected DependencyCollector getDependencyCollector() { return dependencyCollector; } }; } @Override public J.MethodDeclaration visitMethodDeclaration(J.MethodDeclaration method, P p) { J.MethodDeclaration methodDeclaration = super.visitMethodDeclaration(method, p); JavaType.Method methodType = methodDeclaration.getMethodType(); if (null == methodType) { log.warn("MethodDeclaration has null methodType, skipping: {}", methodDeclaration.getSimpleName()); return methodDeclaration; } if (methodType.getDeclaringType() == null) { log.warn("MethodDeclaration has null declaring type, skipping: {}", methodDeclaration.getSimpleName()); return methodDeclaration; } String owner = methodType.getDeclaringType().getFullyQualifiedName(); TypeTree returnTypeExpression = methodDeclaration.getReturnTypeExpression(); if (returnTypeExpression != null) { JavaType returnType = returnTypeExpression.getType(); if (!(returnType instanceof JavaType.Primitive)) { typeProcessor.processType(owner, returnType); } } for (J.Annotation leadingAnnotation : methodDeclaration.getLeadingAnnotations()) { typeProcessor.processAnnotation(owner, leadingAnnotation, getCursor()); } if (null != methodDeclaration.getTypeParameters()) { for (J.TypeParameter typeParameter : methodDeclaration.getTypeParameters()) { typeProcessor.processTypeParameter(owner, typeParameter, getCursor()); } } List throwz = methodDeclaration.getThrows(); if (null != throwz && !throwz.isEmpty()) { for (NameTree thrown : throwz) { typeProcessor.processType(owner, thrown.getType()); } } return methodDeclaration; } @Override protected String getCurrentOwnerFqn() { return null; } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/JavaVariableTypeVisitor.java 
================================================ package org.hjug.graphbuilder.visitor; import java.util.List; import lombok.extern.slf4j.Slf4j; import org.hjug.graphbuilder.DependencyCollector; import org.openrewrite.java.tree.*; @Slf4j public class JavaVariableTypeVisitor

extends BaseCodebaseVisitor

{ private final BaseTypeProcessor typeProcessor; public JavaVariableTypeVisitor(DependencyCollector dependencyCollector) { super(dependencyCollector); this.typeProcessor = new BaseTypeProcessor() { @Override protected DependencyCollector getDependencyCollector() { return dependencyCollector; } }; } @Override public J.VariableDeclarations visitVariableDeclarations(J.VariableDeclarations multiVariable, P p) { J.VariableDeclarations variableDeclarations = super.visitVariableDeclarations(multiVariable, p); List variables = variableDeclarations.getVariables(); if (null == variables || variables.isEmpty() || null == variables.get(0).getVariableType()) { log.debug("Skipping variable declaration with null variable type"); return variableDeclarations; } JavaType owner = variables.get(0).getVariableType().getOwner(); String ownerFqn = ""; if (owner instanceof JavaType.Method) { JavaType.Method m = (JavaType.Method) owner; if (m.getDeclaringType() == null) { log.warn("Method owner has null declaring type, skipping variable declaration"); return variableDeclarations; } ownerFqn = m.getDeclaringType().getFullyQualifiedName(); } else if (owner instanceof JavaType.Class) { JavaType.Class c = (JavaType.Class) owner; ownerFqn = c.getFullyQualifiedName(); } else { log.debug("Unknown owner type: {}", owner != null ? 
owner.getClass() : "null"); return variableDeclarations; } log.debug("Processing variable declaration in: {}", ownerFqn); TypeTree typeTree = variableDeclarations.getTypeExpression(); JavaType javaType; if (null != typeTree) { javaType = typeTree.getType(); } else { return variableDeclarations; } typeProcessor.processAnnotations(ownerFqn, getCursor()); if (javaType instanceof JavaType.Primitive) { return variableDeclarations; } typeProcessor.processType(ownerFqn, javaType); return variableDeclarations; } @Override protected String getCurrentOwnerFqn() { return null; } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/JavaVisitor.java ================================================ package org.hjug.graphbuilder.visitor; import java.util.*; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.hjug.graphbuilder.DependencyCollector; import org.openrewrite.java.tree.*; @Slf4j public class JavaVisitor

extends BaseCodebaseVisitor

{ @Getter private final Map classToSourceFilePathMapping = new HashMap<>(); private final JavaClassDeclarationVisitor

javaClassDeclarationVisitor; public JavaVisitor(DependencyCollector dependencyCollector) { super(dependencyCollector); javaClassDeclarationVisitor = new JavaClassDeclarationVisitor<>(dependencyCollector); } @Override public J.ClassDeclaration visitClassDeclaration(J.ClassDeclaration classDecl, P p) { return javaClassDeclarationVisitor.visitClassDeclaration(classDecl, p); } // Map each class to its source file @Override public J.CompilationUnit visitCompilationUnit(J.CompilationUnit cu, P p) { J.CompilationUnit compilationUnit = super.visitCompilationUnit(cu, p); J.Package packageDeclaration = compilationUnit.getPackageDeclaration(); if (null == packageDeclaration) { return compilationUnit; } dependencyCollector.registerPackage(packageDeclaration.getPackageName()); for (J.ClassDeclaration aClass : compilationUnit.getClasses()) { String classFqn = aClass.getType().getFullyQualifiedName(); String sourcePath = compilationUnit.getSourcePath().toUri().toString(); classToSourceFilePathMapping.put(classFqn, sourcePath); dependencyCollector.recordClassLocation(classFqn, sourcePath); } return compilationUnit; } @Override protected String getCurrentOwnerFqn() { return null; } } ================================================ FILE: codebase-graph-builder/src/main/java/org/hjug/graphbuilder/visitor/TypeDependencyExtractor.java ================================================ package org.hjug.graphbuilder.visitor; import java.util.HashSet; import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.openrewrite.java.tree.JavaType; @Slf4j public class TypeDependencyExtractor { /** * Extracts all type dependencies from a JavaType * * @param javaType The type to extract dependencies from * @return Set of fully qualified type names that the given type depends on */ public Set extractDependencies(JavaType javaType) { Set dependencies = new HashSet<>(); if (javaType == null) { return dependencies; } extractDependenciesRecursive(javaType, dependencies); return dependencies; } 
private void extractDependenciesRecursive(JavaType javaType, Set dependencies) { if (javaType instanceof JavaType.Class) { extractFromClass((JavaType.Class) javaType, dependencies); } else if (javaType instanceof JavaType.Parameterized) { extractFromParameterized((JavaType.Parameterized) javaType, dependencies); } else if (javaType instanceof JavaType.GenericTypeVariable) { extractFromGenericTypeVariable((JavaType.GenericTypeVariable) javaType, dependencies); } else if (javaType instanceof JavaType.Array) { extractFromArray((JavaType.Array) javaType, dependencies); } } private void extractFromClass(JavaType.Class classType, Set dependencies) { log.debug("Class type FQN: {}", classType.getFullyQualifiedName()); dependencies.add(classType.getFullyQualifiedName()); extractAnnotations(classType, dependencies); } private void extractFromParameterized(JavaType.Parameterized parameterized, Set dependencies) { log.debug("Parameterized type FQN: {}", parameterized.getFullyQualifiedName()); dependencies.add(parameterized.getFullyQualifiedName()); extractAnnotations(parameterized, dependencies); log.debug("Nested Parameterized type parameters: {}", parameterized.getTypeParameters()); for (JavaType parameter : parameterized.getTypeParameters()) { extractDependenciesRecursive(parameter, dependencies); } } private void extractFromArray(JavaType.Array arrayType, Set dependencies) { log.debug("Array Element type: {}", arrayType.getElemType()); extractDependenciesRecursive(arrayType.getElemType(), dependencies); } private void extractFromGenericTypeVariable(JavaType.GenericTypeVariable typeVariable, Set dependencies) { log.debug("Type parameter type name: {}", typeVariable.getName()); for (JavaType bound : typeVariable.getBounds()) { if (bound instanceof JavaType.Class) { dependencies.add(((JavaType.Class) bound).getFullyQualifiedName()); } else if (bound instanceof JavaType.Parameterized) { dependencies.add(((JavaType.Parameterized) bound).getFullyQualifiedName()); } else { 
log.debug("Unknown type bound: {}", bound); } } } private void extractAnnotations(JavaType.FullyQualified fullyQualified, Set dependencies) { if (!fullyQualified.getAnnotations().isEmpty()) { for (JavaType.FullyQualified annotation : fullyQualified.getAnnotations()) { String annotationFqn = annotation.getFullyQualifiedName(); log.debug("Annotation FQN: {}", annotationFqn); dependencies.add(annotationFqn); } } } } ================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/JavaGraphBuilderTest.java ================================================ package org.hjug.graphbuilder; import static org.junit.jupiter.api.Assertions.*; import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; class JavaGraphBuilderTest { JavaGraphBuilder javaGraphBuilder = new JavaGraphBuilder(); @DisplayName("When source directory input param is empty or null throw IllegalArgumentException.") @Test void parseSourceDirectoryEmptyTest() { Assertions.assertThrows( IllegalArgumentException.class, () -> javaGraphBuilder.getCodebaseGraphDTO("", false, "")); Assertions.assertThrows( IllegalArgumentException.class, () -> javaGraphBuilder.getCodebaseGraphDTO(null, false, "")); } @DisplayName("Given a valid source directory input parameter return a valid graph.") @Test void parseSourceDirectoryTest() throws IOException { File srcDirectory = new File("src/test/resources/javaSrcDirectory"); CodebaseGraphDTO dto = javaGraphBuilder.getCodebaseGraphDTO(srcDirectory.getAbsolutePath(), false, ""); Graph classReferencesGraph = dto.getClassReferencesGraph(); assertNotNull(classReferencesGraph); assertEquals(5, classReferencesGraph.vertexSet().size()); assertEquals(7, classReferencesGraph.edgeSet().size()); 
assertTrue(classReferencesGraph.containsVertex("com.ideacrest.parser.testclasses.A")); assertTrue(classReferencesGraph.containsVertex("com.ideacrest.parser.testclasses.B")); assertTrue(classReferencesGraph.containsVertex("com.ideacrest.parser.testclasses.C")); assertTrue(classReferencesGraph.containsVertex("com.ideacrest.parser.testclasses.D")); assertTrue(classReferencesGraph.containsVertex("com.ideacrest.parser.testclasses.E")); assertTrue(classReferencesGraph.containsEdge( "com.ideacrest.parser.testclasses.A", "com.ideacrest.parser.testclasses.B")); assertTrue(classReferencesGraph.containsEdge( "com.ideacrest.parser.testclasses.B", "com.ideacrest.parser.testclasses.C")); assertTrue(classReferencesGraph.containsEdge( "com.ideacrest.parser.testclasses.C", "com.ideacrest.parser.testclasses.A")); assertTrue(classReferencesGraph.containsEdge( "com.ideacrest.parser.testclasses.C", "com.ideacrest.parser.testclasses.E")); assertTrue(classReferencesGraph.containsEdge( "com.ideacrest.parser.testclasses.D", "com.ideacrest.parser.testclasses.A")); assertTrue(classReferencesGraph.containsEdge( "com.ideacrest.parser.testclasses.D", "com.ideacrest.parser.testclasses.C")); assertTrue(classReferencesGraph.containsEdge( "com.ideacrest.parser.testclasses.E", "com.ideacrest.parser.testclasses.D")); // confirm edge weight calculations assertEquals( 1, getEdgeWeight( classReferencesGraph, "com.ideacrest.parser.testclasses.A", "com.ideacrest.parser.testclasses.B")); assertEquals( 2, getEdgeWeight( classReferencesGraph, "com.ideacrest.parser.testclasses.E", "com.ideacrest.parser.testclasses.D")); } private static double getEdgeWeight( Graph classReferencesGraph, String sourceVertex, String targetVertex) { return classReferencesGraph.getEdgeWeight(classReferencesGraph.getEdge(sourceVertex, targetVertex)); } @Test void removeClassesNotInCodebase() throws IOException { File srcDirectory = new File("src/test/resources/javaSrcDirectory"); CodebaseGraphDTO dto = 
javaGraphBuilder.getCodebaseGraphDTO(srcDirectory.getAbsolutePath(), false, ""); Graph classReferencesGraph = dto.getClassReferencesGraph(); classReferencesGraph.addVertex("org.favioriteoss.FunClass"); classReferencesGraph.addVertex("org.favioriteoss.AnotherFunClass"); DefaultWeightedEdge edge1 = classReferencesGraph.addEdge("com.ideacrest.parser.testclasses.A", "org.favioriteoss.FunClass"); DefaultWeightedEdge edge2 = classReferencesGraph.addEdge("com.ideacrest.parser.testclasses.A", "org.favioriteoss.AnotherFunClass"); assertTrue(classReferencesGraph.containsVertex("org.favioriteoss.FunClass")); assertTrue(classReferencesGraph.containsVertex("org.favioriteoss.AnotherFunClass")); Set packagesInCodebase = new HashSet<>(); packagesInCodebase.add("com.ideacrest.parser.testclasses"); javaGraphBuilder.removeClassesNotInCodebase(packagesInCodebase, classReferencesGraph); assertFalse(classReferencesGraph.containsVertex("org.favioriteoss.FunClass")); assertFalse(classReferencesGraph.containsVertex("org.favioriteoss.AnotherFunClass")); assertFalse(classReferencesGraph.containsEdge(edge1)); assertFalse(classReferencesGraph.containsEdge(edge2)); } } ================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaClassDeclarationVisitorTest.java ================================================ package org.hjug.graphbuilder.visitor; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.stream.Collectors; import org.hjug.graphbuilder.GraphDependencyCollector; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedWeightedGraph; import org.jgrapht.graph.DefaultWeightedEdge; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.openrewrite.ExecutionContext; import org.openrewrite.InMemoryExecutionContext; import org.openrewrite.java.JavaParser; class 
JavaClassDeclarationVisitorTest { @Test void visitClasses() throws IOException { File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses"); org.openrewrite.java.JavaParser javaParser = JavaParser.fromJavaVersion().build(); ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace); Graph classReferencesGraph = new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class); Graph packageReferencesGraph = new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class); GraphDependencyCollector dependencyCollector = new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph); JavaClassDeclarationVisitor javaVariableCapturingVisitor = new JavaClassDeclarationVisitor<>(dependencyCollector); List list = Files.walk(Paths.get(srcDirectory.getAbsolutePath())).collect(Collectors.toList()); javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> { javaVariableCapturingVisitor.visit(cu, ctx); }); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.A")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.B")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.C")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.D")); Assertions.assertTrue( classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.MyAnnotation")); Assertions.assertFalse(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.E")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.F")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.G")); } } ================================================ FILE: 
codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaFqnCapturingVisitorTest.java
================================================
package org.hjug.graphbuilder.visitor;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.openrewrite.ExecutionContext;
import org.openrewrite.InMemoryExecutionContext;
import org.openrewrite.java.JavaParser;

@Disabled
class JavaFqnCapturingVisitorTest {

    @Test
    void visitClasses() throws IOException {
        File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses");
        org.openrewrite.java.JavaParser javaParser = JavaParser.fromJavaVersion().build();
        ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace);

        JavaFqnCapturingVisitor javaFqnCapturingVisitor = new JavaFqnCapturingVisitor();

        // Files.walk must be closed to release directory handles.
        List<Path> list;
        try (Stream<Path> paths = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) {
            list = paths.collect(Collectors.toList());
        }
        javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> {
            javaFqnCapturingVisitor.visit(cu, ctx);
        });

        // package name -> (simple/partial name -> FQN)
        Map<String, Map<String, String>> fqns = javaFqnCapturingVisitor.getFqnMap();
        Map<String, String> processed = fqns.get("org.hjug.graphbuilder.visitor.testclasses");

        Assertions.assertEquals("org.hjug.graphbuilder.visitor.testclasses.A", processed.get("A"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass", processed.get("A.InnerClass"));
        Assertions.assertEquals("org.hjug.graphbuilder.visitor.testclasses.A.InnerClass", processed.get("InnerClass"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass.InnerInner",
                processed.get("A.InnerClass.InnerInner"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass.InnerInner",
                processed.get("InnerClass.InnerInner"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass.InnerInner", processed.get("InnerInner"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass.InnerInner.MegaInner",
                processed.get("A.InnerClass.InnerInner.MegaInner"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass.InnerInner.MegaInner",
                processed.get("InnerClass.InnerInner.MegaInner"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass.InnerInner.MegaInner",
                processed.get("InnerInner.MegaInner"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.InnerClass.InnerInner.MegaInner",
                processed.get("MegaInner"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.StaticInnerClass", processed.get("A.StaticInnerClass"));
        Assertions.assertEquals(
                "org.hjug.graphbuilder.visitor.testclasses.A.StaticInnerClass", processed.get("StaticInnerClass"));
        Assertions.assertEquals("org.hjug.graphbuilder.visitor.testclasses.NonPublic", processed.get("NonPublic"));
    }
}


================================================
FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaInitializerBlockVisitorTest.java
================================================
package org.hjug.graphbuilder.visitor;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.hjug.graphbuilder.GraphDependencyCollector;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedWeightedGraph;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.openrewrite.ExecutionContext;
import org.openrewrite.InMemoryExecutionContext;
import org.openrewrite.java.JavaParser;

class JavaInitializerBlockVisitorTest {

    @Test
    void visitInstanceInitializerBlocks() throws IOException {
        File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses/initializers");
        JavaParser javaParser = JavaParser.fromJavaVersion().build();
        ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace);

        Graph<String, DefaultWeightedEdge> classReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        Graph<String, DefaultWeightedEdge> packageReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        GraphDependencyCollector dependencyCollector =
                new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph);
        JavaClassDeclarationVisitor<ExecutionContext> classDeclarationVisitor =
                new JavaClassDeclarationVisitor<>(dependencyCollector);

        // Files.walk must be closed to release directory handles.
        List<Path> list;
        try (Stream<Path> paths = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) {
            list = paths.collect(Collectors.toList());
        }
        javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> {
            classDeclarationVisitor.visit(cu, ctx);
        });

        // Verify that the test class is in the graph
        Assertions.assertTrue(
                classReferencesGraph.containsVertex(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.InitializerBlockTestClass"),
                "InitializerBlockTestClass should be in the graph");

        // Verify ArrayList is captured from instance initializer block: new ArrayList<>()
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("java.util.ArrayList"),
                "ArrayList should be captured from instance initializer block");

        // Verify edge from InitializerBlockTestClass to ArrayList exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.InitializerBlockTestClass",
                        "java.util.ArrayList"),
                "Should have edge from InitializerBlockTestClass to ArrayList from initializer block");

        // Verify HashMap is captured from instance initializer block: new HashMap<>()
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("java.util.HashMap"),
                "HashMap should be captured from instance initializer block");

        // Verify edge from InitializerBlockTestClass to HashMap exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.InitializerBlockTestClass",
                        "java.util.HashMap"),
                "Should have edge from InitializerBlockTestClass to HashMap from initializer block");

        // Verify StringBuilder is captured from instance initializer block: new StringBuilder()
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("java.lang.StringBuilder"),
                "StringBuilder should be captured from instance initializer block");

        // Verify edge from InitializerBlockTestClass to StringBuilder exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.InitializerBlockTestClass",
                        "java.lang.StringBuilder"),
                "Should have edge from InitializerBlockTestClass to StringBuilder from initializer block");
    }

    @Test
    void visitStaticInitializerBlocks() throws IOException {
        File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses/initializers");
        JavaParser javaParser = JavaParser.fromJavaVersion().build();
        ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace);

        Graph<String, DefaultWeightedEdge> classReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        Graph<String, DefaultWeightedEdge> packageReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        GraphDependencyCollector dependencyCollector =
                new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph);
        JavaClassDeclarationVisitor<ExecutionContext> classDeclarationVisitor =
                new JavaClassDeclarationVisitor<>(dependencyCollector);

        // Files.walk must be closed to release directory handles.
        List<Path> list;
        try (Stream<Path> paths = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) {
            list = paths.collect(Collectors.toList());
        }
        javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> {
            classDeclarationVisitor.visit(cu, ctx);
        });

        // Verify that the complex test class is in the graph
        Assertions.assertTrue(
                classReferencesGraph.containsVertex(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.ComplexInitializerClass"),
                "ComplexInitializerClass should be in the graph");

        // Verify ConcurrentHashMap is captured from static initializer block
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("java.util.concurrent.ConcurrentHashMap"),
                "ConcurrentHashMap should be captured from static initializer block");

        // Verify edge from ComplexInitializerClass to ConcurrentHashMap exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.ComplexInitializerClass",
                        "java.util.concurrent.ConcurrentHashMap"),
                "Should have edge from ComplexInitializerClass to ConcurrentHashMap from static initializer");

        // Verify AtomicInteger is captured from static initializer block
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("java.util.concurrent.atomic.AtomicInteger"),
                "AtomicInteger should be captured from static initializer block");

        // Verify edge from ComplexInitializerClass to AtomicInteger exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.ComplexInitializerClass",
                        "java.util.concurrent.atomic.AtomicInteger"),
                "Should have edge from ComplexInitializerClass to AtomicInteger from static initializer");

        // Verify nested classes are captured from instance initializer
        Assertions.assertTrue(
                classReferencesGraph.containsVertex(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.ComplexInitializerClass$DataProcessor"),
                "DataProcessor nested class should be captured from instance initializer");

        Assertions.assertTrue(
                classReferencesGraph.containsVertex(
                        "org.hjug.graphbuilder.visitor.testclasses.initializers.ComplexInitializerClass$HelperService"),
                "HelperService nested class should be captured from instance initializer");
    }
}


================================================
FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaLambdaVisitorTest.java
================================================
package org.hjug.graphbuilder.visitor;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.hjug.graphbuilder.GraphDependencyCollector;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedWeightedGraph;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.openrewrite.ExecutionContext;
import org.openrewrite.InMemoryExecutionContext;
import org.openrewrite.java.JavaParser;

class JavaLambdaVisitorTest {

    @Test
    void visitLambdaBodiesRecursively() throws IOException {
        File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses/lambda");
        JavaParser javaParser = JavaParser.fromJavaVersion().build();
        ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace);

        Graph<String, DefaultWeightedEdge> classReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        Graph<String, DefaultWeightedEdge> packageReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        GraphDependencyCollector dependencyCollector =
                new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph);
        JavaClassDeclarationVisitor<ExecutionContext> classDeclarationVisitor =
                new JavaClassDeclarationVisitor<>(dependencyCollector);

        // Files.walk must be closed to release directory handles.
        List<Path> list;
        try (Stream<Path> paths = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) {
            list = paths.collect(Collectors.toList());
        }
        javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> {
            classDeclarationVisitor.visit(cu, ctx);
        });

        // Verify that the main test class is in the graph
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.lambda.LambdaTestClass"),
                "LambdaTestClass should be in the graph");

        // Verify that HelperClass is captured as a dependency
        // This is from field declaration AND from lambda body: helper.process(item)
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.lambda.HelperClass"),
                "HelperClass should be captured from lambda body method invocation");

        // Verify edge from LambdaTestClass to HelperClass exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.LambdaTestClass",
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.HelperClass"),
                "Should have edge from LambdaTestClass to HelperClass");

        // Verify that DataProcessor is captured from lambda body: new DataProcessor()
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.lambda.DataProcessor"),
                "DataProcessor should be captured from new class instantiation in lambda body");

        // Verify edge from LambdaTestClass to DataProcessor exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.LambdaTestClass",
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.DataProcessor"),
                "Should have edge from LambdaTestClass to DataProcessor from lambda body");

        // Verify that StringBuilder is captured from lambda body: new StringBuilder(s)
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("java.lang.StringBuilder"),
                "StringBuilder should be captured from new class instantiation in lambda body");

        // Verify edge from LambdaTestClass to StringBuilder exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.LambdaTestClass", "java.lang.StringBuilder"),
                "Should have edge from LambdaTestClass to StringBuilder from lambda body");

        // Verify that String is captured (from method invocations like s.toUpperCase())
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("java.lang.String"),
                "String should be captured from method invocations in lambda body");

        // Verify edge weight - multiple lambda usages should increase edge weight
        DefaultWeightedEdge edge = classReferencesGraph.getEdge(
                "org.hjug.graphbuilder.visitor.testclasses.lambda.LambdaTestClass",
                "org.hjug.graphbuilder.visitor.testclasses.lambda.DataProcessor");
        // DataProcessor is used twice: once in processWithLambda() and once in lambdaWithLocalVariable()
        Assertions.assertTrue(
                classReferencesGraph.getEdgeWeight(edge) >= 2.0,
                "Edge weight should reflect multiple uses of DataProcessor in lambda bodies");
    }

    @Test
    void visitNestedLambdaBodiesRecursively() throws IOException {
        File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses/lambda");
        JavaParser javaParser = JavaParser.fromJavaVersion().build();
        ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace);

        Graph<String, DefaultWeightedEdge> classReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        Graph<String, DefaultWeightedEdge> packageReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        GraphDependencyCollector dependencyCollector =
                new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph);
        JavaClassDeclarationVisitor<ExecutionContext> classDeclarationVisitor =
                new JavaClassDeclarationVisitor<>(dependencyCollector);

        // Files.walk must be closed to release directory handles.
        List<Path> list;
        try (Stream<Path> paths = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) {
            list = paths.collect(Collectors.toList());
        }
        javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> {
            classDeclarationVisitor.visit(cu, ctx);
        });

        // Verify that the nested lambda test class is in the graph
        Assertions.assertTrue(
                classReferencesGraph.containsVertex(
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.NestedLambdaTestClass"),
                "NestedLambdaTestClass should be in the graph");

        // Verify DataProcessor is captured from INNER lambda: new DataProcessor() inside nested lambda
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.lambda.DataProcessor"),
                "DataProcessor should be captured from inner nested lambda body");

        // Verify edge from NestedLambdaTestClass to DataProcessor exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.NestedLambdaTestClass",
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.DataProcessor"),
                "Should have edge from NestedLambdaTestClass to DataProcessor from nested lambda");

        // Verify HelperClass is captured from nested lambda method invocation
        Assertions.assertTrue(
                classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.lambda.HelperClass"),
                "HelperClass should be captured from nested lambda method invocation");

        // Verify edge from NestedLambdaTestClass to HelperClass exists
        Assertions.assertTrue(
                classReferencesGraph.containsEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.NestedLambdaTestClass",
                        "org.hjug.graphbuilder.visitor.testclasses.lambda.HelperClass"),
                "Should have edge from NestedLambdaTestClass to HelperClass from nested lambda");

        // Verify edge weight reflects multiple nested lambda usages
        DefaultWeightedEdge dataProcessorEdge = classReferencesGraph.getEdge(
                "org.hjug.graphbuilder.visitor.testclasses.lambda.NestedLambdaTestClass",
                "org.hjug.graphbuilder.visitor.testclasses.lambda.DataProcessor");
        // DataProcessor is used in multiple nested lambdas: processNestedLambdas() and deeplyNestedLambdaWithNewClass()
        Assertions.assertTrue(
                classReferencesGraph.getEdgeWeight(dataProcessorEdge) >= 2.0,
                "Edge weight should reflect multiple uses of DataProcessor in nested lambda bodies");

        // Verify that deeply nested instantiations are captured
        DefaultWeightedEdge helperEdge = classReferencesGraph.getEdge(
                "org.hjug.graphbuilder.visitor.testclasses.lambda.NestedLambdaTestClass",
                "org.hjug.graphbuilder.visitor.testclasses.lambda.HelperClass");
        // HelperClass is used in field declaration and in nested lambdas
        Assertions.assertTrue(
                classReferencesGraph.getEdgeWeight(helperEdge) >= 2.0,
                "Edge weight should reflect HelperClass usage in nested lambda blocks");
    }
}


================================================
FILE: 
codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaMethodDeclarationVisitorTest.java
================================================
package org.hjug.graphbuilder.visitor;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.hjug.graphbuilder.GraphDependencyCollector;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedWeightedGraph;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.openrewrite.ExecutionContext;
import org.openrewrite.InMemoryExecutionContext;
import org.openrewrite.java.JavaParser;

class JavaMethodDeclarationVisitorTest {

    @Test
    void visitMethodDeclarations() throws IOException {
        File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses");
        org.openrewrite.java.JavaParser javaParser = JavaParser.fromJavaVersion().build();
        ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace);

        Graph<String, DefaultWeightedEdge> classReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        Graph<String, DefaultWeightedEdge> packageReferencesGraph =
                new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        GraphDependencyCollector dependencyCollector =
                new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph);
        JavaMethodDeclarationVisitor<ExecutionContext> methodDeclarationVisitor =
                new JavaMethodDeclarationVisitor<>(dependencyCollector);

        // Files.walk must be closed to release directory handles.
        List<Path> list;
        try (Stream<Path> paths = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) {
            list = paths.collect(Collectors.toList());
        }
        javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> {
            methodDeclarationVisitor.visit(cu, ctx);
        });

        Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.A"));

        // TODO: Assert stuff
        /*
        Assertions.assertTrue(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.A"));
        Assertions.assertTrue(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.B"));
        Assertions.assertTrue(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.C"));
        Assertions.assertFalse(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.D"));
        Assertions.assertTrue(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.MyAnnotation"));
        Assertions.assertFalse(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.E"));
        Assertions.assertTrue(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.F"));
        Assertions.assertTrue(methodDeclarationVisitor.getClassReferencesGraph().containsVertex("org.hjug.javaVariableVisitorTestClasses.G"));*/
    }
}


================================================
FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaMethodInvocationVisitorTest.java
================================================
package org.hjug.graphbuilder.visitor;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.hjug.graphbuilder.GraphDependencyCollector;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleDirectedWeightedGraph;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.openrewrite.ExecutionContext;
import org.openrewrite.InMemoryExecutionContext;
import org.openrewrite.java.JavaParser;

class JavaMethodInvocationVisitorTest {

    @Test
    void visitMethodInvocations() throws IOException {
        File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses/methodInvocation");
        JavaParser javaParser = JavaParser.fromJavaVersion().build();
        ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace);

        Graph<String, DefaultWeightedEdge> classReferencesGraph =
                new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        Graph<String, DefaultWeightedEdge> packageReferencesGraph =
                new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        GraphDependencyCollector dependencyCollector =
                new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph);
        JavaClassDeclarationVisitor<ExecutionContext> classDeclarationVisitor =
                new JavaClassDeclarationVisitor<>(dependencyCollector);
        JavaVariableTypeVisitor<ExecutionContext> variableTypeVisitor =
                new JavaVariableTypeVisitor<>(dependencyCollector);

        // Files.walk must be closed to release directory handles.
        List<Path> list;
        try (Stream<Path> paths = Files.walk(Paths.get(srcDirectory.getAbsolutePath()))) {
            list = paths.collect(Collectors.toList());
        }
        javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> {
            classDeclarationVisitor.visit(cu, ctx);
            variableTypeVisitor.visit(cu, ctx);
        });

        Graph<String, DefaultWeightedEdge> graph = classReferencesGraph;

        Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.methodInvocation.A"));
        Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.methodInvocation.B"));
        Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.methodInvocation.C"));

        Assertions.assertEquals(
                3,
                graph.getEdgeWeight(graph.getEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.methodInvocation.A",
                        "org.hjug.graphbuilder.visitor.testclasses.methodInvocation.B")));
        Assertions.assertEquals(
                3,
                graph.getEdgeWeight(graph.getEdge(
                        "org.hjug.graphbuilder.visitor.testclasses.methodInvocation.A",
                        "org.hjug.graphbuilder.visitor.testclasses.methodInvocation.C")));
    }
}


================================================
FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaNewClassVisitorFullTest.java
================================================ package org.hjug.graphbuilder.visitor; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.stream.Collectors; import org.hjug.graphbuilder.GraphDependencyCollector; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.openrewrite.ExecutionContext; import org.openrewrite.InMemoryExecutionContext; import org.openrewrite.java.JavaParser; public class JavaNewClassVisitorFullTest { @Test void visitNewClass() throws IOException { File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses/newClass"); JavaParser javaParser = JavaParser.fromJavaVersion().build(); ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace); Graph classReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); Graph packageReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); GraphDependencyCollector dependencyCollector = new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph); final JavaVisitor javaVisitor = new JavaVisitor<>(dependencyCollector); final JavaVariableTypeVisitor javaVariableTypeVisitor = new JavaVariableTypeVisitor<>(dependencyCollector); final JavaMethodDeclarationVisitor javaMethodDeclarationVisitor = new JavaMethodDeclarationVisitor<>(dependencyCollector); List list = Files.walk(Paths.get(srcDirectory.getAbsolutePath())).collect(Collectors.toList()); javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> { javaVisitor.visit(cu, ctx); javaVariableTypeVisitor.visit(cu, ctx); javaMethodDeclarationVisitor.visit(cu, ctx); }); Graph graph = classReferencesGraph; 
Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.newClass.A")); Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.newClass.B")); Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.newClass.C")); // capturing counts of all types Assertions.assertEquals( 6, graph.getEdgeWeight(graph.getEdge( "org.hjug.graphbuilder.visitor.testclasses.newClass.A", "org.hjug.graphbuilder.visitor.testclasses.newClass.B"))); Assertions.assertEquals( 3, graph.getEdgeWeight(graph.getEdge( "org.hjug.graphbuilder.visitor.testclasses.newClass.A", "org.hjug.graphbuilder.visitor.testclasses.newClass.C"))); } } ================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaNewClassVisitorTest.java ================================================ package org.hjug.graphbuilder.visitor; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.stream.Collectors; import org.hjug.graphbuilder.GraphDependencyCollector; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.openrewrite.ExecutionContext; import org.openrewrite.InMemoryExecutionContext; import org.openrewrite.java.JavaParser; public class JavaNewClassVisitorTest { @Test void visitNewClass() throws IOException { File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses/newClass"); JavaParser javaParser = JavaParser.fromJavaVersion().build(); ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace); Graph classReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); Graph packageReferencesGraph = new 
SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); GraphDependencyCollector dependencyCollector = new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph); JavaClassDeclarationVisitor classDeclarationVisitor = new JavaClassDeclarationVisitor<>(dependencyCollector); JavaVariableTypeVisitor variableTypeVisitor = new JavaVariableTypeVisitor<>(dependencyCollector); List list = Files.walk(Paths.get(srcDirectory.getAbsolutePath())).collect(Collectors.toList()); javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> { classDeclarationVisitor.visit(cu, ctx); variableTypeVisitor.visit(cu, ctx); }); Graph graph = classReferencesGraph; Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.newClass.A")); Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.newClass.B")); Assertions.assertTrue(graph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.newClass.C")); // only looking for what was visited by classDeclarationVisitor and variableTypeVisitor Assertions.assertEquals( 5, graph.getEdgeWeight(graph.getEdge( "org.hjug.graphbuilder.visitor.testclasses.newClass.A", "org.hjug.graphbuilder.visitor.testclasses.newClass.B"))); Assertions.assertEquals( 3, graph.getEdgeWeight(graph.getEdge( "org.hjug.graphbuilder.visitor.testclasses.newClass.A", "org.hjug.graphbuilder.visitor.testclasses.newClass.C"))); } } ================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaVariableTypeVisitorTest.java ================================================ package org.hjug.graphbuilder.visitor; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.stream.Collectors; import org.hjug.graphbuilder.GraphDependencyCollector; import org.jgrapht.Graph; import 
org.jgrapht.graph.DefaultDirectedWeightedGraph; import org.jgrapht.graph.DefaultWeightedEdge; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.openrewrite.ExecutionContext; import org.openrewrite.InMemoryExecutionContext; import org.openrewrite.java.JavaParser; class JavaVariableTypeVisitorTest { @Test void visitClasses() throws IOException { File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses"); org.openrewrite.java.JavaParser javaParser = JavaParser.fromJavaVersion().build(); ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace); Graph classReferencesGraph = new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class); Graph packageReferencesGraph = new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class); GraphDependencyCollector dependencyCollector = new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph); JavaVariableTypeVisitor javaVariableCapturingVisitor = new JavaVariableTypeVisitor<>(dependencyCollector); List list = Files.walk(Paths.get(srcDirectory.getAbsolutePath())).collect(Collectors.toList()); javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> { javaVariableCapturingVisitor.visit(cu, ctx); }); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.A")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.B")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.C")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.D")); Assertions.assertTrue(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.E")); Assertions.assertTrue( classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.MyAnnotation")); 
Assertions.assertFalse(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.F")); Assertions.assertFalse(classReferencesGraph.containsVertex("org.hjug.graphbuilder.visitor.testclasses.G")); } } ================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/JavaVisitorTest.java ================================================ package org.hjug.graphbuilder.visitor; import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.stream.Collectors; import org.hjug.graphbuilder.GraphDependencyCollector; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import org.junit.jupiter.api.Test; import org.openrewrite.ExecutionContext; import org.openrewrite.InMemoryExecutionContext; import org.openrewrite.java.JavaParser; class JavaVisitorTest { @Test void visitClasses() throws IOException { File srcDirectory = new File("src/test/java/org/hjug/graphbuilder/visitor/testclasses"); org.openrewrite.java.JavaParser javaParser = JavaParser.fromJavaVersion().build(); ExecutionContext ctx = new InMemoryExecutionContext(Throwable::printStackTrace); final Graph classReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); final Graph packageReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); final GraphDependencyCollector dependencyCollector = new GraphDependencyCollector(classReferencesGraph, packageReferencesGraph); final JavaVisitor javaVisitor = new JavaVisitor<>(dependencyCollector); List list = Files.walk(Paths.get(srcDirectory.getAbsolutePath())).collect(Collectors.toList()); javaParser.parse(list, Paths.get(srcDirectory.getAbsolutePath()), ctx).forEach(cu -> { System.out.println(cu.getSourcePath()); 
javaVisitor.visit(cu, ctx); });
assertEquals(5, dependencyCollector.getPackagesInCodebase().size()); } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/A.java ================================================
/* NOTE(review): extraction has stripped generic type parameters throughout the fixtures
   below (e.g. "List>", "Map", raw "B"/"C" that were likely parameterized) — restore them
   from the repository before compiling; the visitor tests' edge-weight expectations
   depend on the exact parameterizations. */
/* Fixture A: exercises field, array, annotation, generic, and nested-class type references. */
package org.hjug.graphbuilder.visitor.testclasses; import java.util.List; import java.util.Map; @MyAnnotation public class A { // public A(B cB, C cC){}
B crazyType; @MyAnnotation @MyOtherAnnotation int intVar, intVar2; @MyAnnotation @MyOtherAnnotation C rawC; B b, b3; C c; D[] ds; D d; @MyAnnotation B[] arrayOfGenericBsWithCTypeParam; @MyAnnotation B bWithArrayOfCs; List> listWithNestedGenric; Map map; List> listOfListsOfNumbers; @MyAnnotation F doSomething(B paramB, C genericParam) { List> list3; A a2; B b2; C c2; H h = new H(); B.invocationTest(h); return new G(); } class InnerClass { class InnerInner { class MegaInner { D d; } } } static class StaticInnerClass {} } class NonPublic {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/B.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; public class B { static D invocationTest(T type) { return new D(); } static class InnerB extends A {} }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/C.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; public class C {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/D.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; public class D {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/E.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; public interface E { void foo(A a); }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/F.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; public class F {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/G.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; public class G extends F {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/H.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; public class H extends B {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/MyAnnotation.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; import java.lang.annotation.ElementType; import java.lang.annotation.Target; @Target(ElementType.TYPE_USE) @interface MyAnnotation {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/MyOtherAnnotation.java ================================================
package org.hjug.graphbuilder.visitor.testclasses; import java.lang.annotation.ElementType; import java.lang.annotation.Target; @Target(ElementType.TYPE_USE) @interface MyOtherAnnotation {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/initializers/ComplexInitializerClass.java ================================================
/* Fixture: static and instance initializer blocks that instantiate nested helper types. */
package org.hjug.graphbuilder.visitor.testclasses.initializers; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; public class ComplexInitializerClass { private static ConcurrentHashMap staticCache; private static AtomicInteger instanceCounter; private DataProcessor processor; private HelperService helper; // Static initializer with new class instantiations
static { staticCache = new ConcurrentHashMap<>(); instanceCounter = new AtomicInteger(0); staticCache.put("initialized", "true"); } // Instance initializer with dependencies
{ processor = new DataProcessor(); helper = new HelperService(); instanceCounter.incrementAndGet(); } // Another static initializer
static { staticCache.put("version", "1.0"); } public void process() { processor.execute(); } static class DataProcessor { public void execute() {} } static class HelperService {} }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/initializers/InitializerBlockTestClass.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.initializers; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class InitializerBlockTestClass { private List items; private Map counters; private StringBuilder builder; // Instance initializer block
{ items = new ArrayList<>(); counters = new HashMap<>(); builder = new StringBuilder("Initialized"); } // Static initializer block
static { System.out.println("Static initializer"); } // Another instance initializer block with method invocations
{ items.add("default"); counters.put("default", 0); builder.append(" with defaults"); } public InitializerBlockTestClass() { // Constructor
} }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/lambda/DataProcessor.java
================================================
/* NOTE(review): generic type parameters below were stripped by extraction
   (e.g. "List items", "List> nestedItems", "List listB") — restore from the
   repository before compiling. */
package org.hjug.graphbuilder.visitor.testclasses.lambda; public class DataProcessor { public String transform(String data) { return data; } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/lambda/HelperClass.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.lambda; public class HelperClass { public String process(String input) { return input.toUpperCase(); } public static String staticProcess(String input) { return input.toLowerCase(); } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/lambda/LambdaTestClass.java ================================================
/* Fixture: dependencies created inside lambda bodies (method calls, new-class
   expressions, method references, casts) that the lambda visitor must capture. */
package org.hjug.graphbuilder.visitor.testclasses.lambda; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; public class LambdaTestClass { private List items = new ArrayList<>(); private HelperClass helper = new HelperClass(); public void processWithLambda() { // Lambda with method invocation on helper class
items.forEach(item -> helper.process(item)); // Lambda with multiple method invocations
items.stream().map(s -> s.toUpperCase()).filter(s -> s.length() > 5).collect(Collectors.toList()); // Lambda with new class instantiation - creates dependency on DataProcessor
items.stream().map(s -> new DataProcessor().transform(s)).collect(Collectors.toList()); // Lambda with new StringBuilder instantiation
items.stream().map(s -> new StringBuilder(s)).collect(Collectors.toList()); // Nested lambda
items.stream() .map(s -> s.chars().mapToObj(c -> String.valueOf((char) c)).collect(Collectors.joining())) .collect(Collectors.toList()); // Lambda with static method reference
items.stream().map(HelperClass::staticProcess).forEach(System.out::println); // Lambda with type cast
items.stream().map(s -> (CharSequence) s).collect(Collectors.toList()); } public void lambdaWithLocalVariable() { items.forEach(item -> { DataProcessor processor = new DataProcessor(); String processed = processor.transform(item); System.out.println(processed); }); } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/lambda/NestedLambdaTestClass.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.lambda; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; public class NestedLambdaTestClass { private List> nestedItems = new ArrayList<>(); private HelperClass helper = new HelperClass(); public void processNestedLambdas() { // Nested lambda with DataProcessor instantiation in inner lambda
nestedItems.stream() .map(innerList -> innerList.stream() .map(s -> new DataProcessor().transform(s)) .collect(Collectors.toList())) .collect(Collectors.toList()); // Nested lambda with HelperClass method invocation in inner lambda
nestedItems.stream() .flatMap(innerList -> innerList.stream().map(s -> helper.process(s))) .collect(Collectors.toList()); // Triple nested lambda with multiple dependencies
nestedItems.stream() .map(outerList -> outerList.stream() .map(middleItem -> middleItem .chars() .mapToObj(c -> new DataProcessor().transform(String.valueOf((char) c))) .collect(Collectors.joining())) .collect(Collectors.toList())) .collect(Collectors.toList()); } public void deeplyNestedLambdaWithNewClass() { // Deeply nested lambda creating new instances at each level
nestedItems.stream() .map(level1 -> { DataProcessor processor1 = new DataProcessor(); return level1.stream() .map(level2 -> { HelperClass helper2 = new HelperClass(); return helper2.process(processor1.transform(level2)); }) .collect(Collectors.toList()); }) .collect(Collectors.toList()); } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/methodInvocation/A.java ================================================
/* Fixture: three invocationTest calls back the weight-3 assertions in JavaMethodInvocationVisitorTest. */
package org.hjug.graphbuilder.visitor.testclasses.methodInvocation; public class A { A doSomething() { B.invocationTest(new D()); A a = B.invocationTest(new D()); // TODO: add visitor for J.ReturnType
return B.invocationTest(new D()); } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/methodInvocation/B.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.methodInvocation; public class B { static A invocationTest(T type) { return new A(); } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/methodInvocation/C.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.methodInvocation; public class C extends B {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/methodInvocation/D.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.methodInvocation; public class D extends C {}
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/newClass/A.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.newClass; import java.util.ArrayList; import java.util.List; public class A { B newClassMethod() { new C(); C c = new C(); // var treated like "B", counts as 2
var b = new B(null); // <> treated like , counts as 2
List listB = new ArrayList<>(); // TODO: add visitor for J.ReturnType
return new B(c); } }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/newClass/B.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.newClass; public class B { public B(C c) {} }
================================================ FILE: codebase-graph-builder/src/test/java/org/hjug/graphbuilder/visitor/testclasses/newClass/C.java ================================================
package org.hjug.graphbuilder.visitor.testclasses.newClass; public class C {}
================================================ FILE: codebase-graph-builder/src/test/resources/javaSrcDirectory/com/ideacrest/parser/testclasses/A.java ================================================
package com.ideacrest.parser.testclasses; public class A { B b; }
================================================ FILE: codebase-graph-builder/src/test/resources/javaSrcDirectory/com/ideacrest/parser/testclasses/B.java ================================================
package com.ideacrest.parser.testclasses; public class B { C c; }
================================================ FILE: codebase-graph-builder/src/test/resources/javaSrcDirectory/com/ideacrest/parser/testclasses/C.java ================================================
package com.ideacrest.parser.testclasses; public class C { A a; E e; }
================================================ FILE: codebase-graph-builder/src/test/resources/javaSrcDirectory/com/ideacrest/parser/testclasses/D.java ================================================
package com.ideacrest.parser.testclasses; public class D { A a; C c; }
================================================ FILE: codebase-graph-builder/src/test/resources/javaSrcDirectory/com/ideacrest/parser/testclasses/E.java ================================================
package com.ideacrest.parser.testclasses; public class E { D d; D d2; }
================================================ FILE: cost-benefit-calculator/pom.xml
================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.costbenefitcalculator cost-benefit-calculator RefactorFirst Cost Benefit Calculator org.slf4j slf4j-api org.hjug.refactorfirst.codebasegraphbuilder codebase-graph-builder org.hjug.refactorfirst.changepronenessranker change-proneness-ranker org.hjug.refactorfirst.effortranker effort-ranker org.hjug.refactorfirst.dsm graph-algorithms org.hjug.refactorfirst.testresources test-resources ================================================ FILE: cost-benefit-calculator/src/main/java/org/hjug/cbc/CostBenefitCalculator.java ================================================ package org.hjug.cbc; import static net.sourceforge.pmd.RuleViolation.CLASS_NAME; import static net.sourceforge.pmd.RuleViolation.PACKAGE_NAME; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; import net.sourceforge.pmd.*; import net.sourceforge.pmd.lang.LanguageRegistry; import org.eclipse.jgit.api.errors.GitAPIException; import org.hjug.git.ChangePronenessRanker; import org.hjug.git.GitLogReader; import org.hjug.git.ScmLogInfo; import org.hjug.metrics.*; import org.hjug.metrics.rules.CBORule; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; @Slf4j public class CostBenefitCalculator implements AutoCloseable { private Report report; private final String repositoryPath; private GitLogReader gitLogReader; private final ChangePronenessRanker changePronenessRanker; private final Map classToSourceFilePathMapping; public CostBenefitCalculator(String repositoryPath, Map classToSourceFilePathMapping) { this.repositoryPath = repositoryPath; log.info("Initiating Cost Benefit calculation"); try { gitLogReader = new 
GitLogReader(new File(repositoryPath)); } catch (IOException e) { log.error("Failure to access Git repository", e); } changePronenessRanker = new ChangePronenessRanker(gitLogReader); this.classToSourceFilePathMapping = classToSourceFilePathMapping; } @Override public void close() throws Exception { gitLogReader.close(); } // copied from PMD's PmdTaskImpl.java and modified public void runPmdAnalysis() throws IOException { PMDConfiguration configuration = new PMDConfiguration(); try (PmdAnalysis pmd = PmdAnalysis.create(configuration)) { loadRules(pmd); try (Stream files = Files.walk(Paths.get(repositoryPath))) { files.filter(Files::isRegularFile).forEach(file -> pmd.files().addFile(file)); } report = pmd.performAnalysisAndCollectReport(); } } public void runPmdAnalysis(boolean excludeTests, String testSourceDirectory) throws IOException { PMDConfiguration configuration = new PMDConfiguration(); try (PmdAnalysis pmd = PmdAnalysis.create(configuration)) { loadRules(pmd); try (Stream files = Files.walk(Paths.get(repositoryPath))) { Stream pathStream; if (excludeTests) { pathStream = files.filter(Files::isRegularFile) .filter(file -> !file.toString().contains(testSourceDirectory)); } else { pathStream = files.filter(Files::isRegularFile); } pathStream.forEach(file -> pmd.files().addFile(file)); } report = pmd.performAnalysisAndCollectReport(); } } private void loadRules(PmdAnalysis pmd) { RuleSetLoader rulesetLoader = pmd.newRuleSetLoader(); pmd.addRuleSets(rulesetLoader.loadRuleSetsWithoutException(List.of("category/java/design.xml"))); Rule cboClassRule = new CBORule(); cboClassRule.setLanguage(LanguageRegistry.PMD.getLanguageByFullName("Java")); pmd.addRuleSet(RuleSet.forSingleRule(cboClassRule)); log.info("files to be scanned: " + Paths.get(repositoryPath)); } public List calculateGodClassCostBenefitValues() { List godClasses = getGodClasses(); List scmLogInfos = getRankedChangeProneness(godClasses); Map rankedLogInfosByPath = getRankedLogInfosByPath(scmLogInfos); 
/* NOTE(review): generic type parameters in this region were stripped by extraction
   (raw List/Map/Graph/Optional below); restore them from the repository — the
   parameterizations of getRankedChangeProneness and calculateSourceNodeCostBenefitValues
   are not safely inferable from this chunk alone. */
List rankedDisharmonies = godClasses.stream() .filter(godClass -> rankedLogInfosByPath.containsKey(godClass.getFileName())) .map(godClass -> new RankedDisharmony(godClass, rankedLogInfosByPath.get(godClass.getFileName()))) .sorted(Comparator.comparing(RankedDisharmony::getRawPriority).reversed()) .collect(Collectors.toList());
int godClassPriority = 1; for (RankedDisharmony rankedGodClassDisharmony : rankedDisharmonies) { rankedGodClassDisharmony.setPriority(godClassPriority++); } return rankedDisharmonies; }
/* Indexes ScmLogInfo by file path; on duplicate paths the later entry wins. */
private static Map getRankedLogInfosByPath(List scmLogInfos) { return scmLogInfos.stream().collect(Collectors.toMap(ScmLogInfo::getPath, logInfo -> logInfo, (a, b) -> b)); }
/* Indexes ScmLogInfo by class name; not referenced in this chunk — verify callers before removing. */
private static Map getRankedLogInfosByClass(List scmLogInfos) { return scmLogInfos.stream() .collect(Collectors.toMap(ScmLogInfo::getClassName, logInfo -> logInfo, (a, b) -> b)); }
/* Extracts GodClass entries from the PMD report and ranks them. */
private List getGodClasses() { List godClasses = new ArrayList<>(); for (RuleViolation violation : report.getViolations()) { if (violation.getRule().getName().contains("GodClass")) { GodClass godClass = new GodClass( violation.getAdditionalInfo().get(CLASS_NAME), getFileName(violation), violation.getAdditionalInfo().get(PACKAGE_NAME), violation.getDescription()); log.info("God Class identified: {}", godClass.getFileName()); godClasses.add(godClass); } } GodClassRanker godClassRanker = new GodClassRanker(); godClassRanker.rankGodClasses(godClasses); return godClasses; }
/* Computes change-proneness SCM info for each disharmony's backing file, resolving
   nested ("$") classes to their outer class's source file; best-effort — Git read
   failures are logged and the entry is skipped. */
public List getRankedChangeProneness(List disharmonies) { log.info("Calculating Change Proneness"); Map innerClassPaths = new ConcurrentHashMap<>(); Map scmLogInfosByPath = new ConcurrentHashMap<>(); List> scmLogInfos = disharmonies.parallelStream() .map(disharmony -> { String className = disharmony.getClassName(); String path = null; ScmLogInfo scmLogInfo = null; try { if (className.contains("$") && classToSourceFilePathMapping.containsKey( className.substring(0, className.indexOf("$")))) { path = classToSourceFilePathMapping.get(className.substring(0, className.indexOf("$"))); log.debug("Found source file {} for nested class: {}", path, className); innerClassPaths.put(className, path); } else { path = disharmony.getFileName(); try { log.debug("Reading scmLogInfo for {}", path); scmLogInfo = gitLogReader.fileLog(path); scmLogInfo.setClassName(className); log.debug("Successfully fetched scmLogInfo for {}", scmLogInfo.getPath()); scmLogInfosByPath.put(path, scmLogInfo); } catch (GitAPIException | IOException e) { log.error("Error reading Git repository contents.", e); } } } catch (NullPointerException e) { // Should not be reached
log.error( "Error looking up class SCM info. If this error is encountered, " + "please log a bug on the RefactorFirst project and describe if the class is a nested class, lambda, etc. \nClass: {}, Path: {}", className, path, e); } Optional scmLogInfoOptional = Optional.ofNullable(scmLogInfo); if (scmLogInfoOptional.isEmpty()) { log.warn("No scmLogInfo found for class: {} at path: {}", className, path); } return scmLogInfoOptional; }) .collect(Collectors.toList());
List> innerClassScmLogInfos = innerClassPaths.entrySet().parallelStream() .map(innerClassPathEntry -> { ScmLogInfo scmLogInfo = scmLogInfosByPath.get(innerClassPathEntry.getValue()); ScmLogInfo innerClassScmLogInfo = null; if (scmLogInfo == null) { String className = innerClassPathEntry.getKey(); String path = classToSourceFilePathMapping.get(className.substring(0, className.indexOf("$"))); log.debug("Reading scmLogInfo for inner class {}", canonicaliseURIStringForRepoLookup(path)); try { innerClassScmLogInfo = gitLogReader.fileLog(canonicaliseURIStringForRepoLookup(path)); innerClassScmLogInfo.setClassName(className); log.debug( "Successfully fetched scmLogInfo for inner class {} at {}", innerClassScmLogInfo.getClassName(), innerClassScmLogInfo.getPath()); scmLogInfosByPath.put(path, innerClassScmLogInfo); } catch (GitAPIException | IOException e) { log.error( "Error reading Git repository contents for class {} with file path {}", className, path, e); } } else { innerClassScmLogInfo = new ScmLogInfo( innerClassPathEntry.getValue(), innerClassPathEntry.getKey(), scmLogInfo.getEarliestCommit(), scmLogInfo.getMostRecentCommit(), scmLogInfo.getCommitCount()); String className = innerClassPathEntry.getKey(); innerClassScmLogInfo.setClassName(className); String path = classToSourceFilePathMapping.get(className.substring(0, className.indexOf("$"))); scmLogInfosByPath.put(path, innerClassScmLogInfo); } return Optional.ofNullable(innerClassScmLogInfo); }) .collect(Collectors.toList());
scmLogInfos.addAll(innerClassScmLogInfos); List sortedScmInfos = new ArrayList<>(scmLogInfos.stream() .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toList())); changePronenessRanker.rankChangeProneness(sortedScmInfos); return sortedScmInfos; }
/* Ranks highly coupled (CBO) classes by cost/benefit, mirroring the God-class flow. */
public List calculateCBOCostBenefitValues() { List cboClasses = getCBOClasses(); List scmLogInfos = getRankedChangeProneness(cboClasses); Map rankedLogInfosByPath = getRankedLogInfosByPath(scmLogInfos); for (Map.Entry stringScmLogInfoEntry : rankedLogInfosByPath.entrySet()) { log.debug( "ScmLogInfo entry: {} path: {}", stringScmLogInfoEntry.getKey(), stringScmLogInfoEntry.getValue().getPath()); } List rankedDisharmonies = new ArrayList<>(); for (CBOClass cboClass : cboClasses) { log.debug("CBO Class identified: {}", cboClass.getFileName()); log.debug( "ScmLogInfo: {}", rankedLogInfosByPath.get(cboClass.getFileName()).getPath()); rankedDisharmonies.add(new RankedDisharmony(cboClass, rankedLogInfosByPath.get(cboClass.getFileName()))); } rankedDisharmonies.sort( Comparator.comparing(RankedDisharmony::getRawPriority).reversed()); int cboPriority = 1; for (RankedDisharmony rankedCBODisharmony : rankedDisharmonies) { rankedCBODisharmony.setPriority(cboPriority++); } return rankedDisharmonies; }
/* Extracts CBORule violations from the PMD report. */
private List getCBOClasses() { List cboClasses = new ArrayList<>(); for (RuleViolation violation : report.getViolations()) { if (violation.getRule().getName().contains("CBORule")) { log.info(violation.getDescription()); CBOClass godClass = new CBOClass( violation.getAdditionalInfo().get(CLASS_NAME), getFileName(violation), violation.getAdditionalInfo().get(PACKAGE_NAME), violation.getDescription()); log.debug("Highly Coupled class identified: {}", godClass.getFileName()); cboClasses.add(godClass); } } return cboClasses; }
/* Ranks cycle-breaking edges: for each candidate edge, combines cycle count,
   edge weight, and source/target change proneness into a removal priority. */
public List calculateSourceNodeCostBenefitValues( Graph classGraph, Map edgeSourceNodeInfos, Map edgeTargetNodeInfos, Map edgeToRemoveCycleCounts, Set vertexesToRemove) { List sourceLogInfos = getRankedChangeProneness(new ArrayList<>(edgeSourceNodeInfos.values())); List targetLogInfos = getRankedChangeProneness(new ArrayList<>(edgeTargetNodeInfos.values())); List scmLogInfos = new ArrayList<>(sourceLogInfos.size() + targetLogInfos.size()); scmLogInfos.addAll(sourceLogInfos); scmLogInfos.addAll(targetLogInfos); Map sourceRankedLogInfosByPath = getRankedLogInfosByPath(scmLogInfos); List edgesThatNeedToBeRemoved = new ArrayList<>(); for (Map.Entry entry : edgeSourceNodeInfos.entrySet()) { String edgeSource = classGraph.getEdgeSource(entry.getKey()); String edgeSourcePath; if (edgeSource.contains("$")) { edgeSourcePath = classToSourceFilePathMapping.get(edgeSource.substring(0, edgeSource.indexOf("$"))); } else { edgeSourcePath = classToSourceFilePathMapping.get(edgeSource); } String edgeTarget = classGraph.getEdgeTarget(entry.getKey()); String edgeTargetPath; if (edgeTarget.contains("$")) { edgeTargetPath = classToSourceFilePathMapping.get(edgeTarget.substring(0, edgeTarget.indexOf("$"))); } else { edgeTargetPath = classToSourceFilePathMapping.get(edgeTarget); } String sourceNodeFileName = canonicaliseURIStringForRepoLookup(edgeSourcePath); String targetNodeFileName = canonicaliseURIStringForRepoLookup(edgeTargetPath); boolean sourceNodeShouldBeRemoved = vertexesToRemove.contains(edgeSource); boolean targetNodeShouldBeRemoved = vertexesToRemove.contains(edgeTarget); ScmLogInfo sourceScmLogInfo = null; if (sourceRankedLogInfosByPath.containsKey(sourceNodeFileName)) { sourceScmLogInfo = sourceRankedLogInfosByPath.get(sourceNodeFileName); }
/* NOTE(review): the guard below checks containsKey(sourceNodeFileName) but then reads
   get(targetNodeFileName) — looks like a copy-paste bug; should it test
   containsKey(targetNodeFileName)? Confirm intent before fixing. */
ScmLogInfo targetScmLogInfo = null; if (sourceRankedLogInfosByPath.containsKey(sourceNodeFileName)) { targetScmLogInfo = sourceRankedLogInfosByPath.get(targetNodeFileName); } RankedDisharmony edgeThatNeedsToBeRemoved = new RankedDisharmony( edgeSource, entry.getKey(), edgeToRemoveCycleCounts.get(entry.getKey()), (int) classGraph.getEdgeWeight(entry.getKey()), sourceNodeShouldBeRemoved, targetNodeShouldBeRemoved, sourceScmLogInfo, targetScmLogInfo); edgesThatNeedToBeRemoved.add(edgeThatNeedsToBeRemoved); } sortEdgesThatNeedToBeRemoved(edgesThatNeedToBeRemoved); // Then subtract edge weight
int rawPriority = 1; for (RankedDisharmony rankedDisharmony : edgesThatNeedToBeRemoved) { rankedDisharmony.setRawPriority(rawPriority++); } // Push edges with higher weights down in the priority list
edgesThatNeedToBeRemoved.sort(Comparator.comparing(RankedDisharmony::getRawPriority)); // Then set priority
int sourceNodePriority = 1; for (RankedDisharmony rankedSourceNodeDisharmony : edgesThatNeedToBeRemoved) { rankedSourceNodeDisharmony.setPriority(sourceNodePriority++); } return edgesThatNeedToBeRemoved; }
/* Comparator chain defining edge-removal priority (continues beyond this chunk). */
static void sortEdgesThatNeedToBeRemoved(List rankedDisharmonies) { // Sort by impact value
// Order by cycle count reversed (highest count bubbles to the top)
rankedDisharmonies.sort(Comparator.comparingInt(RankedDisharmony::getCycleCount) .reversed() // then by weight, with lowest weight edges bubbling to the top
.thenComparingInt(RankedDisharmony::getEffortRank) // then by change proneness
.thenComparingInt(rankedDisharmony -> -1 * rankedDisharmony.getChangePronenessRank()) .thenComparingInt(rankedDisharmony -> -1 * rankedDisharmony.getEdgeTargetChangePronenessRank()) // then if the source node is in the list of nodes to be removed
// multiplying by -1 reverses
// the sort order (reverse doesn't work in chained comparators)
            .thenComparingInt(rankedDisharmony -> -1 * rankedDisharmony.getSourceNodeShouldBeRemoved())
            // then if the target node is in the list of nodes to be removed
            .thenComparingInt(rankedDisharmony -> -1 * rankedDisharmony.getTargetNodeShouldBeRemoved()));
}

private String getFileName(RuleViolation violation) {
    String uriString = violation.getFileId().getUriString();
    return canonicaliseURIStringForRepoLookup(uriString);
}

/**
 * Strips the "file://…" prefix for the repository root from a URI string so the
 * remainder can be used as a repository-relative path for Git lookups.
 */
String canonicaliseURIStringForRepoLookup(String uriString) {
    if (repositoryPath.startsWith("/") || repositoryPath.startsWith("\\")) {
        return uriString.replace("file://" + repositoryPath.replace("\\", "/") + "/", "");
    }
    return uriString.replace("file:///" + repositoryPath.replace("\\", "/") + "/", "");
}
}


================================================
FILE: cost-benefit-calculator/src/main/java/org/hjug/cbc/CycleNode.java
================================================
package org.hjug.cbc;

import java.time.Instant;
import lombok.Data;
import org.hjug.git.ScmLogInfo;
import org.hjug.metrics.Disharmony;

/**
 * A single class participating in a class-reference cycle, carrying the SCM
 * (change-proneness) statistics used to rank the cycle.
 */
@Data
public class CycleNode implements Disharmony {

    private final String className;
    private String fileName;
    private Integer changePronenessRank;
    private Instant firstCommitTime;
    private Instant mostRecentCommitTime;
    private Integer commitCount;

    public CycleNode(String className, String fileName) {
        this.className = className;
        this.fileName = fileName;
    }

    /**
     * Package portion of the fully qualified class name.
     * BUG FIX: previously {@code substring(0, lastIndexOf('.'))} threw
     * StringIndexOutOfBoundsException for default-package classes, where
     * {@code lastIndexOf} returns -1. Returns "" in that case instead.
     */
    public String getPackageName() {
        int lastDot = className.lastIndexOf('.');
        return lastDot < 0 ? "" : className.substring(0, lastDot);
    }

    /** Copies commit statistics from the Git log info onto this node. */
    public void setScmLogInfo(ScmLogInfo scmLogInfo) {
        firstCommitTime = Instant.ofEpochSecond(scmLogInfo.getEarliestCommit());
        mostRecentCommitTime = Instant.ofEpochSecond(scmLogInfo.getMostRecentCommit());
        commitCount = scmLogInfo.getCommitCount();
    }
}


================================================
FILE: cost-benefit-calculator/src/main/java/org/hjug/cbc/CycleRanker.java
================================================
package org.hjug.cbc;

import
java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.hjug.dsm.CircularReferenceChecker; import org.hjug.graphbuilder.CodebaseGraphDTO; import org.hjug.graphbuilder.JavaGraphBuilder; import org.jgrapht.Graph; import org.jgrapht.graph.AsSubgraph; import org.jgrapht.graph.DefaultWeightedEdge; @RequiredArgsConstructor @Slf4j public class CycleRanker { private final String repositoryPath; private final JavaGraphBuilder javaGraphBuilder = new JavaGraphBuilder(); @Getter private Graph classReferencesGraph; @Getter private CodebaseGraphDTO codebaseGraphDTO; @Getter private Map classNamesAndPaths = new HashMap<>(); @Getter private Map fqnsAndPaths = new HashMap<>(); public void generateClassReferencesGraph(boolean excludeTests, String testSourceDirectory) { try { codebaseGraphDTO = javaGraphBuilder.getCodebaseGraphDTO(repositoryPath, excludeTests, testSourceDirectory); classReferencesGraph = codebaseGraphDTO.getClassReferencesGraph(); loadClassNamesAndPaths(); /*for (Map.Entry stringStringEntry : fqnsAndPaths.entrySet()) { log.info(stringStringEntry.getKey() + " : " + stringStringEntry.getValue()); }*/ } catch (IOException e) { throw new RuntimeException(e); } } public List performCycleAnalysis(boolean excludeTests, String testSourceDirectory) { List rankedCycles = new ArrayList<>(); try { boolean calculateCycleChurn = false; generateClassReferencesGraph(excludeTests, testSourceDirectory); identifyRankedCycles(rankedCycles); sortRankedCycles(rankedCycles, calculateCycleChurn); setPriorities(rankedCycles); } catch (IOException e) { throw new RuntimeException(e); } return rankedCycles; } private void identifyRankedCycles(List rankedCycles) throws IOException { CircularReferenceChecker circularReferenceChecker = new 
CircularReferenceChecker(); Map> cycles = circularReferenceChecker.getCycles(classReferencesGraph); cycles.forEach((vertex, subGraph) -> { // TODO: Calculate min cuts for smaller graphs - has a runtime of O(V^4) for a graph /*Set minCutEdges; GusfieldGomoryHuCutTree gusfieldGomoryHuCutTree = new GusfieldGomoryHuCutTree<>(new AsUndirectedGraph<>(subGraph)); double minCut = gusfieldGomoryHuCutTree.calculateMinCut(); minCutEdges = gusfieldGomoryHuCutTree.getCutEdges();*/ List cycleNodes = subGraph.vertexSet().stream() .map(classInCycle -> new CycleNode(classInCycle, classNamesAndPaths.get(classInCycle))) // .peek(cycleNode -> log.info(cycleNode.toString())) .collect(Collectors.toList()); rankedCycles.add(createRankedCycle(vertex, subGraph, cycleNodes, 0.0, new HashSet<>())); }); } public CycleNode classToCycleNode(String fqnClass) { return new CycleNode(fqnClass, fqnsAndPaths.get(fqnClass)); } private RankedCycle createRankedCycle( String vertex, AsSubgraph subGraph, List cycleNodes, double minCut, Set minCutEdges) { return new RankedCycle(vertex, subGraph.vertexSet(), subGraph.edgeSet(), minCut, minCutEdges, cycleNodes); } private static void sortRankedCycles(List rankedCycles, boolean calculateChurnForCycles) { if (calculateChurnForCycles) { rankedCycles.sort(Comparator.comparing(RankedCycle::getAverageChangeProneness)); int cpr = 1; for (RankedCycle rankedCycle : rankedCycles) { rankedCycle.setChangePronenessRank(cpr++); } } else { rankedCycles.sort(Comparator.comparing(RankedCycle::getRawPriority).reversed()); } } private static void setPriorities(List rankedCycles) { int priority = 1; for (RankedCycle rankedCycle : rankedCycles) { rankedCycle.setPriority(priority++); } } void loadClassNamesAndPaths() throws IOException { try (Stream walk = Files.walk(Paths.get(repositoryPath))) { walk.forEach(path -> { String filename = path.getFileName().toString(); if (filename.endsWith(".java")) { // extract package and class name String packageName = getPackageName(path); 
String uriString = path.toUri().toString(); String className = getClassName(filename); String canonicalUri = canonicaliseURIStringForRepoLookup(uriString); fqnsAndPaths.put(packageName + "." + className, canonicalUri); classNamesAndPaths.put(className, canonicalUri); } }); } } private static String getPackageName(Path path) { try { return Files.readAllLines(path).stream() .filter(line -> line.startsWith("package")) .map(line -> line.replace("package", "").replace(";", "").trim()) .findFirst() .orElse(""); } catch (IOException e) { throw new RuntimeException(e); } } private String canonicaliseURIStringForRepoLookup(String uriString) { if (repositoryPath.startsWith("/") || repositoryPath.startsWith("\\")) { return uriString.replace("file://" + repositoryPath.replace("\\", "/") + "/", ""); } return uriString.replace("file:///" + repositoryPath.replace("\\", "/") + "/", ""); } /** * Extract class name from java file name * Example : MyJavaClass.java becomes MyJavaClass * * @param javaFileName * @return */ private String getClassName(String javaFileName) { return javaFileName.substring(0, javaFileName.indexOf('.')); } } ================================================ FILE: cost-benefit-calculator/src/main/java/org/hjug/cbc/RankedCycle.java ================================================ package org.hjug.cbc; import java.util.HashSet; import java.util.List; import java.util.Set; import lombok.Data; import lombok.extern.slf4j.Slf4j; import org.jgrapht.graph.DefaultWeightedEdge; @Data @Slf4j public class RankedCycle { private final String cycleName; private Integer changePronenessRankSum = 0; private final Set vertexSet; private final Set edgeSet; private final double minCutCount; private final Set minCutEdges; private final List cycleNodes; private float rawPriority; private Integer priority = 0; private float averageChangeProneness; private Integer changePronenessRank = 0; private float impact; public RankedCycle( String cycleName, Set vertexSet, Set edgeSet, double 
minCutCount, Set minCutEdges, List cycleNodes) { this.cycleNodes = cycleNodes; this.cycleName = cycleName; this.vertexSet = vertexSet; this.edgeSet = edgeSet; this.minCutCount = minCutCount; if (null == minCutEdges) { this.minCutEdges = new HashSet<>(); } else { this.minCutEdges = minCutEdges; } if (minCutCount == 0.0) { this.impact = (float) (vertexSet.size()); } else { this.impact = (float) (vertexSet.size() / minCutCount); } this.rawPriority = this.impact; } public RankedCycle( String cycleName, Integer changePronenessRankSum, Set vertexSet, Set edgeSet, double minCutCount, Set minCutEdges, List cycleNodes) { this.cycleNodes = cycleNodes; this.cycleName = cycleName; this.changePronenessRankSum = changePronenessRankSum; this.vertexSet = vertexSet; this.edgeSet = edgeSet; this.minCutCount = minCutCount; if (null == minCutEdges) { this.minCutEdges = new HashSet<>(); } else { this.minCutEdges = minCutEdges; } if (minCutCount == 0.0) { this.impact = (float) (vertexSet.size()); } else { this.impact = (float) (vertexSet.size() / minCutCount); } this.averageChangeProneness = (float) changePronenessRankSum / vertexSet.size(); this.rawPriority = this.impact + averageChangeProneness; } } ================================================ FILE: cost-benefit-calculator/src/main/java/org/hjug/cbc/RankedDisharmony.java ================================================ package org.hjug.cbc; import java.nio.file.Paths; import java.time.Instant; import lombok.Data; import org.hjug.git.ScmLogInfo; import org.hjug.metrics.CBOClass; import org.hjug.metrics.GodClass; import org.jgrapht.graph.DefaultWeightedEdge; @Data public class RankedDisharmony { private Instant firstCommitTime; private Instant mostRecentCommitTime; private Integer commitCount; private String path; private String fileName; private final String className; private final Integer effortRank; private final Integer changePronenessRank; private Integer rawPriority; private Integer priority = 0; private Integer wmc; private 
Integer wmcRank; private Integer atfd; private Integer atfdRank; private Float tcc; private Integer tccRank; private DefaultWeightedEdge edge; private Integer cycleCount; private int sourceNodeShouldBeRemoved; private int targetNodeShouldBeRemoved; private String edgeTargetClass; private Integer edgeTargetChangePronenessRank; public RankedDisharmony(GodClass godClass, ScmLogInfo scmLogInfo) { path = scmLogInfo.getPath(); // from https://stackoverflow.com/questions/1011287/get-file-name-from-a-file-location-in-java fileName = Paths.get(path).getFileName().toString(); className = godClass.getClassName(); changePronenessRank = scmLogInfo.getChangePronenessRank(); effortRank = godClass.getOverallRank(); rawPriority = changePronenessRank - effortRank; wmc = godClass.getWmc(); wmcRank = godClass.getWmcRank(); atfd = godClass.getAtfd(); atfdRank = godClass.getAtfdRank(); tcc = godClass.getTcc(); tccRank = godClass.getTccRank(); firstCommitTime = Instant.ofEpochSecond(scmLogInfo.getEarliestCommit()); mostRecentCommitTime = Instant.ofEpochSecond(scmLogInfo.getMostRecentCommit()); commitCount = scmLogInfo.getCommitCount(); } public RankedDisharmony(CBOClass cboClass, ScmLogInfo scmLogInfo) { path = scmLogInfo.getPath(); // from https://stackoverflow.com/questions/1011287/get-file-name-from-a-file-location-in-java fileName = Paths.get(path).getFileName().toString(); className = cboClass.getClassName(); changePronenessRank = scmLogInfo.getChangePronenessRank(); effortRank = cboClass.getCouplingCount(); rawPriority = changePronenessRank - effortRank; firstCommitTime = Instant.ofEpochSecond(scmLogInfo.getEarliestCommit()); mostRecentCommitTime = Instant.ofEpochSecond(scmLogInfo.getMostRecentCommit()); commitCount = scmLogInfo.getCommitCount(); } public RankedDisharmony( String edgeSource, DefaultWeightedEdge edge, int cycleCount, int weight, boolean sourceNodeShouldBeRemoved, boolean targetNodeShouldBeRemoved, ScmLogInfo sourceScmLogInfo, ScmLogInfo targetScmLogInfo) { if (null 
!= sourceScmLogInfo) { path = sourceScmLogInfo.getPath(); // from https://stackoverflow.com/questions/1011287/get-file-name-from-a-file-location-in-java fileName = Paths.get(path).getFileName().toString(); firstCommitTime = Instant.ofEpochSecond(sourceScmLogInfo.getEarliestCommit()); mostRecentCommitTime = Instant.ofEpochSecond(sourceScmLogInfo.getMostRecentCommit()); commitCount = sourceScmLogInfo.getCommitCount(); } className = edgeSource; this.edge = edge; this.cycleCount = cycleCount; changePronenessRank = null == sourceScmLogInfo ? 0 : sourceScmLogInfo.getChangePronenessRank(); edgeTargetChangePronenessRank = null == targetScmLogInfo ? 0 : targetScmLogInfo.getChangePronenessRank(); effortRank = weight; this.sourceNodeShouldBeRemoved = sourceNodeShouldBeRemoved ? 1 : 0; this.targetNodeShouldBeRemoved = targetNodeShouldBeRemoved ? 1 : 0; } } ================================================ FILE: cost-benefit-calculator/src/test/java/org/hjug/cbc/CostBenefitCalculatorTest.java ================================================ package org.hjug.cbc; import static java.nio.charset.StandardCharsets.UTF_8; import java.io.*; import java.util.*; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.hjug.git.ScmLogInfo; import org.hjug.metrics.Disharmony; import org.jetbrains.annotations.NotNull; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import org.junit.jupiter.api.*; import org.junit.jupiter.api.io.TempDir; class CostBenefitCalculatorTest { @TempDir public File tempFolder; private String faceletsPath = "org/apache/myfaces/tobago/facelets/"; private String hudsonPath = "hudson/model/"; private Git git; private Repository repository; @BeforeEach public void setUp() throws GitAPIException { git = Git.init().setDirectory(tempFolder).call(); repository = git.getRepository(); new 
File(tempFolder.getPath() + "/" + faceletsPath).mkdirs(); new File(tempFolder.getPath() + "/" + hudsonPath).mkdirs(); } @AfterEach public void tearDown() { repository.close(); } @Test void testCBOViolation() throws IOException, GitAPIException, InterruptedException { // Has CBO violation String user = "User.java"; InputStream userResourceAsStream = getClass().getClassLoader().getResourceAsStream(hudsonPath + user); writeFile(hudsonPath + user, convertInputStreamToString(userResourceAsStream)); git.add().addFilepattern(".").call(); RevCommit firstCommit = git.commit().setMessage("message").call(); CostBenefitCalculator costBenefitCalculator = new CostBenefitCalculator(git.getRepository().getDirectory().getParent(), new HashMap<>()); costBenefitCalculator.runPmdAnalysis(); List disharmonies = costBenefitCalculator.calculateCBOCostBenefitValues(); Assertions.assertFalse(disharmonies.isEmpty()); } @Test void testCostBenefitCalculation() throws IOException, GitAPIException, InterruptedException { String attributeHandler = "AttributeHandler.java"; InputStream resourceAsStream = getClass().getClassLoader().getResourceAsStream(faceletsPath + attributeHandler); writeFile(faceletsPath + attributeHandler, convertInputStreamToString(resourceAsStream)); git.add().addFilepattern(".").call(); RevCommit firstCommit = git.commit().setMessage("message").call(); // Sleeping for one second to guarantee commits have different time stamps Thread.sleep(1000); // write contents of updated file to original file InputStream resourceAsStream2 = getClass().getClassLoader().getResourceAsStream(faceletsPath + "AttributeHandler2.java"); writeFile(faceletsPath + attributeHandler, convertInputStreamToString(resourceAsStream2)); InputStream resourceAsStream3 = getClass().getClassLoader().getResourceAsStream(faceletsPath + "AttributeHandlerAndSorter.java"); writeFile(faceletsPath + "AttributeHandlerAndSorter.java", convertInputStreamToString(resourceAsStream3)); git.add().addFilepattern(".").call(); 
RevCommit secondCommit = git.commit().setMessage("message").call(); CostBenefitCalculator costBenefitCalculator = new CostBenefitCalculator(git.getRepository().getDirectory().getParent(), new HashMap<>()); costBenefitCalculator.runPmdAnalysis(); List disharmonies = costBenefitCalculator.calculateGodClassCostBenefitValues(); Assertions.assertEquals(1, disharmonies.get(0).getRawPriority().intValue()); Assertions.assertEquals(1, disharmonies.get(1).getRawPriority().intValue()); Assertions.assertEquals(1, disharmonies.get(0).getPriority().intValue()); Assertions.assertEquals(2, disharmonies.get(1).getPriority().intValue()); } @Test void calculateSourceNodeCostBenefitValues_filtersMissingLogInfoAndAssignsPriority() throws Exception { writeFile(hudsonPath + "Dummy.java", "public class Dummy {}"); git.add().addFilepattern(".").call(); git.commit().setMessage("initial commit").call(); SimpleDirectedWeightedGraph classGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); classGraph.addVertex("ClassA"); classGraph.addVertex("ClassB"); classGraph.addVertex("ClassC"); classGraph.addVertex("ClassD"); classGraph.addVertex("ClassE"); classGraph.addVertex("ClassF"); DefaultWeightedEdge edge1 = classGraph.addEdge("ClassA", "ClassB"); classGraph.setEdgeWeight(edge1, 4); DefaultWeightedEdge edge2 = classGraph.addEdge("ClassC", "ClassD"); classGraph.setEdgeWeight(edge2, 2); Map edgeSourceNodeInfos = new HashMap<>(); edgeSourceNodeInfos.put(edge1, new CycleNode("ClassA", hudsonPath + "ClassA.java")); edgeSourceNodeInfos.put(edge2, new CycleNode("ClassC", hudsonPath + "ClassC.java")); Map edgeTargeteNodeInfos = new HashMap<>(); edgeTargeteNodeInfos.put(edge1, new CycleNode("ClassB", hudsonPath + "ClassB.java")); edgeTargeteNodeInfos.put(edge2, new CycleNode("ClassD", hudsonPath + "ClassD.java")); Map edgeToRemoveCycleCounts = new HashMap<>(); edgeToRemoveCycleCounts.put(edge1, 5); edgeToRemoveCycleCounts.put(edge2, 3); Set vertexesToRemove = new 
HashSet<>(Arrays.asList("ClassA", "ClassD")); ScmLogInfo scmLogInfo1 = new ScmLogInfo(hudsonPath + "ClassA.java", null, 1, 2, 3); scmLogInfo1.setChangePronenessRank(4); ScmLogInfo scmLogInfo2 = new ScmLogInfo(hudsonPath + "ClassC.java", null, 1, 2, 5); scmLogInfo2.setChangePronenessRank(7); List scmLogInfos = Arrays.asList(scmLogInfo1, scmLogInfo2); try (TestableCostBenefitCalculator costBenefitCalculator = new TestableCostBenefitCalculator( git.getRepository().getDirectory().getParent(), scmLogInfos)) { List disharmonies = costBenefitCalculator.calculateSourceNodeCostBenefitValues( classGraph, edgeTargeteNodeInfos, edgeTargeteNodeInfos, edgeToRemoveCycleCounts, vertexesToRemove); Assertions.assertEquals(2, disharmonies.size()); RankedDisharmony classA = disharmonies.get(0); Assertions.assertEquals("ClassA", classA.getClassName()); Assertions.assertEquals(5, classA.getCycleCount().intValue()); Assertions.assertEquals(4, classA.getEffortRank().intValue()); Assertions.assertEquals(1, classA.getSourceNodeShouldBeRemoved()); Assertions.assertEquals(0, classA.getTargetNodeShouldBeRemoved()); Assertions.assertEquals(1, classA.getPriority().intValue()); RankedDisharmony classC = disharmonies.get(1); Assertions.assertEquals("ClassC", classC.getClassName()); Assertions.assertEquals(3, classC.getCycleCount().intValue()); Assertions.assertEquals(2, classC.getEffortRank().intValue()); Assertions.assertEquals(0, classC.getSourceNodeShouldBeRemoved()); Assertions.assertEquals(1, classC.getTargetNodeShouldBeRemoved()); Assertions.assertEquals(2, classC.getPriority().intValue()); Assertions.assertEquals(7, classC.getChangePronenessRank()); } } @Test void calculateSourceNodeCostBenefitValues_prefersHigherChangePronenessRank() throws Exception { writeFile(faceletsPath + "Placeholder.java", "public class Placeholder {}"); git.add().addFilepattern(".").call(); git.commit().setMessage("initial commit").call(); SimpleDirectedWeightedGraph classGraph = new 
SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); classGraph.addVertex("Alpha"); classGraph.addVertex("Beta"); classGraph.addVertex("Gamma"); DefaultWeightedEdge edge1 = classGraph.addEdge("Alpha", "Beta"); classGraph.setEdgeWeight(edge1, 3); DefaultWeightedEdge edge2 = classGraph.addEdge("Gamma", "Beta"); classGraph.setEdgeWeight(edge2, 3); Map edgeSourceNodeInfos = new HashMap<>(); edgeSourceNodeInfos.put(edge1, new CycleNode("Alpha", faceletsPath + "Alpha.java")); edgeSourceNodeInfos.put(edge2, new CycleNode("Gamma", faceletsPath + "Gamma.java")); Map edgeTargetNodeInfos = new HashMap<>(); edgeTargetNodeInfos.put(edge1, new CycleNode("Beta", faceletsPath + "Beta.java")); Map edgeToRemoveCycleCounts = new HashMap<>(); edgeToRemoveCycleCounts.put(edge1, 4); edgeToRemoveCycleCounts.put(edge2, 4); Set vertexesToRemove = new HashSet<>(Arrays.asList("Alpha", "Gamma")); ScmLogInfo scmLogInfo1 = new ScmLogInfo(faceletsPath + "Alpha.java", null, 2, 3, 1); scmLogInfo1.setChangePronenessRank(2); ScmLogInfo scmLogInfo2 = new ScmLogInfo(faceletsPath + "Gamma.java", null, 2, 3, 1); scmLogInfo2.setChangePronenessRank(8); List scmLogInfos = Arrays.asList(scmLogInfo1, scmLogInfo2); try (TestableCostBenefitCalculator costBenefitCalculator = new TestableCostBenefitCalculator( git.getRepository().getDirectory().getParent(), scmLogInfos)) { List disharmonies = costBenefitCalculator.calculateSourceNodeCostBenefitValues( classGraph, edgeSourceNodeInfos, edgeTargetNodeInfos, edgeToRemoveCycleCounts, vertexesToRemove); Assertions.assertEquals(2, disharmonies.size()); Assertions.assertEquals(8, disharmonies.get(0).getChangePronenessRank()); Assertions.assertEquals(1, disharmonies.get(0).getPriority().intValue()); Assertions.assertEquals(2, disharmonies.get(1).getPriority().intValue()); Assertions.assertEquals(2, disharmonies.get(1).getChangePronenessRank()); } } @Test void sortEdgesThatNeedToBeRemoved_sortsByMultipleCriteria() { // Create ScmLogInfo objects for testing ScmLogInfo 
logInfo1 = new ScmLogInfo("path1.java", null, 1, 2, 3); logInfo1.setChangePronenessRank(5); ScmLogInfo logInfo2 = new ScmLogInfo("path2.java", null, 1, 2, 3); logInfo2.setChangePronenessRank(3); ScmLogInfo logInfo3 = new ScmLogInfo("path3.java", null, 1, 2, 3); logInfo3.setChangePronenessRank(8); ScmLogInfo logInfo4 = new ScmLogInfo("path4.java", null, 1, 2, 3); logInfo4.setChangePronenessRank(2); ScmLogInfo logInfo5 = new ScmLogInfo("path4.java", null, 1, 2, 3); logInfo5.setChangePronenessRank(5); // Create RankedDisharmony objects with different combinations // Expected order after sorting: cycleCount desc, then sourceRemoved desc, then targetRemoved desc, then // changeProneness desc // cycle=5, source=0, target=0, change=5 RankedDisharmony disharmony1 = new RankedDisharmony( "Class1", new org.jgrapht.graph.DefaultWeightedEdge(), 5, 1, false, false, logInfo1, null); // cycle=5, source=1, target=0, change=3 RankedDisharmony disharmony2 = new RankedDisharmony( "Class2", new org.jgrapht.graph.DefaultWeightedEdge(), 5, 1, true, false, logInfo2, null); // cycle=3, source=0, target=1, change=8 RankedDisharmony disharmony3 = new RankedDisharmony( "Class3", new org.jgrapht.graph.DefaultWeightedEdge(), 3, 1, false, true, logInfo3, null); // cycle=3, source=0, target=0, change=2 RankedDisharmony disharmony4 = new RankedDisharmony( "Class4", new org.jgrapht.graph.DefaultWeightedEdge(), 3, 1, false, false, logInfo4, null); // cycle=3, source=0, target=0, change=5 RankedDisharmony disharmony5 = new RankedDisharmony( "Class5", new org.jgrapht.graph.DefaultWeightedEdge(), 3, 1, false, false, logInfo5, null); List disharmonies = Arrays.asList(disharmony4, disharmony2, disharmony1, disharmony3, disharmony5); // Sort the list CostBenefitCalculator.sortEdgesThatNeedToBeRemoved(disharmonies); // Verify the order // Order by cycle count reversed (highest count bubbles to the top) // then Order by source node removed (source nodes needing to be removed bubble to the top) // then 
Order by target node removed (target nodes needing to be removed bubble to the top)\ // then Order by change proneness (highest change proneness bubbles to the top) for (RankedDisharmony disharmony : disharmonies) { System.out.println(disharmony.getClassName() + " " + disharmony.getCycleCount() + " " + disharmony.getEffortRank() + " " + disharmony.getSourceNodeShouldBeRemoved() + " " + disharmony.getTargetNodeShouldBeRemoved() + " " + disharmony.getChangePronenessRank()); } RankedDisharmony orderedDisharmony0 = disharmonies.get(0); Assertions.assertEquals("Class1", orderedDisharmony0.getClassName()); Assertions.assertEquals(5, orderedDisharmony0.getCycleCount().intValue()); Assertions.assertEquals(1, orderedDisharmony0.getEffortRank().intValue()); Assertions.assertEquals(0, orderedDisharmony0.getSourceNodeShouldBeRemoved()); Assertions.assertEquals(0, orderedDisharmony0.getTargetNodeShouldBeRemoved()); Assertions.assertEquals(5, orderedDisharmony0.getChangePronenessRank()); RankedDisharmony orderedDisharmony1 = disharmonies.get(1); Assertions.assertEquals("Class2", orderedDisharmony1.getClassName()); Assertions.assertEquals(5, orderedDisharmony1.getCycleCount().intValue()); Assertions.assertEquals(1, orderedDisharmony1.getEffortRank().intValue()); Assertions.assertEquals(1, orderedDisharmony1.getSourceNodeShouldBeRemoved()); Assertions.assertEquals(0, orderedDisharmony1.getTargetNodeShouldBeRemoved()); Assertions.assertEquals(3, orderedDisharmony1.getChangePronenessRank()); RankedDisharmony orderedDisharmony2 = disharmonies.get(2); Assertions.assertEquals("Class3", orderedDisharmony2.getClassName()); Assertions.assertEquals(3, orderedDisharmony2.getCycleCount().intValue()); Assertions.assertEquals(1, orderedDisharmony2.getEffortRank().intValue()); Assertions.assertEquals(0, orderedDisharmony2.getSourceNodeShouldBeRemoved()); Assertions.assertEquals(1, orderedDisharmony2.getTargetNodeShouldBeRemoved()); Assertions.assertEquals(8, 
orderedDisharmony2.getChangePronenessRank()); RankedDisharmony orderedDisharmony3 = disharmonies.get(3); Assertions.assertEquals("Class5", orderedDisharmony3.getClassName()); Assertions.assertEquals(3, orderedDisharmony3.getCycleCount().intValue()); Assertions.assertEquals(1, orderedDisharmony3.getEffortRank().intValue()); Assertions.assertEquals(0, orderedDisharmony3.getSourceNodeShouldBeRemoved()); Assertions.assertEquals(0, orderedDisharmony3.getTargetNodeShouldBeRemoved()); Assertions.assertEquals(5, orderedDisharmony3.getChangePronenessRank()); RankedDisharmony orderedDisharmony4 = disharmonies.get(4); Assertions.assertEquals("Class4", orderedDisharmony4.getClassName()); Assertions.assertEquals(1, orderedDisharmony4.getEffortRank().intValue()); Assertions.assertEquals(3, orderedDisharmony4.getCycleCount().intValue()); Assertions.assertEquals(0, orderedDisharmony4.getSourceNodeShouldBeRemoved()); Assertions.assertEquals(0, orderedDisharmony4.getTargetNodeShouldBeRemoved()); Assertions.assertEquals(2, orderedDisharmony4.getChangePronenessRank()); } private void writeFile(String name, String content) throws IOException { // Files.writeString(Path.of(git.getRepository().getWorkTree().getPath()), content); File file = new File(git.getRepository().getWorkTree(), name); try (FileOutputStream outputStream = new FileOutputStream(file)) { outputStream.write(content.getBytes(UTF_8)); } } private String convertInputStreamToString(InputStream inputStream) throws IOException { ByteArrayOutputStream result = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int length; while ((length = inputStream.read(buffer)) != -1) { result.write(buffer, 0, length); } return result.toString("UTF-8"); } private static class TestableCostBenefitCalculator extends CostBenefitCalculator { private final List scmLogInfos; TestableCostBenefitCalculator(String repositoryPath, List scmLogInfos) { super(repositoryPath, getClassToSourceFilePathMapping()); this.scmLogInfos = scmLogInfos; } 
private static @NotNull Map getClassToSourceFilePathMapping() { Map classToSourceFilePathMapping = new HashMap<>(); classToSourceFilePathMapping.put("Alpha", "org/apache/myfaces/tobago/facelets/Alpha.java"); classToSourceFilePathMapping.put("Beta", "org/apache/myfaces/tobago/facelets/Beta.java"); classToSourceFilePathMapping.put("Gamma", "org/apache/myfaces/tobago/facelets/Gamma.java"); classToSourceFilePathMapping.put("ClassA", "hudson/model/ClassA.java"); classToSourceFilePathMapping.put("ClassB", "hudson/model/ClassB.java"); classToSourceFilePathMapping.put("ClassC", "hudson/model/ClassC.java"); classToSourceFilePathMapping.put("ClassD", "hudson/model/ClassD.java"); return classToSourceFilePathMapping; } @Override public List getRankedChangeProneness(List disharmonies) { return new ArrayList<>(scmLogInfos); } } } ================================================ FILE: cost-benefit-calculator/src/test/resources/hudson/model/User.java ================================================ /* * The MIT License * * Copyright (c) 2004-2018, Sun Microsystems, Inc., Kohsuke Kawaguchi, Erik Ramfelt, * Tom Huybrechts, Vincent Latombe, CloudBees, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson.model; import com.infradna.tool.bridge_method_injector.WithBridgeMethods; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.BulkChange; import hudson.CopyOnWrite; import hudson.Extension; import hudson.ExtensionList; import hudson.ExtensionPoint; import hudson.Util; import hudson.XmlFile; import hudson.init.InitMilestone; import hudson.init.Initializer; import hudson.model.Descriptor.FormException; import hudson.model.listeners.SaveableListener; import hudson.security.ACL; import hudson.security.AccessControlled; import hudson.security.SecurityRealm; import hudson.security.UserMayOrMayNotExistException2; import hudson.util.FormApply; import hudson.util.FormValidation; import hudson.util.RunList; import hudson.util.XStream2; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import javax.servlet.ServletException; import javax.servlet.http.HttpServletResponse; import jenkins.model.IdStrategy; import jenkins.model.Jenkins; import jenkins.model.ModelObjectWithContextMenu; import jenkins.scm.RunWithSCM; import jenkins.security.ImpersonatingUserDetailsService2; 
import jenkins.security.LastGrantedAuthoritiesProperty; import jenkins.security.UserDetailsCache; import jenkins.util.SystemProperties; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.jenkinsci.Symbol; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; import org.kohsuke.stapler.StaplerProxy; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.export.Exported; import org.kohsuke.stapler.export.ExportedBean; import org.kohsuke.stapler.interceptor.RequirePOST; import org.kohsuke.stapler.verb.POST; import org.springframework.security.authentication.AnonymousAuthenticationToken; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.AuthenticationException; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.userdetails.UsernameNotFoundException; /** * Represents a user. * *

* In Hudson, {@link User} objects are created on an on-demand basis; * for example, when a build is performed, its change log is computed * and as a result commits from users who Hudson has never seen may be discovered. * When this happens, a new {@link User} object is created. * *

* If the persisted record for a user exists, the information is loaded at * that point, but if there's no such record, a fresh instance is created from * thin air (this is where {@link UserPropertyDescriptor#newInstance(User)} is * called to provide initial {@link UserProperty} objects). * *

* Such newly created {@link User} objects will be simply GC-ed without * ever leaving the persisted record, unless {@link User#save()} method * is explicitly invoked (perhaps as a result of a browser submitting a * configuration.) * * @author Kohsuke Kawaguchi */ @ExportedBean public class User extends AbstractModelObject implements AccessControlled, DescriptorByNameOwner, Saveable, Comparable, ModelObjectWithContextMenu, StaplerProxy { public static final XStream2 XSTREAM = new XStream2(); private static final Logger LOGGER = Logger.getLogger(User.class.getName()); static final String CONFIG_XML = "config.xml"; /** * Escape hatch for StaplerProxy-based access control */ @Restricted(NoExternalUse.class) @SuppressFBWarnings(value = "MS_SHOULD_BE_FINAL", justification = "for script console") public static /* Script Console modifiable */ boolean SKIP_PERMISSION_CHECK = SystemProperties.getBoolean(User.class.getName() + ".skipPermissionCheck"); /** * Jenkins now refuses to let the user login if he/she doesn't exist in {@link SecurityRealm}, * which was necessary to make sure users removed from the backend will get removed from the frontend. *

* Unfortunately this infringed some legitimate use cases of creating Jenkins-local users for * automation purposes. This escape hatch switch can be enabled to resurrect that behaviour. *

* See JENKINS-22346. */ @SuppressFBWarnings(value = "MS_SHOULD_BE_FINAL", justification = "for script console") public static boolean ALLOW_NON_EXISTENT_USER_TO_LOGIN = SystemProperties.getBoolean(User.class.getName() + ".allowNonExistentUserToLogin"); /** * Jenkins historically created a (usually) ephemeral user record when a user with Overall/Administer permission * accesses a /user/arbitraryName URL. *

* Unfortunately this constitutes a CSRF vulnerability, as malicious users can make admins create arbitrary numbers * of ephemeral user records, so the behavior was changed in Jenkins 2.44 / 2.32.2. *

* As some users may be relying on the previous behavior, setting this to true restores the previous behavior. This * is not recommended. *

* SECURITY-406. */ @Restricted(NoExternalUse.class) @SuppressFBWarnings(value = "MS_SHOULD_BE_FINAL", justification = "for script console") public static boolean ALLOW_USER_CREATION_VIA_URL = SystemProperties.getBoolean(User.class.getName() + ".allowUserCreationViaUrl"); /** * The username of the 'unknown' user used to avoid null user references. */ private static final String UNKNOWN_USERNAME = "unknown"; /** * These usernames should not be used by real users logging into Jenkins. Therefore, we prevent * users with these names from being saved. */ private static final String[] ILLEGAL_PERSISTED_USERNAMES = new String[]{ACL.ANONYMOUS_USERNAME, ACL.SYSTEM_USERNAME, UNKNOWN_USERNAME}; private final int version = 10; // Not currently used, but it may be helpful in the future to store a version. private String id; private volatile String fullName; private volatile String description; @CopyOnWrite private volatile List properties = new ArrayList<>(); static { XSTREAM.alias("user", User.class); } private User(String id, String fullName) { this.id = id; this.fullName = fullName; load(id); } private void load(String userId) { clearExistingProperties(); loadFromUserConfigFile(userId); removeNullsThatFailedToLoad(); allocateDefaultPropertyInstancesAsNeeded(); setUserToProperties(); } private void setUserToProperties() { for (UserProperty p : properties) { p.setUser(this); } } private void allocateDefaultPropertyInstancesAsNeeded() { for (UserPropertyDescriptor d : UserProperty.all()) { if (getProperty(d.clazz) == null) { UserProperty up = d.newInstance(this); if (up != null) properties.add(up); } } } private void removeNullsThatFailedToLoad() { properties.removeIf(Objects::isNull); } private void loadFromUserConfigFile(String userId) { XmlFile config = getConfigFile(); try { if (config != null && config.exists()) { config.unmarshal(this); this.id = userId; } } catch (IOException e) { LOGGER.log(Level.SEVERE, "Failed to load " + config, e); } } private void 
clearExistingProperties() { properties.clear(); } private XmlFile getConfigFile() { File existingUserFolder = getExistingUserFolder(); return existingUserFolder == null ? null : new XmlFile(XSTREAM, new File(existingUserFolder, CONFIG_XML)); } /** * Returns the {@link jenkins.model.IdStrategy} for use with {@link User} instances. See * {@link hudson.security.SecurityRealm#getUserIdStrategy()} * * @return the {@link jenkins.model.IdStrategy} for use with {@link User} instances. * @since 1.566 */ @NonNull public static IdStrategy idStrategy() { Jenkins j = Jenkins.get(); SecurityRealm realm = j.getSecurityRealm(); if (realm == null) { return IdStrategy.CASE_INSENSITIVE; } return realm.getUserIdStrategy(); } @Override public int compareTo(@NonNull User that) { return idStrategy().compare(this.id, that.id); } @Exported public String getId() { return id; } public @NonNull String getUrl() { return "user/" + Util.rawEncode(idStrategy().keyFor(id)); } @Override public @NonNull String getSearchUrl() { return "/user/" + Util.rawEncode(idStrategy().keyFor(id)); } /** * The URL of the user page. */ @Exported(visibility = 999) public @NonNull String getAbsoluteUrl() { return Jenkins.get().getRootUrl() + getUrl(); } /** * Gets the human readable name of this user. * This is configurable by the user. */ @Exported(visibility = 999) public @NonNull String getFullName() { return fullName; } /** * Sets the human readable name of the user. * If the input parameter is empty, the user's ID will be set. */ public void setFullName(String name) { if (Util.fixEmptyAndTrim(name) == null) name = id; this.fullName = name; } @Exported public @CheckForNull String getDescription() { return description; } /** * Sets the description of the user. * * @since 1.609 */ public void setDescription(String description) { this.description = description; } /** * Gets the user properties configured for this user. 
*/ public Map, UserProperty> getProperties() { return Descriptor.toMap(properties); } /** * Updates the user object by adding a property. */ public synchronized void addProperty(@NonNull UserProperty p) throws IOException { UserProperty old = getProperty(p.getClass()); List ps = new ArrayList<>(properties); if (old != null) ps.remove(old); ps.add(p); p.setUser(this); properties = ps; save(); } /** * List of all {@link UserProperty}s exposed primarily for the remoting API. */ @Exported(name = "property", inline = true) public List getAllProperties() { if (hasPermission(Jenkins.ADMINISTER)) { return Collections.unmodifiableList(properties); } return Collections.emptyList(); } /** * Gets the specific property, or null. */ public T getProperty(Class clazz) { for (UserProperty p : properties) { if (clazz.isInstance(p)) return clazz.cast(p); } return null; } /** * Creates an {@link Authentication} object that represents this user. *

* This method checks with {@link SecurityRealm} if the user is a valid user that can login to the security realm. * If {@link SecurityRealm} is a kind that does not support querying information about other users, this will * use {@link LastGrantedAuthoritiesProperty} to pick up the granted authorities as of the last time the user has * logged in. * * @throws UsernameNotFoundException If this user is not a valid user in the backend {@link SecurityRealm}. * @since 2.266 */ public @NonNull Authentication impersonate2() throws UsernameNotFoundException { return this.impersonate(this.getUserDetailsForImpersonation2()); } /** * @deprecated use {@link #impersonate2} * @since 1.419 */ @Deprecated public @NonNull org.acegisecurity.Authentication impersonate() throws org.acegisecurity.userdetails.UsernameNotFoundException { try { return org.acegisecurity.Authentication.fromSpring(impersonate2()); } catch (AuthenticationException x) { throw org.acegisecurity.AuthenticationException.fromSpring(x); } } /** * This method checks with {@link SecurityRealm} if the user is a valid user that can login to the security realm. * If {@link SecurityRealm} is a kind that does not support querying information about other users, this will * use {@link LastGrantedAuthoritiesProperty} to pick up the granted authorities as of the last time the user has * logged in. * * @return userDetails for the user, in case he's not found but seems legitimate, we provide a userDetails with minimum access * @throws UsernameNotFoundException If this user is not a valid user in the backend {@link SecurityRealm}. 
* @since 2.266 */ public @NonNull UserDetails getUserDetailsForImpersonation2() throws UsernameNotFoundException { ImpersonatingUserDetailsService2 userDetailsService = new ImpersonatingUserDetailsService2( Jenkins.get().getSecurityRealm().getSecurityComponents().userDetails2 ); try { UserDetails userDetails = userDetailsService.loadUserByUsername(id); LOGGER.log(Level.FINE, "Impersonation of the user {0} was a success", id); return userDetails; } catch (UserMayOrMayNotExistException2 e) { LOGGER.log(Level.FINE, "The user {0} may or may not exist in the SecurityRealm, so we provide minimum access", id); } catch (UsernameNotFoundException e) { if (ALLOW_NON_EXISTENT_USER_TO_LOGIN) { LOGGER.log(Level.FINE, "The user {0} was not found in the SecurityRealm but we are required to let it pass, due to ALLOW_NON_EXISTENT_USER_TO_LOGIN", id); } else { LOGGER.log(Level.FINE, "The user {0} was not found in the SecurityRealm", id); throw e; } } return new LegitimateButUnknownUserDetails(id); } /** * @deprecated use {@link #getUserDetailsForImpersonation2} */ @Deprecated public @NonNull org.acegisecurity.userdetails.UserDetails getUserDetailsForImpersonation() throws org.acegisecurity.userdetails.UsernameNotFoundException { try { return org.acegisecurity.userdetails.UserDetails.fromSpring(getUserDetailsForImpersonation2()); } catch (AuthenticationException x) { throw org.acegisecurity.AuthenticationException.fromSpring(x); } } /** * Only used for a legitimate user we have no idea about. 
We give it only minimum access */ private static class LegitimateButUnknownUserDetails extends org.springframework.security.core.userdetails.User { private LegitimateButUnknownUserDetails(String username) throws IllegalArgumentException { super( username, "", true, true, true, true, Set.of(SecurityRealm.AUTHENTICATED_AUTHORITY2) ); } } /** * Creates an {@link Authentication} object that represents this user using the given userDetails * * @param userDetails Provided by {@link #getUserDetailsForImpersonation2()}. * @see #getUserDetailsForImpersonation2() */ @Restricted(NoExternalUse.class) public @NonNull Authentication impersonate(@NonNull UserDetails userDetails) { return new UsernamePasswordAuthenticationToken(userDetails.getUsername(), "", userDetails.getAuthorities()); } /** * Accepts the new description. */ @RequirePOST public void doSubmitDescription(StaplerRequest req, StaplerResponse rsp) throws IOException { checkPermission(Jenkins.ADMINISTER); description = req.getParameter("description"); save(); rsp.sendRedirect("."); // go to the top page } /** * Gets the fallback "unknown" user instance. *

* This is used to avoid null {@link User} instance. */ public static @NonNull User getUnknown() { return getById(UNKNOWN_USERNAME, true); } /** * Gets the {@link User} object by its id or full name. * * @param create If true, this method will never return null for valid input * (by creating a new {@link User} object if none exists.) * If false, this method will return null if {@link User} object * with the given name doesn't exist. * @return Requested user. May be {@code null} if a user does not exist and * {@code create} is false. * @deprecated use {@link User#get(String, boolean, java.util.Map)} */ @Deprecated public static @Nullable User get(String idOrFullName, boolean create) { return get(idOrFullName, create, Collections.emptyMap()); } /** * Gets the {@link User} object by its id or full name. *

* In order to resolve the user ID, the method invokes {@link CanonicalIdResolver} extension points. * Note that it may cause significant performance degradation. * If you are sure the passed value is a User ID, it is recommended to use {@link #getById(String, boolean)}. * * @param create If true, this method will never return null for valid input * (by creating a new {@link User} object if none exists.) * If false, this method will return null if {@link User} object * with the given name doesn't exist. * @param context contextual environment this user idOfFullName was retrieved from, * that can help resolve the user ID * @return An existing or created user. May be {@code null} if a user does not exist and * {@code create} is false. */ public static @Nullable User get(String idOrFullName, boolean create, @NonNull Map context) { if (idOrFullName == null) { return null; } User user = AllUsers.get(idOrFullName); if (user != null) { return user; } String id = CanonicalIdResolver.resolve(idOrFullName, context); return getOrCreateById(id, idOrFullName, create); } /** * Retrieve a user by its ID, and create a new one if requested. * * @return An existing or created user. May be {@code null} if a user does not exist and * {@code create} is false. */ private static @Nullable User getOrCreateById(@NonNull String id, @NonNull String fullName, boolean create) { User u = AllUsers.get(id); if (u == null && (create || UserIdMapper.getInstance().isMapped(id))) { u = new User(id, fullName); AllUsers.put(id, u); if (!id.equals(fullName) && !UserIdMapper.getInstance().isMapped(id)) { try { u.save(); } catch (IOException x) { LOGGER.log(Level.WARNING, "Failed to save user configuration for " + id, x); } } } return u; } /** * Gets the {@link User} object by its id or full name. *

* Creates a user on-demand. * *

* Use {@link #getById} when you know you have an ID. * In this method Jenkins will try to resolve the {@link User} by full name with help of various * {@link hudson.tasks.UserNameResolver}. * This is slow (see JENKINS-23281). * * @deprecated This method is deprecated, because it causes unexpected {@link User} creation * by API usage code and causes performance degradation when used to retrieve users by ID. * Use {@link #getById} when you know you have an ID. * Otherwise use {@link #getOrCreateByIdOrFullName(String)} or {@link #get(String, boolean, Map)}. */ @Deprecated public static @NonNull User get(String idOrFullName) { return getOrCreateByIdOrFullName(idOrFullName); } /** * Get the user by ID or Full Name. *

* If the user does not exist, creates a new one on-demand. * *

* Use {@link #getById} when you know you have an ID. * In this method Jenkins will try to resolve the {@link User} by full name with help of various * {@link hudson.tasks.UserNameResolver}. * This is slow (see JENKINS-23281). * * @param idOrFullName User ID or full name * @return User instance. It will be created on-demand. * @since 2.91 */ public static @NonNull User getOrCreateByIdOrFullName(@NonNull String idOrFullName) { return get(idOrFullName, true, Collections.emptyMap()); } /** * Gets the {@link User} object representing the currently logged-in user, or null * if the current user is anonymous. * * @since 1.172 */ public static @CheckForNull User current() { return get2(Jenkins.getAuthentication2()); } /** * Gets the {@link User} object representing the supplied {@link Authentication} or * {@code null} if the supplied {@link Authentication} is either anonymous or {@code null} * * @param a the supplied {@link Authentication} . * @return a {@link User} object for the supplied {@link Authentication} or {@code null} * @since 2.266 */ public static @CheckForNull User get2(@CheckForNull Authentication a) { if (a == null || a instanceof AnonymousAuthenticationToken) return null; // Since we already know this is a name, we can just call getOrCreateById with the name directly. return getById(a.getName(), true); } /** * @deprecated use {@link #get2(Authentication)} * @since 1.609 */ @Deprecated public static @CheckForNull User get(@CheckForNull org.acegisecurity.Authentication a) { return get2(a != null ? a.toSpring() : null); } /** * Gets the {@link User} object by its {@code id} * * @param id the id of the user to retrieve and optionally create if it does not exist. * @param create If {@code true}, this method will never return {@code null} for valid input (by creating a * new {@link User} object if none exists.) If {@code false}, this method will return * {@code null} if {@link User} object with the given id doesn't exist. 
* @return the a User whose id is {@code id}, or {@code null} if {@code create} is {@code false} * and the user does not exist. * @since 1.651.2 / 2.3 */ public static @Nullable User getById(String id, boolean create) { return getOrCreateById(id, id, create); } /** * Gets all the users. */ public static @NonNull Collection getAll() { final IdStrategy strategy = idStrategy(); ArrayList users = new ArrayList<>(AllUsers.values()); users.sort((o1, o2) -> strategy.compare(o1.getId(), o2.getId())); return users; } /** * To be called from {@link Jenkins#reload} only. */ @Restricted(NoExternalUse.class) public static void reload() throws IOException { UserIdMapper.getInstance().reload(); AllUsers.reload(); } /** * Called when changing the {@link IdStrategy}. * * @since 1.566 */ public static void rekey() { /* There are many and varied ways in which this could cause erratic or problematic behavior. Such changes should really only occur during initial setup and under very controlled situations. After this sort of a change the whole webapp should restart. It's possible that this rekeying, or greater issues in the realm change, could affect currently logged in users and even the user making the change. */ try { reload(); } catch (IOException e) { LOGGER.log(Level.SEVERE, "Failed to perform rekey operation.", e); } } /** * Returns the user name. */ @Override public @NonNull String getDisplayName() { return getFullName(); } /** * true if {@link RunWithSCM#hasParticipant} or {@link hudson.model.Cause.UserIdCause} */ private boolean relatedTo(@NonNull Run b) { if (b instanceof RunWithSCM && ((RunWithSCM) b).hasParticipant(this)) { return true; } for (Cause cause : b.getCauses()) { if (cause instanceof Cause.UserIdCause) { String userId = ((Cause.UserIdCause) cause).getUserId(); if (userId != null && idStrategy().equals(userId, getId())) { return true; } } } return false; } /** * Searches for builds which include changes by this user or which were triggered by this user. 
*/ @SuppressWarnings("unchecked") @WithBridgeMethods(List.class) public @NonNull RunList getBuilds() { return RunList.fromJobs((Iterable) Jenkins.get(). allItems(Job.class)).filter((Predicate>) this::relatedTo); } /** * Gets all the {@link AbstractProject}s that this user has committed to. * * @since 1.191 */ public @NonNull Set> getProjects() { Set> r = new HashSet<>(); for (AbstractProject p : Jenkins.get().allItems(AbstractProject.class, p -> p.hasParticipant(this))) r.add(p); return r; } @Override public String toString() { return id; } /** * Called by tests in the JTH. Otherwise this shouldn't be called. * Even in the tests this usage is questionable. * @deprecated removed without replacement */ @Deprecated public static void clear() { if (ExtensionList.lookup(AllUsers.class).isEmpty()) { return; } UserIdMapper.getInstance().clear(); AllUsers.clear(); } private static File getConfigFileFor(String id) { return new File(getUserFolderFor(id), "config.xml"); } private static File getUserFolderFor(String id) { return new File(getRootDir(), idStrategy().filenameOf(id)); } /** * Returns the folder that store all the user information. * Useful for plugins to save a user-specific file aside the config.xml. * Exposes implementation details that may be subject to change. * * @return The folder containing the user configuration files or {@code null} if the user was not yet saved. * * @since 2.129 */ public @CheckForNull File getUserFolder() { return getExistingUserFolder(); } private @CheckForNull File getExistingUserFolder() { return UserIdMapper.getInstance().getDirectory(id); } /** * Gets the directory where Hudson stores user information. */ static File getRootDir() { return new File(Jenkins.get().getRootDir(), "users"); } /** * Is the ID allowed? Some are prohibited for security reasons. See SECURITY-166. *

* Note that this is only enforced when saving. These users are often created * via the constructor (and even listed on /asynchPeople), but our goal is to * prevent anyone from logging in as these users. Therefore, we prevent * saving a User with one of these ids. * * @param id ID to be checked * @return {@code true} if the username or fullname is valid. * For {@code null} or blank IDs returns {@code false}. * @since 1.600 */ public static boolean isIdOrFullnameAllowed(@CheckForNull String id) { if (StringUtils.isBlank(id)) { return false; } final String trimmedId = id.trim(); for (String invalidId : ILLEGAL_PERSISTED_USERNAMES) { if (trimmedId.equalsIgnoreCase(invalidId)) return false; } return true; } /** * Save the user configuration. */ @Override public synchronized void save() throws IOException { if (!isIdOrFullnameAllowed(id)) { throw FormValidation.error(Messages.User_IllegalUsername(id)); } if (!isIdOrFullnameAllowed(fullName)) { throw FormValidation.error(Messages.User_IllegalFullname(fullName)); } if (BulkChange.contains(this)) { return; } XmlFile xmlFile = new XmlFile(XSTREAM, constructUserConfigFile()); xmlFile.write(this); SaveableListener.fireOnChange(this, xmlFile); } private File constructUserConfigFile() throws IOException { return new File(putUserFolderIfAbsent(), CONFIG_XML); } private File putUserFolderIfAbsent() throws IOException { return UserIdMapper.getInstance().putIfAbsent(id, true); } /** * Deletes the data directory and removes this user from Hudson. * * @throws IOException if we fail to delete. 
*/ public void delete() throws IOException { String idKey = idStrategy().keyFor(id); File existingUserFolder = getExistingUserFolder(); UserIdMapper.getInstance().remove(id); AllUsers.remove(id); deleteExistingUserFolder(existingUserFolder); UserDetailsCache.get().invalidate(idKey); } private void deleteExistingUserFolder(File existingUserFolder) throws IOException { if (existingUserFolder != null && existingUserFolder.exists()) { Util.deleteRecursive(existingUserFolder); } } /** * Exposed remote API. */ public Api getApi() { return new Api(this); } /** * Accepts submission from the configuration page. */ @POST public void doConfigSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, FormException { checkPermission(Jenkins.ADMINISTER); JSONObject json = req.getSubmittedForm(); String oldFullName = this.fullName; fullName = json.getString("fullName"); description = json.getString("description"); List props = new ArrayList<>(); int i = 0; for (UserPropertyDescriptor d : UserProperty.all()) { UserProperty p = getProperty(d.clazz); JSONObject o = json.optJSONObject("userProperty" + i++); if (o != null) { if (p != null) { p = p.reconfigure(req, o); } else { p = d.newInstance(req, o); } p.setUser(this); } if (p != null) props.add(p); } this.properties = props; save(); if (oldFullName != null && !oldFullName.equals(this.fullName)) { UserDetailsCache.get().invalidate(oldFullName); } FormApply.success(".").generateResponse(req, rsp, this); } /** * Deletes this user from Hudson. 
*/ @RequirePOST public void doDoDelete(StaplerRequest req, StaplerResponse rsp) throws IOException { checkPermission(Jenkins.ADMINISTER); if (idStrategy().equals(id, Jenkins.getAuthentication2().getName())) { rsp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Cannot delete self"); return; } delete(); rsp.sendRedirect2("../.."); } public void doRssAll(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { RSS.rss(req, rsp, "Jenkins:" + getDisplayName() + " (all builds)", getUrl(), getBuilds().newBuilds()); } public void doRssFailed(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { RSS.rss(req, rsp, "Jenkins:" + getDisplayName() + " (failed builds)", getUrl(), getBuilds().regressionOnly()); } public void doRssLatest(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { final List lastBuilds = new ArrayList<>(); for (Job p : Jenkins.get().allItems(Job.class)) { for (Run b = p.getLastBuild(); b != null; b = b.getPreviousBuild()) { if (relatedTo(b)) { lastBuilds.add(b); break; } } } // historically these have been reported sorted by project name, we switched to the lazy iteration // so we only have to sort the sublist of runs rather than the full list of irrelevant projects lastBuilds.sort((o1, o2) -> Items.BY_FULL_NAME.compare(o1.getParent(), o2.getParent())); RSS.rss(req, rsp, "Jenkins:" + getDisplayName() + " (latest builds)", getUrl(), RunList.fromRuns(lastBuilds), Run.FEED_ADAPTER_LATEST); } @Override @NonNull public ACL getACL() { ACL base = Jenkins.get().getAuthorizationStrategy().getACL(this); // always allow a non-anonymous user full control of himself. return ACL.lambda2((a, permission) -> (idStrategy().equals(a.getName(), id) && !(a instanceof AnonymousAuthenticationToken)) || base.hasPermission2(a, permission)); } /** * With ADMINISTER permission, can delete users with persisted data but can't delete self. 
*/ public boolean canDelete() { final IdStrategy strategy = idStrategy(); return hasPermission(Jenkins.ADMINISTER) && !strategy.equals(id, Jenkins.getAuthentication2().getName()) && UserIdMapper.getInstance().isMapped(id); } /** * Checks for authorities (groups) associated with this user. * If the caller lacks {@link Jenkins#ADMINISTER}, or any problems arise, returns an empty list. * {@link SecurityRealm#AUTHENTICATED_AUTHORITY2} and the username, if present, are omitted. * * @return a possibly empty list * @since 1.498 */ public @NonNull List getAuthorities() { if (!Jenkins.get().hasPermission(Jenkins.ADMINISTER)) { return Collections.emptyList(); } List r = new ArrayList<>(); Authentication authentication; try { authentication = impersonate2(); } catch (UsernameNotFoundException x) { LOGGER.log(Level.FINE, "cannot look up authorities for " + id, x); return Collections.emptyList(); } for (GrantedAuthority a : authentication.getAuthorities()) { if (a.equals(SecurityRealm.AUTHENTICATED_AUTHORITY2)) { continue; } String n = a.getAuthority(); if (n != null && !idStrategy().equals(n, id)) { r.add(n); } } r.sort(String.CASE_INSENSITIVE_ORDER); return r; } public Object getDynamic(String token) { for (Action action : getTransientActions()) { if (Objects.equals(action.getUrlName(), token)) return action; } for (Action action : getPropertyActions()) { if (Objects.equals(action.getUrlName(), token)) return action; } return null; } /** * Return all properties that are also actions. * * @return the list can be empty but never null. read only. */ public List getPropertyActions() { List actions = new ArrayList<>(); for (UserProperty userProp : getProperties().values()) { if (userProp instanceof Action) { actions.add((Action) userProp); } } return Collections.unmodifiableList(actions); } /** * Return all transient actions associated with this user. * * @return the list can be empty but never null. read only. 
*/
public List getTransientActions() {
    List actions = new ArrayList<>();
    for (TransientUserActionFactory factory : TransientUserActionFactory.all()) {
        actions.addAll(factory.createFor(this));
    }
    return Collections.unmodifiableList(actions);
}

// Context menu for this user in the UI, built from this model object.
@Override
public ContextMenu doContextMenu(StaplerRequest request, StaplerResponse response) throws Exception {
    return new ContextMenu().from(this, request, response);
}

// Stapler routing guard: hides this object from web requests unless the
// caller has overall READ (or the permission-check escape hatch is enabled).
@Override
@Restricted(NoExternalUse.class)
public Object getTarget() {
    if (!SKIP_PERMISSION_CHECK) {
        if (!Jenkins.get().hasPermission(Jenkins.READ)) {
            return null;
        }
    }
    return this;
}

/**
 * Gets list of Illegal usernames, for which users should not be created.
 * Always includes users from {@link #ILLEGAL_PERSISTED_USERNAMES}
 *
 * @return List of usernames
 */
@Restricted(NoExternalUse.class)
/*package*/ static Set getIllegalPersistedUsernames() {
    // Defensive copy: callers get a fresh mutable set each time.
    return new HashSet<>(Arrays.asList(ILLEGAL_PERSISTED_USERNAMES));
}

// Serialization hook: when not at the top level of an XML file, persist a
// lightweight Replacer (id only) instead of the full User object.
private Object writeReplace() {
    return XmlFile.replaceIfNotAtTopLevel(this, () -> new Replacer(this));
}

// Serialization proxy for User: stores only the id and resolves it back to
// the canonical User instance on deserialization.
private static class Replacer {
    private final String id;

    Replacer(User u) {
        id = u.getId();
    }

    private Object readResolve() {
        return getById(id, false);
    }
}

/**
 * Per-{@link Jenkins} holder of all known {@link User}s.
 */
@Extension
@Restricted(NoExternalUse.class)
public static final class AllUsers {

    // Registry of User instances; see the javadoc below on keying.
    private final ConcurrentMap byName = new ConcurrentHashMap<>();

    // Populates the registry from user ids already persisted on disk.
    @Initializer(after = InitMilestone.JOB_CONFIG_ADAPTED)
    public static void scanAll() {
        for (String userId : UserIdMapper.getInstance().getConvertedUserIds()) {
            User user = new User(userId, userId);
            getInstance().byName.putIfAbsent(idStrategy().keyFor(userId), user);
        }
    }

    /**
     * Keyed by {@link User#id}. This map is used to ensure
     * singleton-per-id semantics of {@link User} objects.
     *
     * The key needs to be generated by {@link IdStrategy#keyFor(String)}.
     */
    private static AllUsers getInstance() {
        return ExtensionList.lookupSingleton(AllUsers.class);
    }

    // Clears the registry and the user-details cache, then rescans disk.
    private static void reload() {
        getInstance().byName.clear();
        UserDetailsCache.get().invalidateAll();
        scanAll();
    }

    private static void clear() {
        getInstance().byName.clear();
    }

    private static void remove(String id) {
        getInstance().byName.remove(idStrategy().keyFor(id));
    }

    private static User get(String id) {
        return getInstance().byName.get(idStrategy().keyFor(id));
    }

    // putIfAbsent preserves singleton-per-id: a racing insert keeps the winner.
    private static void put(String id, User user) {
        getInstance().byName.putIfAbsent(idStrategy().keyFor(id), user);
    }

    private static Collection values() {
        return getInstance().byName.values();
    }
}

/**
 * Resolves User IDs by ID, full names or other strings.
 *
* This extension point may be useful to map SCM user names to Jenkins {@link User} IDs.
* Currently the extension point is used in {@link User#get(String, boolean, Map)}.
*
* @see jenkins.model.DefaultUserCanonicalIdResolver
* @see FullNameIdResolver
* @since 1.479
*/
public abstract static class CanonicalIdResolver extends AbstractDescribableImpl implements ExtensionPoint, Comparable {

    /**
     * context key for realm (domain) where idOrFullName has been retrieved from.
     * Can be used (for example) to distinguish ambiguous committer ID using the SCM URL.
     * Associated Value is a {@link String}
     */
    public static final String REALM = "realm";

    @Override
    public int compareTo(@NonNull CanonicalIdResolver o) {
        // reverse priority order
        return Integer.compare(o.getPriority(), getPriority());
    }

    /**
     * extract user ID from idOrFullName with help from contextual infos.
     * can return {@code null} if no user ID matched the input
     */
    public abstract @CheckForNull String resolveCanonicalId(String idOrFullName, Map context);

    /**
     * Gets priority of the resolver.
     * Higher priority means that it will be checked earlier.
     *
     * Overriding methods must not use {@link Integer#MIN_VALUE}, because it will cause collisions
     * with {@link jenkins.model.DefaultUserCanonicalIdResolver}.
     *
     * @return Priority of the resolver.
     */
    public int getPriority() {
        return 1;
    }

    //Such sorting and collection rebuild is not good for User#get(...) method performance.
    /**
     * Gets all extension points, sorted by priority.
     *
     * @return Sorted list of extension point implementations.
     * @since 2.93
     */
    public static List all() {
        List resolvers = new ArrayList<>(ExtensionList.lookup(CanonicalIdResolver.class));
        Collections.sort(resolvers);
        return resolvers;
    }

    /**
     * Resolves users using all available {@link CanonicalIdResolver}s.
     *
     * @param idOrFullName ID or full name of the user
     * @param context Context
     * @return Resolved User ID or {@code null} if the user ID cannot be resolved.
     * @since 2.93
     */
    @CheckForNull
    public static String resolve(@NonNull String idOrFullName, @NonNull Map context) {
        // First resolver (highest priority) that produces a non-null id wins.
        for (CanonicalIdResolver resolver : CanonicalIdResolver.all()) {
            String id = resolver.resolveCanonicalId(idOrFullName, context);
            if (id != null) {
                LOGGER.log(Level.FINE, "{0} mapped {1} to {2}", new Object[]{resolver, idOrFullName, id});
                return id;
            }
        }
        // De-facto it is not going to happen OOTB, because the current DefaultUserCanonicalIdResolver
        // always returns a value. But we still need to check nulls if somebody disables the extension point
        return null;
    }
}

/**
 * Resolve user ID from full name
 */
@Extension
@Symbol("fullName")
public static class FullNameIdResolver extends CanonicalIdResolver {

    // Linear scan over all users; returns the first exact full-name match.
    @Override
    public String resolveCanonicalId(String idOrFullName, Map context) {
        for (User user : getAll()) {
            if (idOrFullName.equals(user.getFullName())) return user.getId();
        }
        return null;
    }

    @Override
    public int getPriority() {
        return -1; // lower than default
    }
}

/**
 * Tries to verify if an ID is valid.
 * If so, we do not want to even consider users who might have the same full name.
*/
@Extension
@Restricted(NoExternalUse.class)
public static class UserIDCanonicalIdResolver extends User.CanonicalIdResolver {

    // Escape hatch for the SECURITY-243 defense; deliberately non-final so it
    // can be toggled via system property at runtime.
    private static /* not final */ boolean SECURITY_243_FULL_DEFENSE = SystemProperties.getBoolean(User.class.getName() + ".SECURITY_243_FULL_DEFENSE", true);

    // Re-entrancy guard: prevents recursive resolution when the security realm
    // lookup itself triggers user resolution on the same thread.
    private static final ThreadLocal resolving = ThreadLocal.withInitial(() -> false);

    @Override
    public String resolveCanonicalId(String idOrFullName, Map context) {
        // Fast path: the string is already a known user id.
        User existing = getById(idOrFullName, false);
        if (existing != null) {
            return existing.getId();
        }
        if (SECURITY_243_FULL_DEFENSE) {
            if (!resolving.get()) {
                resolving.set(true);
                try {
                    // Ask the security realm whether this is a valid username.
                    UserDetails userDetails = UserDetailsCache.get().loadUserByUsername(idOrFullName);
                    return userDetails.getUsername();
                } catch (UsernameNotFoundException x) {
                    LOGGER.log(Level.FINE, "not sure whether " + idOrFullName + " is a valid username or not", x);
                } catch (ExecutionException x) {
                    LOGGER.log(Level.FINE, "could not look up " + idOrFullName, x);
                } finally {
                    resolving.set(false);
                }
            }
        }
        return null;
    }

    @Override
    public int getPriority() {
        // should always come first so that ID that are ids get mapped correctly
        return Integer.MAX_VALUE;
    }
}
}

================================================
FILE: cost-benefit-calculator/src/test/resources/org/apache/myfaces/tobago/facelets/AttributeHandler.java
================================================
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.
You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myfaces.tobago.facelets; import org.apache.commons.beanutils.PropertyUtils; import org.apache.myfaces.tobago.component.Attributes; import org.apache.myfaces.tobago.component.SupportsMarkup; import org.apache.myfaces.tobago.component.SupportsRenderedPartially; import org.apache.myfaces.tobago.context.Markup; import org.apache.myfaces.tobago.el.ConstantMethodBinding; import org.apache.myfaces.tobago.internal.util.StringUtils; import org.apache.myfaces.tobago.util.ComponentUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.el.ELException; import javax.el.ExpressionFactory; import javax.el.MethodExpression; import javax.el.ValueExpression; import javax.faces.FacesException; import javax.faces.component.ActionSource; import javax.faces.component.ActionSource2; import javax.faces.component.EditableValueHolder; import javax.faces.component.UIComponent; import javax.faces.component.ValueHolder; import javax.faces.convert.Converter; import javax.faces.event.MethodExpressionActionListener; import javax.faces.event.MethodExpressionValueChangeListener; import javax.faces.validator.MethodExpressionValidator; import javax.faces.view.facelets.ComponentHandler; import javax.faces.view.facelets.FaceletContext; import javax.faces.view.facelets.TagAttribute; import javax.faces.view.facelets.TagConfig; import javax.faces.view.facelets.TagException; import javax.faces.view.facelets.TagHandler; import java.beans.IntrospectionException; import java.beans.PropertyDescriptor; //from Apache MyFaces 2.0.8 //Retrieved from 
http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/facelets/AttributeHandler.java/?v=source public final class AttributeHandler extends TagHandler { private static final Logger LOG = LoggerFactory.getLogger(AttributeHandler.class); private final TagAttribute name; private final TagAttribute value; private final TagAttribute mode; public AttributeHandler(final TagConfig config) { super(config); this.name = getRequiredAttribute(Attributes.NAME); this.value = getRequiredAttribute(Attributes.VALUE); this.mode = getAttribute(Attributes.MODE); } public void apply(final FaceletContext faceletContext, final UIComponent parent) throws ELException { if (parent == null) { throw new TagException(tag, "Parent UIComponent was null"); } if (ComponentHandler.isNew(parent)) { if (mode != null) { if ("isNotSet".equals(mode.getValue())) { boolean result = false; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = true; break; } else { expressionString = expression.getExpressionString(); } } else { result = false; break; } } } else { result = StringUtils.isEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("isSet".equals(mode.getValue())) { boolean result = true; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = false; break; } else { expressionString = expression.getExpressionString(); } } else { result = true; break; } } } else { 
result = StringUtils.isNotEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("action".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { // when the action hasn't been set while using a composition. if (LOG.isDebugEnabled()) { LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression action = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, String.class, ComponentUtils.ACTION_ARGS)); ((ActionSource2) parent).setActionExpression(action); } } else if ("actionListener".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { if (LOG.isDebugEnabled()) { // when the action hasn't been set while using a composition. 
LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { LOG.warn("Only expressions are supported mode=actionListener value='" + expressionString + "'"); expressionString = null; break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression actionListener = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, null, ComponentUtils.ACTION_LISTENER_ARGS)); ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(actionListener)); } } else if ("actionFromValue".equals(mode.getValue())) { if (!value.isLiteral()) { final String result = value.getValue(faceletContext); parent.getAttributes().put(name.getValue(), new ConstantMethodBinding(result)); } } else if ("valueIfSet".equals(mode.getValue())) { String expressionString = value.getValue(); String lastExpressionString = null; while (isMethodOrValueExpression(expressionString) && isSimpleExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression != null) { lastExpressionString = expressionString; expressionString = expression.getExpressionString(); } else { // restore last value expressionString = lastExpressionString; break; } } if (expressionString != null) { final String attributeName = name.getValue(faceletContext); if (containsMethodOrValueExpression(expressionString)) { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(attributeName, expression); } else { final Object literalValue = getValue(faceletContext, parent, expressionString, attributeName); parent.getAttributes().put(attributeName, literalValue); } } } else { throw new FacesException("Type " + mode 
+ " not supported"); } } else { final String nameValue = name.getValue(faceletContext); if (Attributes.RENDERED.equals(nameValue)) { if (value.isLiteral()) { parent.setRendered(value.getBoolean(faceletContext)); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Boolean.class)); } } else if (Attributes.RENDERED_PARTIALLY.equals(nameValue) && parent instanceof SupportsRenderedPartially) { if (value.isLiteral()) { final String[] components = ComponentUtils.splitList(value.getValue()); ((SupportsRenderedPartially) parent).setRenderedPartially(components); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } else if (Attributes.STYLE_CLASS.equals(nameValue)) { // TODO expression ComponentUtils.setStyleClasses(parent, value.getValue()); } else if (Attributes.MARKUP.equals(nameValue)) { if (parent instanceof SupportsMarkup) { if (value.isLiteral()) { ((SupportsMarkup) parent).setMarkup(Markup.valueOf(value.getValue())); } else { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(nameValue, expression); } } else { LOG.error("Component is not instanceof SupportsMarkup. 
Instance is: " + parent.getClass().getName()); } } else if (parent instanceof EditableValueHolder && Attributes.VALIDATOR.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALIDATOR_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValidator(new MethodExpressionValidator(methodExpression)); } } else if (parent instanceof EditableValueHolder && Attributes.VALUE_CHANGE_LISTENER.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALUE_CHANGE_LISTENER_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValueChangeListener( new MethodExpressionValueChangeListener(methodExpression)); } } else if (parent instanceof ValueHolder && Attributes.CONVERTER.equals(nameValue)) { setConverter(faceletContext, parent, nameValue); } else if (parent instanceof ActionSource && Attributes.ACTION.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, String.class, ComponentUtils.ACTION_ARGS); if (action != null) { ((ActionSource2) parent).setActionExpression(action); } } else if (parent instanceof ActionSource && Attributes.ACTION_LISTENER.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, null, ComponentUtils.ACTION_LISTENER_ARGS); if (action != null) { ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(action)); } } else if (!parent.getAttributes().containsKey(nameValue)) { if (value.isLiteral()) { parent.getAttributes().put(nameValue, value.getValue()); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } } } } private boolean isMethodOrValueExpression(final String string) { return (string.startsWith("${") || string.startsWith("#{")) && string.endsWith("}"); } private boolean containsMethodOrValueExpression(final String string) { return 
(string.contains("${") || string.contains("#{")) && string.contains("}"); } private boolean isSimpleExpression(final String string) { return string.indexOf('.') < 0 && string.indexOf('[') < 0; } private String removeElParenthesis(final String string) { return string.substring(2, string.length() - 1); } private ValueExpression getExpression(final FaceletContext faceletContext) { final String myValue = removeElParenthesis(value.getValue()); return faceletContext.getVariableMapper().resolveVariable(myValue); } private MethodExpression getMethodExpression( final FaceletContext faceletContext, final Class returnType, final Class[] args) { // in a composition may be we get the method expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); return new TagMethodExpression(value, expressionFactory.createMethodExpression(faceletContext, expression.getExpressionString(), returnType, args)); } else { return null; } } else { return value.getMethodExpression(faceletContext, returnType, args); } } private Object getValue( final FaceletContext faceletContext, final UIComponent parent, final String expressionString, final String attributeName) { Class type = Object.class; try { type = PropertyUtils.getReadMethod( new PropertyDescriptor(attributeName, parent.getClass())).getReturnType(); } catch (final IntrospectionException e) { LOG.warn("Can't determine expected type", e); } final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final ValueExpression valueExpression = expressionFactory .createValueExpression(faceletContext, expressionString, type); return valueExpression.getValue(faceletContext); } private void setConverter(final FaceletContext faceletContext, final UIComponent parent, 
final String nameValue) { // in a composition may be we get the converter expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { setConverter(faceletContext, parent, nameValue, expression); } } else { setConverter(faceletContext, parent, nameValue, value.getValueExpression(faceletContext, Object.class)); } } private void setConverter( final FaceletContext faceletContext, final UIComponent parent, final String nameValue, final ValueExpression expression) { if (expression.isLiteralText()) { final Converter converter = faceletContext.getFacesContext().getApplication().createConverter(expression.getExpressionString()); ((ValueHolder) parent).setConverter(converter); } else { parent.setValueExpression(nameValue, expression); } } } ================================================ FILE: cost-benefit-calculator/src/test/resources/org/apache/myfaces/tobago/facelets/AttributeHandler2.java ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/ package org.apache.myfaces.tobago.facelets; import org.apache.commons.beanutils.PropertyUtils; import org.apache.myfaces.tobago.component.Attributes; import org.apache.myfaces.tobago.component.SupportsMarkup; import org.apache.myfaces.tobago.component.SupportsRenderedPartially; import org.apache.myfaces.tobago.context.Markup; import org.apache.myfaces.tobago.el.ConstantMethodBinding; import org.apache.myfaces.tobago.internal.util.StringUtils; import org.apache.myfaces.tobago.util.ComponentUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.el.ELException; import javax.el.ExpressionFactory; import javax.el.MethodExpression; import javax.el.ValueExpression; import javax.faces.FacesException; import javax.faces.component.ActionSource; import javax.faces.component.ActionSource2; import javax.faces.component.EditableValueHolder; import javax.faces.component.UIComponent; import javax.faces.component.ValueHolder; import javax.faces.convert.Converter; import javax.faces.event.MethodExpressionActionListener; import javax.faces.event.MethodExpressionValueChangeListener; import javax.faces.validator.MethodExpressionValidator; import javax.faces.view.facelets.ComponentHandler; import javax.faces.view.facelets.FaceletContext; import javax.faces.view.facelets.TagAttribute; import javax.faces.view.facelets.TagConfig; import javax.faces.view.facelets.TagException; import javax.faces.view.facelets.TagHandler; import java.beans.IntrospectionException; import java.beans.PropertyDescriptor; //from Apache MyFaces 2.0.8 //Retrieved from http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/facelets/AttributeHandler.java/?v=source public final class AttributeHandler extends TagHandler { private static final Logger LOG = LoggerFactory.getLogger(AttributeHandler.class); private final TagAttribute name; private final TagAttribute value; private final TagAttribute mode; public AttributeHandler(final 
TagConfig config) { super(config); this.name = getRequiredAttribute(Attributes.NAME); this.value = getRequiredAttribute(Attributes.VALUE); this.mode = getAttribute(Attributes.MODE); } public void apply(final FaceletContext faceletContext, final UIComponent parent) throws ELException { if (parent == null) { throw new TagException(tag, "Parent UIComponent was null"); } if (ComponentHandler.isNew(parent)) { if (mode != null) { if ("isNotSet".equals(mode.getValue())) { boolean result = false; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = true; break; } else { expressionString = expression.getExpressionString(); } } else { result = false; break; } } } else { result = StringUtils.isEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("isSet".equals(mode.getValue())) { boolean result = true; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = false; break; } else { expressionString = expression.getExpressionString(); } } else { result = true; break; } } } else { result = StringUtils.isNotEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("action".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if 
(expression == null) { // when the action hasn't been set while using a composition. if (LOG.isDebugEnabled()) { LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression action = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, String.class, ComponentUtils.ACTION_ARGS)); ((ActionSource2) parent).setActionExpression(action); } } else if ("actionListener".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { if (LOG.isDebugEnabled()) { // when the action hasn't been set while using a composition. 
LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { LOG.warn("Only expressions are supported mode=actionListener value='" + expressionString + "'"); expressionString = null; break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression actionListener = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, null, ComponentUtils.ACTION_LISTENER_ARGS)); ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(actionListener)); } } else if ("actionFromValue".equals(mode.getValue())) { if (!value.isLiteral()) { final String result = value.getValue(faceletContext); parent.getAttributes().put(name.getValue(), new ConstantMethodBinding(result)); } } else if ("valueIfSet".equals(mode.getValue())) { String expressionString = value.getValue(); String lastExpressionString = null; while (isMethodOrValueExpression(expressionString) && isSimpleExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression != null) { lastExpressionString = expressionString; expressionString = expression.getExpressionString(); } else { // restore last value expressionString = lastExpressionString; break; } } if (expressionString != null) { final String attributeName = name.getValue(faceletContext); if (containsMethodOrValueExpression(expressionString)) { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(attributeName, expression); } else { final Object literalValue = getValue(faceletContext, parent, expressionString, attributeName); parent.getAttributes().put(attributeName, literalValue); } } } else { throw new FacesException("Type " + mode 
+ " not supported"); } } else { final String nameValue = name.getValue(faceletContext); if (Attributes.RENDERED.equals(nameValue)) { if (value.isLiteral()) { parent.setRendered(value.getBoolean(faceletContext)); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Boolean.class)); } } else if (Attributes.RENDERED_PARTIALLY.equals(nameValue) && parent instanceof SupportsRenderedPartially) { if (value.isLiteral()) { final String[] components = ComponentUtils.splitList(value.getValue()); ((SupportsRenderedPartially) parent).setRenderedPartially(components); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } else if (Attributes.STYLE_CLASS.equals(nameValue)) { // TODO expression ComponentUtils.setStyleClasses(parent, value.getValue()); } else if (Attributes.MARKUP.equals(nameValue)) { if (parent instanceof SupportsMarkup) { if (value.isLiteral()) { ((SupportsMarkup) parent).setMarkup(Markup.valueOf(value.getValue())); } else { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(nameValue, expression); } } else { LOG.error("Component is not instanceof SupportsMarkup. 
Instance is: " + parent.getClass().getName()); } } else if (parent instanceof EditableValueHolder && Attributes.VALIDATOR.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALIDATOR_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValidator(new MethodExpressionValidator(methodExpression)); } } else if (parent instanceof EditableValueHolder && Attributes.VALUE_CHANGE_LISTENER.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALUE_CHANGE_LISTENER_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValueChangeListener( new MethodExpressionValueChangeListener(methodExpression)); } } else if (parent instanceof ValueHolder && Attributes.CONVERTER.equals(nameValue)) { setConverter(faceletContext, parent, nameValue); } else if (parent instanceof ActionSource && Attributes.ACTION.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, String.class, ComponentUtils.ACTION_ARGS); if (action != null) { ((ActionSource2) parent).setActionExpression(action); } } else if (parent instanceof ActionSource && Attributes.ACTION_LISTENER.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, null, ComponentUtils.ACTION_LISTENER_ARGS); if (action != null) { ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(action)); } } else if (!parent.getAttributes().containsKey(nameValue)) { if (value.isLiteral()) { parent.getAttributes().put(nameValue, value.getValue()); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } } } } private boolean isMethodOrValueExpression(final String string) { return (string.startsWith("${") || string.startsWith("#{")) && string.endsWith("}"); } private boolean containsMethodOrValueExpression(final String string) { return 
(string.contains("${") || string.contains("#{")) && string.contains("}"); } private boolean isSimpleExpression(final String string) { return string.indexOf('.') < 0 && string.indexOf('[') < 0; } private String removeElParenthesis(final String string) { return string.substring(2, string.length() - 1); } private ValueExpression getExpression(final FaceletContext faceletContext) { final String myValue = removeElParenthesis(value.getValue()); return faceletContext.getVariableMapper().resolveVariable(myValue); } private MethodExpression getMethodExpression( final FaceletContext faceletContext, final Class returnType, final Class[] args) { // in a composition may be we get the method expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); return new TagMethodExpression(value, expressionFactory.createMethodExpression(faceletContext, expression.getExpressionString(), returnType, args)); } else { return null; } } else { return value.getMethodExpression(faceletContext, returnType, args); } } private Object getValue( final FaceletContext faceletContext, final UIComponent parent, final String expressionString, final String attributeName) { Class type = Object.class; try { type = PropertyUtils.getReadMethod( new PropertyDescriptor(attributeName, parent.getClass())).getReturnType(); } catch (final IntrospectionException e) { LOG.warn("Can't determine expected type", e); } final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final ValueExpression valueExpression = expressionFactory .createValueExpression(faceletContext, expressionString, type); return valueExpression.getValue(faceletContext); } private void setConverter(final FaceletContext faceletContext, final UIComponent parent, 
final String nameValue) { // in a composition may be we get the converter expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { setConverter(faceletContext, parent, nameValue, expression); } } else { setConverter(faceletContext, parent, nameValue, value.getValueExpression(faceletContext, Object.class)); } } private void setConverter( final FaceletContext faceletContext, final UIComponent parent, final String nameValue, final ValueExpression expression) { if (expression.isLiteralText()) { final Converter converter = faceletContext.getFacesContext().getApplication().createConverter(expression.getExpressionString()); ((ValueHolder) parent).setConverter(converter); } else { parent.setValueExpression(nameValue, expression); } } public static void letsAddASimpleMethod() { System.out.println("Howdy!"); } } ================================================ FILE: cost-benefit-calculator/src/test/resources/org/apache/myfaces/tobago/facelets/AttributeHandlerAndSorter.java ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/ package org.apache.myfaces.tobago.facelets; import org.apache.commons.beanutils.PropertyUtils; import org.apache.myfaces.tobago.component.Attributes; import org.apache.myfaces.tobago.component.SupportsMarkup; import org.apache.myfaces.tobago.component.SupportsRenderedPartially; import org.apache.myfaces.tobago.context.Markup; import org.apache.myfaces.tobago.el.ConstantMethodBinding; import org.apache.myfaces.tobago.internal.util.StringUtils; import org.apache.myfaces.tobago.util.ComponentUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.el.ELException; import javax.el.ExpressionFactory; import javax.el.MethodExpression; import javax.el.ValueExpression; import javax.faces.FacesException; import javax.faces.component.ActionSource; import javax.faces.component.ActionSource2; import javax.faces.component.EditableValueHolder; import javax.faces.component.UIComponent; import javax.faces.component.ValueHolder; import javax.faces.convert.Converter; import javax.faces.event.MethodExpressionActionListener; import javax.faces.event.MethodExpressionValueChangeListener; import javax.faces.validator.MethodExpressionValidator; import javax.faces.view.facelets.ComponentHandler; import javax.faces.view.facelets.FaceletContext; import javax.faces.view.facelets.TagAttribute; import javax.faces.view.facelets.TagConfig; import javax.faces.view.facelets.TagException; import javax.faces.view.facelets.TagHandler; import java.beans.IntrospectionException; import java.beans.PropertyDescriptor; import org.apache.myfaces.tobago.event.SortActionEvent; import org.apache.myfaces.tobago.internal.component.AbstractUICommand; import org.apache.myfaces.tobago.internal.component.AbstractUISheet; import org.apache.myfaces.tobago.internal.util.StringUtils; import org.apache.myfaces.tobago.model.SheetState; import org.apache.myfaces.tobago.util.BeanComparator; import org.apache.myfaces.tobago.util.ValueExpressionComparator; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import javax.el.ValueExpression; import javax.faces.component.UIColumn; import javax.faces.component.UICommand; import javax.faces.component.UIComponent; import javax.faces.component.UIInput; import javax.faces.component.UIOutput; import javax.faces.component.UISelectBoolean; import javax.faces.component.UISelectMany; import javax.faces.component.UISelectOne; import javax.faces.context.FacesContext; import javax.faces.model.DataModel; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; public final class AttributeHandlerAndSorter extends TagHandler { private static final Logger LOG = LoggerFactory.getLogger(org.apache.myfaces.tobago.facelets.AttributeHandler.class); private final TagAttribute name; private final TagAttribute value; private final TagAttribute mode; public AttributeHandler(final TagConfig config) { super(config); this.name = getRequiredAttribute(Attributes.NAME); this.value = getRequiredAttribute(Attributes.VALUE); this.mode = getAttribute(Attributes.MODE); } public void apply(final FaceletContext faceletContext, final UIComponent parent) throws ELException { if (parent == null) { throw new TagException(tag, "Parent UIComponent was null"); } if (ComponentHandler.isNew(parent)) { if (mode != null) { if ("isNotSet".equals(mode.getValue())) { boolean result = false; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = true; break; } else { expressionString = expression.getExpressionString(); } } else { result = false; break; } } } else { result = StringUtils.isEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if 
("isSet".equals(mode.getValue())) { boolean result = true; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = false; break; } else { expressionString = expression.getExpressionString(); } } else { result = true; break; } } } else { result = StringUtils.isNotEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("action".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { // when the action hasn't been set while using a composition. 
if (LOG.isDebugEnabled()) { LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression action = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, String.class, ComponentUtils.ACTION_ARGS)); ((ActionSource2) parent).setActionExpression(action); } } else if ("actionListener".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { if (LOG.isDebugEnabled()) { // when the action hasn't been set while using a composition. 
LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { LOG.warn("Only expressions are supported mode=actionListener value='" + expressionString + "'"); expressionString = null; break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression actionListener = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, null, ComponentUtils.ACTION_LISTENER_ARGS)); ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(actionListener)); } } else if ("actionFromValue".equals(mode.getValue())) { if (!value.isLiteral()) { final String result = value.getValue(faceletContext); parent.getAttributes().put(name.getValue(), new ConstantMethodBinding(result)); } } else if ("valueIfSet".equals(mode.getValue())) { String expressionString = value.getValue(); String lastExpressionString = null; while (isMethodOrValueExpression(expressionString) && isSimpleExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression != null) { lastExpressionString = expressionString; expressionString = expression.getExpressionString(); } else { // restore last value expressionString = lastExpressionString; break; } } if (expressionString != null) { final String attributeName = name.getValue(faceletContext); if (containsMethodOrValueExpression(expressionString)) { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(attributeName, expression); } else { final Object literalValue = getValue(faceletContext, parent, expressionString, attributeName); parent.getAttributes().put(attributeName, literalValue); } } } else { throw new FacesException("Type " + mode 
+ " not supported"); } } else { final String nameValue = name.getValue(faceletContext); if (Attributes.RENDERED.equals(nameValue)) { if (value.isLiteral()) { parent.setRendered(value.getBoolean(faceletContext)); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Boolean.class)); } } else if (Attributes.RENDERED_PARTIALLY.equals(nameValue) && parent instanceof SupportsRenderedPartially) { if (value.isLiteral()) { final String[] components = ComponentUtils.splitList(value.getValue()); ((SupportsRenderedPartially) parent).setRenderedPartially(components); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } else if (Attributes.STYLE_CLASS.equals(nameValue)) { // TODO expression ComponentUtils.setStyleClasses(parent, value.getValue()); } else if (Attributes.MARKUP.equals(nameValue)) { if (parent instanceof SupportsMarkup) { if (value.isLiteral()) { ((SupportsMarkup) parent).setMarkup(Markup.valueOf(value.getValue())); } else { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(nameValue, expression); } } else { LOG.error("Component is not instanceof SupportsMarkup. 
Instance is: " + parent.getClass().getName()); } } else if (parent instanceof EditableValueHolder && Attributes.VALIDATOR.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALIDATOR_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValidator(new MethodExpressionValidator(methodExpression)); } } else if (parent instanceof EditableValueHolder && Attributes.VALUE_CHANGE_LISTENER.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALUE_CHANGE_LISTENER_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValueChangeListener( new MethodExpressionValueChangeListener(methodExpression)); } } else if (parent instanceof ValueHolder && Attributes.CONVERTER.equals(nameValue)) { setConverter(faceletContext, parent, nameValue); } else if (parent instanceof ActionSource && Attributes.ACTION.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, String.class, ComponentUtils.ACTION_ARGS); if (action != null) { ((ActionSource2) parent).setActionExpression(action); } } else if (parent instanceof ActionSource && Attributes.ACTION_LISTENER.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, null, ComponentUtils.ACTION_LISTENER_ARGS); if (action != null) { ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(action)); } } else if (!parent.getAttributes().containsKey(nameValue)) { if (value.isLiteral()) { parent.getAttributes().put(nameValue, value.getValue()); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } } } } private boolean isMethodOrValueExpression(final String string) { return (string.startsWith("${") || string.startsWith("#{")) && string.endsWith("}"); } private boolean containsMethodOrValueExpression(final String string) { return 
(string.contains("${") || string.contains("#{")) && string.contains("}"); } private boolean isSimpleExpression(final String string) { return string.indexOf('.') < 0 && string.indexOf('[') < 0; } private String removeElParenthesis(final String string) { return string.substring(2, string.length() - 1); } private ValueExpression getExpression(final FaceletContext faceletContext) { final String myValue = removeElParenthesis(value.getValue()); return faceletContext.getVariableMapper().resolveVariable(myValue); } private MethodExpression getMethodExpression( final FaceletContext faceletContext, final Class returnType, final Class[] args) { // in a composition may be we get the method expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); return new TagMethodExpression(value, expressionFactory.createMethodExpression(faceletContext, expression.getExpressionString(), returnType, args)); } else { return null; } } else { return value.getMethodExpression(faceletContext, returnType, args); } } private Object getValue( final FaceletContext faceletContext, final UIComponent parent, final String expressionString, final String attributeName) { Class type = Object.class; try { type = PropertyUtils.getReadMethod( new PropertyDescriptor(attributeName, parent.getClass())).getReturnType(); } catch (final IntrospectionException e) { LOG.warn("Can't determine expected type", e); } final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final ValueExpression valueExpression = expressionFactory .createValueExpression(faceletContext, expressionString, type); return valueExpression.getValue(faceletContext); } private void setConverter(final FaceletContext faceletContext, final UIComponent parent, 
final String nameValue) { // in a composition may be we get the converter expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { setConverter(faceletContext, parent, nameValue, expression); } } else { setConverter(faceletContext, parent, nameValue, value.getValueExpression(faceletContext, Object.class)); } } private void setConverter( final FaceletContext faceletContext, final UIComponent parent, final String nameValue, final ValueExpression expression) { if (expression.isLiteralText()) { final Converter converter = faceletContext.getFacesContext().getApplication().createConverter(expression.getExpressionString()); ((ValueHolder) parent).setConverter(converter); } else { parent.setValueExpression(nameValue, expression); } } } //http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/component/Sorter.java/?v=source class Sorter { private static final Logger LOG = LoggerFactory.getLogger(Sorter.class); private Comparator comparator; /** * @deprecated Please use {@link #perform(org.apache.myfaces.tobago.internal.component.AbstractUISheet)} */ @Deprecated public void perform(final SortActionEvent sortEvent) { final AbstractUISheet data = (AbstractUISheet) sortEvent.getComponent(); perform(data); } public void perform(final AbstractUISheet data) { Object value = data.getValue(); if (value instanceof DataModel) { value = ((DataModel) value).getWrappedData(); } final FacesContext facesContext = FacesContext.getCurrentInstance(); final SheetState sheetState = data.getSheetState(facesContext); final String sortedColumnId = sheetState.getSortedColumnId(); if (LOG.isDebugEnabled()) { LOG.debug("sorterId = '{}'", sortedColumnId); } if (sortedColumnId == null) { // not to be sorted return; } final UIColumn column = (UIColumn) 
data.findComponent(sortedColumnId); if (column == null) { LOG.warn("No column to sort found, sorterId = '{}'", sortedColumnId); return; } final Comparator actualComparator; if (value instanceof List || value instanceof Object[]) { final String sortProperty; try { final UIComponent child = getFirstSortableChild(column.getChildren()); if (child != null) { final String attributeName = child instanceof AbstractUICommand ? Attributes.LABEL : Attributes.VALUE; if (child.getValueExpression(attributeName) != null) { final String var = data.getVar(); if (var == null) { LOG.error("No sorting performed. Property var of sheet is not set!"); unsetSortableAttribute(column); return; } String expressionString = child.getValueExpression(attributeName).getExpressionString(); if (isSimpleProperty(expressionString)) { if (expressionString.startsWith("#{") && expressionString.endsWith("}")) { expressionString = expressionString.substring(2, expressionString.length() - 1); } sortProperty = expressionString.substring(var.length() + 1); actualComparator = new BeanComparator( sortProperty, comparator, !sheetState.isAscending()); if (LOG.isDebugEnabled()) { LOG.debug("Sort property is {}", sortProperty); } } else { final boolean descending = !sheetState.isAscending(); final ValueExpression expression = child.getValueExpression("value"); actualComparator = new ValueExpressionComparator(facesContext, var, expression, descending, comparator); } } else { LOG.error("No sorting performed. No Expression target found for sorting!"); unsetSortableAttribute(column); return; } } else { LOG.error("No sorting performed. Value is not instanceof List or Object[]!"); unsetSortableAttribute(column); return; } } catch (final Exception e) { LOG.error("Error while extracting sortMethod :" + e.getMessage(), e); if (column != null) { unsetSortableAttribute(column); } return; } // TODO: locale / comparator parameter? 
// don't compare numbers with Collator.getInstance() comparator // Comparator comparator = Collator.getInstance(); // comparator = new RowComparator(ascending, method); // memorize selected rows List selectedDataRows = null; if (sheetState.getSelectedRows().size() > 0) { selectedDataRows = new ArrayList(sheetState.getSelectedRows().size()); Object dataRow; for (final Integer index : sheetState.getSelectedRows()) { if (value instanceof List) { dataRow = ((List) value).get(index); } else { dataRow = ((Object[]) value)[index]; } selectedDataRows.add(dataRow); } } // do sorting if (value instanceof List) { Collections.sort((List) value, actualComparator); } else { // value is instanceof Object[] Arrays.sort((Object[]) value, actualComparator); } // restore selected rows if (selectedDataRows != null) { sheetState.getSelectedRows().clear(); for (final Object dataRow : selectedDataRows) { int index = -1; if (value instanceof List) { for (int i = 0; i < ((List) value).size() && index < 0; i++) { if (dataRow == ((List) value).get(i)) { index = i; } } } else { for (int i = 0; i < ((Object[]) value).length && index < 0; i++) { if (dataRow == ((Object[]) value)[i]) { index = i; } } } if (index >= 0) { sheetState.getSelectedRows().add(index); } } } } else { // DataModel?, ResultSet, Result or Object LOG.warn("Sorting not supported for type " + (value != null ? 
value.getClass().toString() : "null")); } } // XXX needs to be tested // XXX was based on ^#\{(\w+(\.\w)*)\}$ which is wrong, because there is a + missing after the last \w boolean isSimpleProperty(final String expressionString) { if (expressionString.startsWith("#{") && expressionString.endsWith("}")) { final String inner = expressionString.substring(2, expressionString.length() - 1); final String[] parts = StringUtils.split(inner, '.'); for (final String part : parts) { if (!StringUtils.isAlpha(part)) { return false; } } return true; } return false; } private void unsetSortableAttribute(final UIColumn uiColumn) { LOG.warn("removing attribute sortable from column " + uiColumn.getId()); uiColumn.getAttributes().put(Attributes.SORTABLE, Boolean.FALSE); } private UIComponent getFirstSortableChild(final List children) { UIComponent result = null; for (UIComponent child : children) { result = child; if (child instanceof UISelectMany || child instanceof UISelectOne || child instanceof UISelectBoolean || (child instanceof AbstractUICommand && child.getChildren().isEmpty()) || (child instanceof UIInput && RendererTypes.HIDDEN.equals(child.getRendererType()))) { continue; // look for a better component if any } if (child instanceof UIOutput) { break; } if (child instanceof UICommand || child instanceof javax.faces.component.UIPanel) { child = getFirstSortableChild(child.getChildren()); if (child instanceof UIOutput) { break; } } } return result; } public Comparator getComparator() { return comparator; } public void setComparator(final Comparator comparator) { this.comparator = comparator; } } ================================================ FILE: coverage/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT coverage Compute aggregated test code coverage true org.hjug.refactorfirst.changepronenessranker change-proneness-ranker org.hjug.refactorfirst.effortranker effort-ranker org.hjug.refactorfirst.dsm 
graph-algorithms org.hjug.refactorfirst.costbenefitcalculator cost-benefit-calculator org.hjug.refactorfirst.graphdatagenerator graph-data-generator org.hjug.refactorfirst.plugin refactor-first-maven-plugin org.hjug.refactorfirst.report report org.jacoco jacoco-maven-plugin 0.8.8 report-aggregate verify report-aggregate ================================================ FILE: effort-ranker/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.effortranker effort-ranker RefactorFirst Effort Ranker net.sourceforge.pmd pmd-java org.hjug.refactorfirst.testresources test-resources org.slf4j slf4j-api ================================================ FILE: effort-ranker/src/main/java/org/hjug/metrics/CBOClass.java ================================================ package org.hjug.metrics; import java.util.Scanner; import lombok.Data; /** * Created by Jim on 11/16/2016. */ @Data public class CBOClass implements Disharmony { private String className; private String fileName; private String packageName; private Integer couplingCount; public CBOClass(String className, String fileName, String packageName, String result) { this.className = className; this.fileName = fileName; this.packageName = packageName; try (Scanner scanner = new Scanner(result)) { couplingCount = scanner.useDelimiter("[^\\d]+").nextInt(); } } } ================================================ FILE: effort-ranker/src/main/java/org/hjug/metrics/Disharmony.java ================================================ package org.hjug.metrics; public interface Disharmony { String getFileName(); String getClassName(); String getPackageName(); } ================================================ FILE: effort-ranker/src/main/java/org/hjug/metrics/GodClass.java ================================================ package org.hjug.metrics; import java.text.NumberFormat; import java.text.ParseException; import lombok.Data; /** * Created by Jim on 
11/16/2016. */ @Data public class GodClass implements Disharmony { private String className; private String fileName; private String packageName; private Integer wmc; private Integer atfd; private Float tcc; private Integer wmcRank; private Integer atfdRank; private Integer tccRank; private Integer sumOfRanks; private Integer overallRank; public GodClass(String className, String fileName, String packageName, String result) { this.className = className; this.fileName = fileName; this.packageName = packageName; NumberFormat integerFormat = NumberFormat.getIntegerInstance(); String[] values = result.substring(result.indexOf("(") + 1, result.indexOf(")")).split(", "); try { wmc = (int) (long) integerFormat.parse(extractValue(values[0])); atfd = (int) (long) integerFormat.parse(extractValue(values[1])); } catch (ParseException e) { throw new RuntimeException(e); } String rawTcc = extractValue(values[2]); tcc = Float.valueOf(rawTcc.replace("%", "")); } private String extractValue(String value) { return value.split("=")[1]; } } ================================================ FILE: effort-ranker/src/main/java/org/hjug/metrics/GodClassRanker.java ================================================ package org.hjug.metrics; import java.util.Comparator; import java.util.List; import java.util.function.Function; import java.util.function.ObjIntConsumer; import lombok.extern.slf4j.Slf4j; /** * Created by Wendy on 11/16/2016. 
*/
@Slf4j
public class GodClassRanker {

    /**
     * Ranks the given god classes: assigns per-metric ranks for WMC, ATFD and TCC,
     * then derives an overall rank from the sum of the three. Sorts the list in place.
     */
    public void rankGodClasses(List<GodClass> godClasses) {
        rankWmc(godClasses);
        rankAtfd(godClasses);
        rankTcc(godClasses);
        computeOverallRank(godClasses);
    }

    // Overall rank = dense rank over the sum of the three per-metric ranks.
    void computeOverallRank(List<GodClass> godClasses) {
        godClasses.forEach(godClass -> godClass.setSumOfRanks(
                godClass.getWmcRank() + godClass.getAtfdRank() + godClass.getTccRank()));
        godClasses.sort(Comparator.comparing(GodClass::getSumOfRanks));
        Function<GodClass, Integer> getSumOfRanks = GodClass::getSumOfRanks;
        ObjIntConsumer<GodClass> setOverallRank = GodClass::setOverallRank;
        setRank(godClasses, getSumOfRanks, setOverallRank);
    }

    void rankWmc(List<GodClass> godClasses) {
        log.info("Calculating Weighted Method per Class (WMC) Rank");
        godClasses.sort(Comparator.comparing(GodClass::getWmc));
        Function<GodClass, Integer> getWmc = GodClass::getWmc;
        ObjIntConsumer<GodClass> setWmcRank = GodClass::setWmcRank;
        setRank(godClasses, getWmc, setWmcRank);
    }

    void rankAtfd(List<GodClass> godClasses) {
        log.info("Calculating Access to Foreign Data (ATFD) Rank");
        godClasses.sort(Comparator.comparing(GodClass::getAtfd));
        Function<GodClass, Integer> getAtfd = GodClass::getAtfd;
        ObjIntConsumer<GodClass> setAtfdRank = GodClass::setAtfdRank;
        setRank(godClasses, getAtfd, setAtfdRank);
    }

    void rankTcc(List<GodClass> godClasses) {
        log.info("Calculating Tight Class Cohesion (TCC) Rank");
        godClasses.sort(Comparator.comparing(GodClass::getTcc));
        Function<GodClass, Float> getTcc = GodClass::getTcc;
        ObjIntConsumer<GodClass> setTccRank = GodClass::setTccRank;
        setRank(godClasses, getTcc, setTccRank);
    }

    /**
     * Assigns a dense rank (equal values share a rank, next distinct value gets rank+1)
     * over an already-sorted list. The list MUST be sorted ascending by the same metric
     * that {@code getter} reads before calling this method.
     *
     * NOTE(review): the extracted text showed only "> void setRank(" here — the generic
     * declaration was stripped; restored as {@code <T extends Comparable<T>>} to match
     * the {@code value.compareTo(previousValue)} usage below.
     */
    <T extends Comparable<T>> void setRank(
            List<GodClass> godClasses, Function<GodClass, T> getter, ObjIntConsumer<GodClass> setter) {
        int rank = 1;
        T previousValue = null;
        for (GodClass godClass : godClasses) {
            T value = getter.apply(godClass);
            if (null == previousValue) {
                previousValue = value;
            }
            if (value.compareTo(previousValue) > 0) {
                // strictly larger value => next rank; ties keep the current rank
                setter.accept(godClass, ++rank);
                previousValue = value;
            } else {
                setter.accept(godClass, rank);
            }
        }
    }
}
================================================ FILE: effort-ranker/src/main/java/org/hjug/metrics/rules/CBORule.java ================================================ package
org.hjug.metrics.rules; import java.util.HashSet; import java.util.Set; import net.sourceforge.pmd.lang.java.ast.ASTClassOrInterfaceDeclaration; import net.sourceforge.pmd.lang.java.ast.ASTCompilationUnit; import net.sourceforge.pmd.lang.java.ast.ASTFieldDeclaration; import net.sourceforge.pmd.lang.java.ast.ASTFormalParameter; import net.sourceforge.pmd.lang.java.ast.ASTLocalVariableDeclaration; import net.sourceforge.pmd.lang.java.ast.ASTMethodDeclaration; import net.sourceforge.pmd.lang.java.ast.ASTType; import net.sourceforge.pmd.lang.java.rule.AbstractJavaRule; import net.sourceforge.pmd.lang.java.symbols.JTypeDeclSymbol; import net.sourceforge.pmd.lang.java.types.JTypeMirror; import net.sourceforge.pmd.properties.NumericConstraints; import net.sourceforge.pmd.properties.PropertyBuilder; import net.sourceforge.pmd.properties.PropertyDescriptor; import net.sourceforge.pmd.properties.PropertyFactory; /** * Copy of PMD's CouplingBetweenObjectsRule * but generates the originally intended message containing coupling count */ public class CBORule extends AbstractJavaRule { private static final PropertyDescriptor THRESHOLD_DESCRIPTOR = ((PropertyBuilder.GenericPropertyBuilder) ((PropertyBuilder.GenericPropertyBuilder) ((PropertyBuilder.GenericPropertyBuilder) PropertyFactory.intProperty("threshold") .desc("Unique type reporting threshold")) .require(NumericConstraints.positive())) .defaultValue(20)) .build(); private int couplingCount; private boolean inInterface; private final Set typesFoundSoFar = new HashSet(); private String message; public CBORule() { this.definePropertyDescriptor(THRESHOLD_DESCRIPTOR); } @Override public String getMessage() { return message; } @Override public Object visit(ASTCompilationUnit cu, Object data) { super.visit(cu, data); if (this.couplingCount > 20) { // (Integer) this.getProperty(THRESHOLD_DESCRIPTOR)) { message = "A value of " + this.couplingCount + " may denote a high amount of coupling within the class"; this.addViolation(data, 
cu, message); this.setMessage(message); } this.couplingCount = 0; this.typesFoundSoFar.clear(); return null; } public Object visit(ASTClassOrInterfaceDeclaration node, Object data) { boolean prev = this.inInterface; this.inInterface = node.isInterface(); super.visit(node, data); this.inInterface = prev; return null; } public Object visit(ASTMethodDeclaration node, Object data) { ASTType type = node.getResultTypeNode(); this.checkVariableType(type); return super.visit(node, data); } public Object visit(ASTLocalVariableDeclaration node, Object data) { ASTType type = node.getTypeNode(); this.checkVariableType(type); return super.visit(node, data); } public Object visit(ASTFormalParameter node, Object data) { ASTType type = node.getTypeNode(); this.checkVariableType(type); return super.visit(node, data); } public Object visit(ASTFieldDeclaration node, Object data) { ASTType type = node.getTypeNode(); this.checkVariableType(type); return super.visit(node, data); } private void checkVariableType(ASTType typeNode) { if (!this.inInterface && typeNode != null) { JTypeMirror t = typeNode.getTypeMirror(); if (!this.ignoreType(typeNode, t) && this.typesFoundSoFar.add(t)) { ++this.couplingCount; } } } private boolean ignoreType(ASTType typeNode, JTypeMirror t) { if (typeNode.getEnclosingType() != null && typeNode.getEnclosingType().getSymbol().equals(t.getSymbol())) { return true; } else { JTypeDeclSymbol symbol = t.getSymbol(); return symbol == null || "java.lang".equals(symbol.getPackageName()) || t.isPrimitive() || t.isBoxedPrimitive(); } } } ================================================ FILE: effort-ranker/src/test/java/org/hjug/metrics/CBOClassParsingTest.java ================================================ package org.hjug.metrics; import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Locale; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; public class CBOClassParsingTest { 
private Locale defaultLocale; @BeforeEach public void before() { defaultLocale = Locale.getDefault(Locale.Category.FORMAT); Locale.setDefault(Locale.Category.FORMAT, Locale.ENGLISH); } @AfterEach public void after() { Locale.setDefault(defaultLocale); } @Test void test() { String result = "A value of 20 may denote a high amount of coupling within the class"; CBOClass cboClass = new CBOClass("a", "a.txt", "org.hjug", result); assertEquals(Integer.valueOf(20), cboClass.getCouplingCount()); } } ================================================ FILE: effort-ranker/src/test/java/org/hjug/metrics/GodClassParsingTest.java ================================================ package org.hjug.metrics; import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Locale; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; public class GodClassParsingTest { private Locale defaultLocale; @BeforeEach public void before() { defaultLocale = Locale.getDefault(Locale.Category.FORMAT); Locale.setDefault(Locale.Category.FORMAT, Locale.ENGLISH); } @AfterEach public void after() { Locale.setDefault(defaultLocale); } @Test void test() { String result = "Possible God Class (WMC=9200, ATFD=1,700, TCC=4.597%)"; GodClass god = new GodClass("a", "a.txt", "org.hjug", result); assertEquals(Integer.valueOf(9200), god.getWmc()); assertEquals(Integer.valueOf(1700), god.getAtfd()); assertEquals(Float.valueOf(4.597f), god.getTcc()); } } ================================================ FILE: effort-ranker/src/test/java/org/hjug/metrics/GodClassRankerTest.java ================================================ package org.hjug.metrics; import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; /** * Created by Wendy on 11/16/2016. 
*/
public class GodClassRankerTest {

    private final GodClassRanker godClassRanker = new GodClassRanker();

    // Fixtures built from PMD-style God Class messages; duplicates (the *2
    // instances) carry identical metrics to exercise tie-handling in the rankers.
    private final GodClass attributeHandler = new GodClass(
            "AttributeHandler",
            "org/hjug/git/AttributeHandler.java",
            "org.apache.myfaces.tobago.facelets",
            "null (WMC=79, ATFD=79, TCC=0.027777777777777776)");

    private final GodClass attributeHandler2 = new GodClass(
            "AttributeHandler",
            "org/hjug/git/AttributeHandler.java",
            "org.apache.myfaces.tobago.facelets",
            "null (WMC=79, ATFD=79, TCC=0.027777777777777776)");

    private final GodClass sorter = new GodClass(
            "Sorter",
            "Sorter.java",
            "org.apache.myfaces.tobago.facelets",
            " God class (WMC=51, ATFD=25, TCC=0.2)");

    private final GodClass sorter2 = new GodClass(
            "Sorter",
            "Sorter2.java",
            "org.apache.myfaces.tobago.facelets",
            " God class (WMC=51, ATFD=25, TCC=0.2)");

    private final GodClass themeImpl = new GodClass(
            "ThemeImpl",
            "ThemeImpl.java",
            "org.apache.myfaces.tobago.facelets",
            "God class (WMC=60, ATFD=16, TCC=0.07816091954022988)");

    private final GodClass themeImpl2 = new GodClass(
            "ThemeImpl",
            "ThemeImpl2.java",
            "org.apache.myfaces.tobago.facelets",
            "God class (WMC=60, ATFD=16, TCC=0.07816091954022988)");

    private final List<GodClass> godClasses = new ArrayList<>();

    @BeforeEach
    public void setUp() {
        godClasses.add(attributeHandler);
        godClasses.add(sorter);
        godClasses.add(themeImpl);
    }

    @Test
    void testRankGodClasses() {
        godClassRanker.rankGodClasses(godClasses);

        // Overall ordering after combining the per-metric ranks.
        Assertions.assertEquals("ThemeImpl.java", godClasses.get(0).getFileName());
        Assertions.assertEquals("Sorter.java", godClasses.get(1).getFileName());
        Assertions.assertEquals(
                "org/hjug/git/AttributeHandler.java", godClasses.get(2).getFileName());

        Assertions.assertEquals(5, godClasses.get(0).getSumOfRanks().longValue());
        Assertions.assertEquals(6, godClasses.get(1).getSumOfRanks().longValue());
        Assertions.assertEquals(7, godClasses.get(2).getSumOfRanks().longValue());

        Assertions.assertEquals(1, godClasses.get(0).getOverallRank().longValue());
        Assertions.assertEquals(2, godClasses.get(1).getOverallRank().longValue());
        Assertions.assertEquals(3, godClasses.get(2).getOverallRank().longValue());
    }

    @Test
    void testWmcRanker() {
        godClassRanker.rankWmc(godClasses);

        Assertions.assertEquals("Sorter.java", godClasses.get(0).getFileName());
        Assertions.assertEquals("ThemeImpl.java", godClasses.get(1).getFileName());
        Assertions.assertEquals(
                "org/hjug/git/AttributeHandler.java", godClasses.get(2).getFileName());

        Assertions.assertEquals(1, godClasses.get(0).getWmcRank().longValue());
        Assertions.assertEquals(2, godClasses.get(1).getWmcRank().longValue());
        Assertions.assertEquals(3, godClasses.get(2).getWmcRank().longValue());
    }

    @Test
    void testWmcRankerWithDupeValue() {
        godClasses.add(themeImpl2);
        godClassRanker.rankWmc(godClasses);

        // Equal WMC values share a rank.
        Assertions.assertEquals(1, godClasses.get(0).getWmcRank().longValue());
        Assertions.assertEquals(2, godClasses.get(1).getWmcRank().longValue());
        Assertions.assertEquals(2, godClasses.get(2).getWmcRank().longValue());
        Assertions.assertEquals(3, godClasses.get(3).getWmcRank().longValue());
    }

    @Test
    void testAtfdRanker() {
        godClassRanker.rankAtfd(godClasses);

        Assertions.assertEquals("ThemeImpl.java", godClasses.get(0).getFileName());
        Assertions.assertEquals("Sorter.java", godClasses.get(1).getFileName());
        Assertions.assertEquals(
                "org/hjug/git/AttributeHandler.java", godClasses.get(2).getFileName());

        Assertions.assertEquals(1, godClasses.get(0).getAtfdRank().longValue());
        Assertions.assertEquals(2, godClasses.get(1).getAtfdRank().longValue());
        Assertions.assertEquals(3, godClasses.get(2).getAtfdRank().longValue());
    }

    @Test
    void testAtfdRankerWithDupeValue() {
        godClasses.add(sorter2);
        godClassRanker.rankAtfd(godClasses);

        // Equal ATFD values share a rank.
        Assertions.assertEquals(1, godClasses.get(0).getAtfdRank().longValue());
        Assertions.assertEquals(2, godClasses.get(1).getAtfdRank().longValue());
        Assertions.assertEquals(2, godClasses.get(2).getAtfdRank().longValue());
        Assertions.assertEquals(3, godClasses.get(3).getAtfdRank().longValue());
    }

    @Test
    void testTccRanker() {
        godClassRanker.rankTcc(godClasses);

        Assertions.assertEquals(
                "org/hjug/git/AttributeHandler.java", godClasses.get(0).getFileName());
        Assertions.assertEquals("ThemeImpl.java", godClasses.get(1).getFileName());
        Assertions.assertEquals("Sorter.java", godClasses.get(2).getFileName());

        Assertions.assertEquals(1, godClasses.get(0).getTccRank().longValue());
        Assertions.assertEquals(2, godClasses.get(1).getTccRank().longValue());
        Assertions.assertEquals(3, godClasses.get(2).getTccRank().longValue());
    }

    @Test
    void testTccRankerWithDuplicateValue() {
        godClasses.add(attributeHandler2);
        godClassRanker.rankTcc(godClasses);

        // Two classes with a rank of 1
        Assertions.assertEquals(1, godClasses.get(0).getTccRank().longValue());
        Assertions.assertEquals(1, godClasses.get(1).getTccRank().longValue());
        Assertions.assertEquals(2, godClasses.get(2).getTccRank().longValue());
        Assertions.assertEquals(3, godClasses.get(3).getTccRank().longValue());
    }
}

================================================ FILE: graph-algorithms/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.dsm graph-algorithms RefactorFirst Graph Algorithms Implementation of a DSM that only has JGraphT-Core as a dependency. Can be used by other projects.
org.jgrapht jgrapht-core org.jgrapht jgrapht-opt org.slf4j slf4j-api com.google.guava guava 33.4.8-jre ================================================ FILE: graph-algorithms/src/main/java/org/hjug/dsm/CircularReferenceChecker.java ================================================ package org.hjug.dsm; import java.util.HashMap; import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.jgrapht.Graph; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.AsSubgraph; @Slf4j public class CircularReferenceChecker { private final Map> uniqueSubGraphs = new HashMap<>(); /** * Detects cycles in the graph that is passed in * and returns the unique cycles in the graph as a map of subgraphs * * @param graph * @return a Map of unique cycles in the graph */ public Map> getCycles(Graph graph) { if (!uniqueSubGraphs.isEmpty()) { return uniqueSubGraphs; } // use CycleDetector.findCycles()? Map> cycles = detectCycles(graph); cycles.forEach((vertex, subGraph) -> { int vertexCount = subGraph.vertexSet().size(); int edgeCount = subGraph.edgeSet().size(); if (vertexCount > 1 && edgeCount > 1 && !isDuplicateSubGraph(subGraph, vertex)) { uniqueSubGraphs.put(vertex, subGraph); log.debug("Vertex: {} vertex count: {} edge count: {}", vertex, vertexCount, edgeCount); } }); return uniqueSubGraphs; } private boolean isDuplicateSubGraph(AsSubgraph subGraph, V vertex) { if (!uniqueSubGraphs.isEmpty()) { for (AsSubgraph renderedSubGraph : uniqueSubGraphs.values()) { if (renderedSubGraph.vertexSet().size() == subGraph.vertexSet().size() && renderedSubGraph.edgeSet().size() == subGraph.edgeSet().size() && renderedSubGraph.vertexSet().contains(vertex)) { return true; } } } return false; } private Map> detectCycles(Graph graph) { Map> cyclesForEveryVertexMap = new HashMap<>(); CycleDetector cycleDetector = new CycleDetector<>(graph); cycleDetector.findCycles().forEach(v -> { AsSubgraph subGraph = new AsSubgraph<>(graph, cycleDetector.findCyclesContainingVertex(v)); 
cyclesForEveryVertexMap.put(v, subGraph);
        });
        return cyclesForEveryVertexMap;
    }
}

================================================ FILE: graph-algorithms/src/main/java/org/hjug/dsm/DSM.java ================================================
package org.hjug.dsm;

import java.util.*;
import java.util.stream.Collectors;
import lombok.Getter;
import org.jgrapht.Graph;
import org.jgrapht.Graphs;
import org.jgrapht.alg.connectivity.KosarajuStrongConnectivityInspector;
import org.jgrapht.alg.util.Triple;
import org.jgrapht.opt.graph.sparse.SparseIntDirectedWeightedGraph;

/*
Generated with Generative AI using a prompt similar to the following and iterated on:

Provide a complete implementation of a Numerical DSM with integer weighted edges in Java.
Include as many comments as possible in the implementation to make it easy to understand.
Use JGraphT classes and methods to the greatest extent possible.

Construction of the DSM should take place as follows.
First, Place nodes with empty rows at the top of the DSM.
Second, Place nodes with empty columns on the right of the DSM.
Third, identify strongly connected nodes and treat them as a single node using JGraphT's TarjanSimpleCycles class.
Fourth, order all edges in the DSM with a topological sort that permits cycles in the graph
after identifying strongly connected components.
Fifth, Print the DSM in a method that performs no sorting or ordering - it should only print rows and columns.
When the DSM is printed, label the columns and rows. Place dashes on the diagonal when printing.

include a method tht returns all edges above the diagonal.
include another method that returns the optimal edge above the diagonal to remove,
include a third method that identifies all minimum weight edges to remove above the diagonal.

Used https://sookocheff.com/post/dsm/improving-software-architecture-using-design-structure-matrix/#optimizing-processes
as a starting point.
*/
public class DSM<V, E> {

    private final Graph<V, E> graph;
    // Vertices in DSM order; populated lazily by orderVertices().
    private List<V> sortedActivities;
    boolean activitiesSorted = false;
    // Cached result of getEdgesAboveDiagonal().
    private final List<E> edgesAboveDiagonal = new ArrayList<>();

    List<Integer> sparseIntSortedActivities;
    SparseIntDirectedWeightedGraph sparseGraph;

    @Getter
    double sumOfEdgeWeightsAboveDiagonal;

    // Bidirectional vertex <-> int mappings for the sparse-graph representation.
    Map<V, Integer> vertexToInt = new HashMap<>();
    Map<Integer, V> intToVertex = new HashMap<>();
    List<Triple<Integer, Integer, Double>> sparseEdges = new ArrayList<>();
    int vertexCount = 0;

    public DSM(Graph<V, E> graph) {
        this.graph = graph;
        sortedActivities = new ArrayList<>();
    }

    public void addActivity(V activity) {
        graph.addVertex(activity);
    }

    public void addDependency(V from, V to, int weight) {
        E edge = graph.addEdge(from, to);
        // addEdge returns null when the edge already exists; weight is only set
        // for newly created edges.
        if (edge != null) {
            graph.setEdgeWeight(edge, weight);
        }
    }

    // Computes the DSM ordering once: SCC detection followed by a cycle-tolerant
    // topological sort on the sparse int graph.
    private void orderVertices() {
        sparseGraph = getSparseIntDirectedWeightedGraph();
        List<Set<Integer>> sccs = this.findStronglyConnectedSparseGraphComponents(sparseGraph);
        sparseIntSortedActivities = topologicalSortSparseGraph(sccs, sparseGraph);

        // reversing corrects rendering of the DSM
        // with sources as rows and targets as columns
        // was needed after AI solution was generated and iterated
        Collections.reverse(sparseIntSortedActivities);
        sortedActivities = convertIntToStringVertices(sparseIntSortedActivities);
        activitiesSorted = true;
    }

    private SparseIntDirectedWeightedGraph getSparseIntDirectedWeightedGraph() {
        for (V vertex : graph.vertexSet()) {
            vertexToInt.put(vertex, vertexCount);
            intToVertex.put(vertexCount, vertex);
            vertexCount++;
        }

        // Create the list of sparseEdges for the SparseIntDirectedWeightedGraph
        for (E edge : graph.edgeSet()) {
            int source = vertexToInt.get(graph.getEdgeSource(edge));
            int target = vertexToInt.get(graph.getEdgeTarget(edge));
            double weight = graph.getEdgeWeight(edge);
            sparseEdges.add(Triple.of(source, target, weight));
        }

        // Create the SparseIntDirectedWeightedGraph
        return new SparseIntDirectedWeightedGraph(vertexCount, sparseEdges);
    }

    List<V> convertIntToStringVertices(List<Integer> intVertices) {
        return intVertices.stream().map(intToVertex::get).collect(Collectors.toList());
    }

    /**
     * Kosaraju SCC detector avoids stack overflow.
     * It is used by JGraphT's CycleDetector, and makes sense to use it here as well for consistency
     *
     * @param graph
     * @return
     */
    private List<Set<Integer>> findStronglyConnectedSparseGraphComponents(Graph<Integer, Integer> graph) {
        KosarajuStrongConnectivityInspector<Integer, Integer> kosaraju =
                new KosarajuStrongConnectivityInspector<>(graph);
        return kosaraju.stronglyConnectedSets();
    }

    // DFS-based topological sort driven by the SCC partition; tolerates cycles.
    private List<Integer> topologicalSortSparseGraph(List<Set<Integer>> sccs, Graph<Integer, Integer> graph) {
        List<Integer> sortedActivities = new ArrayList<>();
        Set<Integer> visited = new HashSet<>();
        for (Set<Integer> scc : sccs) {
            for (Integer activity : scc) {
                if (!visited.contains(activity)) {
                    topologicalSortUtilSparseGraph(activity, visited, sortedActivities, graph);
                }
            }
        }
        Collections.reverse(sortedActivities);
        return sortedActivities;
    }

    private void topologicalSortUtilSparseGraph(
            Integer activity, Set<Integer> visited, List<Integer> sortedActivities, Graph<Integer, Integer> graph) {
        visited.add(activity);
        for (Integer neighbor : Graphs.successorListOf(graph, activity)) {
            if (!visited.contains(neighbor)) {
                topologicalSortUtilSparseGraph(neighbor, visited, sortedActivities, graph);
            }
        }
        sortedActivities.add(activity);
    }

    /**
     * Returns (and caches) all edges that sit above the DSM diagonal,
     * also accumulating their summed weight into sumOfEdgeWeightsAboveDiagonal.
     */
    public List<E> getEdgesAboveDiagonal() {
        if (!activitiesSorted) {
            orderVertices();
        }

        if (edgesAboveDiagonal.isEmpty()) {
            for (int i = 0; i < sortedActivities.size(); i++) {
                for (int j = i + 1; j < sortedActivities.size(); j++) {
                    // source / destination vertex was flipped after solution generation
                    // to correctly identify the vertex above the diagonal to remove
                    E edge = graph.getEdge(sortedActivities.get(i), sortedActivities.get(j));
                    if (edge != null) {
                        edgesAboveDiagonal.add(edge);
                    }
                }
            }

            sumOfEdgeWeightsAboveDiagonal = edgesAboveDiagonal.stream()
                    .mapToInt(edge -> (int) graph.getEdgeWeight(edge))
                    .sum();
        }

        return edgesAboveDiagonal;
    }

    private List<Integer> getSparseEdgesAboveDiagonal() {
        if (!activitiesSorted) {
            orderVertices();
        }

        List<Integer> sparseEdgesAboveDiagonal = new ArrayList<>();
        for (int i = 0; i < sparseIntSortedActivities.size(); i++) {
            for (int j = i + 1; j < sparseIntSortedActivities.size(); j++) {
                // source / destination vertex was flipped after solution generation
                // to correctly identify the vertex above the diagonal to remove
                Integer edge = sparseGraph.getEdge(sparseIntSortedActivities.get(i), sparseIntSortedActivities.get(j));
                if (edge != null) {
                    sparseEdgesAboveDiagonal.add(edge);
                }
            }
        }

        return sparseEdgesAboveDiagonal;
    }

    /**
     * Returns the first edge above the diagonal with the lowest weight,
     * short-circuiting as soon as a weight of 1 is found.
     */
    public E getFirstLowestWeightEdgeAboveDiagonalToRemove() {
        if (!activitiesSorted) {
            orderVertices();
        }

        List<E> edgesAboveDiagonal = getEdgesAboveDiagonal();
        E optimalEdge = null;
        int minWeight = Integer.MAX_VALUE;
        for (E edge : edgesAboveDiagonal) {
            int weight = (int) graph.getEdgeWeight(edge);
            if (weight < minWeight) {
                minWeight = weight;
                optimalEdge = edge;
                if (minWeight == 1) {
                    break;
                }
            }
        }
        return optimalEdge;
    }

    /** Returns every edge above the diagonal whose weight equals the minimum. */
    public List<E> getMinimumWeightEdgesAboveDiagonal() {
        if (!activitiesSorted) {
            orderVertices();
        }

        List<E> edgesAboveDiagonal = getEdgesAboveDiagonal();
        List<E> minWeightEdges = new ArrayList<>();
        double minWeight = Double.MAX_VALUE;
        for (E edge : edgesAboveDiagonal) {
            double weight = graph.getEdgeWeight(edge);
            if (weight < minWeight) {
                minWeight = weight;
                minWeightEdges.clear();
                minWeightEdges.add(edge);
            } else if (weight == minWeight) {
                minWeightEdges.add(edge);
            }
        }
        return minWeightEdges;
    }

    public void printDSM() {
        if (!activitiesSorted) {
            orderVertices();
        }
        printDSM(graph, sortedActivities);
    }

    // Prints the matrix as-is: no sorting, dashes on the diagonal, 0 for no edge.
    void printDSM(Graph<V, E> graph, List<V> sortedActivities) {
        System.out.println("Design Structure Matrix:");
        System.out.print(" ");
        for (V col : sortedActivities) {
            System.out.print(col + " ");
        }
        System.out.println();
        for (V row : sortedActivities) {
            System.out.print(row + " ");
            for (V col : sortedActivities) {
                if (col.equals(row)) {
                    System.out.print("- ");
                } else {
                    E edge = graph.getEdge(row, col);
                    if (edge != null) {
                        System.out.print((int) graph.getEdgeWeight(edge) + " ");
                    } else {
                        System.out.print("0 ");
                    }
                }
            }
            System.out.println();
        }
    }
}
================================================ FILE: graph-algorithms/src/main/java/org/hjug/dsm/EdgeRemovalCalculator.java ================================================ package org.hjug.dsm; import java.util.*; import java.util.stream.Collectors; import org.jgrapht.Graph; import org.jgrapht.graph.AsSubgraph; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; public class EdgeRemovalCalculator { private final Graph graph; private DSM dsm; private final Map> cycles; private Set edgesToRemove; public EdgeRemovalCalculator(Graph graph, DSM dsm) { this.graph = graph; this.dsm = dsm; this.cycles = new CircularReferenceChecker().getCycles(graph); } public EdgeRemovalCalculator(Graph graph, Set edgesToRemove) { this.graph = graph; this.edgesToRemove = edgesToRemove; this.cycles = new CircularReferenceChecker().getCycles(graph); } /** * Captures the impact of the removal of each edge above the diagonal. */ public List getImpactOfEdgesAboveDiagonalIfRemoved(int limit) { // get edges above diagonal for DSM graph List edgesAboveDiagonal; List allEdgesAboveDiagonal = dsm.getEdgesAboveDiagonal(); if (limit == 0 || allEdgesAboveDiagonal.size() <= limit) { edgesAboveDiagonal = allEdgesAboveDiagonal; } else { // get first 50 values of min weight List minimumWeightEdgesAboveDiagonal = dsm.getMinimumWeightEdgesAboveDiagonal(); int max = Math.min(minimumWeightEdgesAboveDiagonal.size(), limit); edgesAboveDiagonal = minimumWeightEdgesAboveDiagonal.subList(0, max); } int currentCycleCount = cycles.size(); return edgesAboveDiagonal.stream() .map(this::calculateEdgeToRemoveInfo) .sorted( Comparator.comparing((EdgeToRemoveInfo edgeToRemoveInfo) -> currentCycleCount - edgeToRemoveInfo.getNewCycleCount()) /*.thenComparing(EdgeToRemoveInfo::getEdgeWeight)*/ ) .collect(Collectors.toList()); } public List getImpactOfEdges() { int currentCycleCount = cycles.size(); return edgesToRemove.stream() .map(this::calculateEdgeToRemoveInfo) .sorted( 
Comparator.comparing((EdgeToRemoveInfo edgeToRemoveInfo) -> currentCycleCount - edgeToRemoveInfo.getNewCycleCount()) /*.thenComparing(EdgeToRemoveInfo::getEdgeWeight)*/ ) .collect(Collectors.toList()); } public EdgeToRemoveInfo calculateEdgeToRemoveInfo(DefaultWeightedEdge edgeToRemove) { // clone graph and remove edge Graph improvedGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); graph.vertexSet().forEach(improvedGraph::addVertex); for (DefaultWeightedEdge weightedEdge : graph.edgeSet()) { improvedGraph.addEdge(graph.getEdgeSource(weightedEdge), graph.getEdgeTarget(weightedEdge), weightedEdge); } improvedGraph.removeEdge(edgeToRemove); // Calculate new cycle count int newCycleCount = new CircularReferenceChecker() .getCycles(improvedGraph) .size(); // calculate new graph statistics double removedEdgeWeight = graph.getEdgeWeight(edgeToRemove); double payoff = newCycleCount / removedEdgeWeight; return new EdgeToRemoveInfo(edgeToRemove, (int) removedEdgeWeight, newCycleCount, payoff); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/dsm/EdgeToRemoveInfo.java ================================================ package org.hjug.dsm; import lombok.Data; import org.jgrapht.graph.DefaultWeightedEdge; @Data public class EdgeToRemoveInfo { private final DefaultWeightedEdge edge; private final int removedEdgeWeight; private final int newCycleCount; private final double payoff; // impact / effort } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/dsm/OptimalBackEdgeRemover.java ================================================ package org.hjug.dsm; import java.util.*; import org.jgrapht.Graph; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.alg.cycle.JohnsonSimpleCycles; import org.jgrapht.graph.AsSubgraph; public class OptimalBackEdgeRemover { private Graph graph; /** * Constructor initializing with the target graph. 
* @param graph The directed weighted graph to analyze */ public OptimalBackEdgeRemover(Graph graph) { this.graph = graph; } /** * Finds the optimal back edge(s) to remove to move the graph closer to a DAG. * @return A set of edges to remove */ public Set findOptimalBackEdgesToRemove() { CycleDetector cycleDetector = new CycleDetector<>(graph); // If the graph is already acyclic, return empty set if (!cycleDetector.detectCycles()) { return Collections.emptySet(); } // Find all cycles in the graph JohnsonSimpleCycles cycleFinder = new JohnsonSimpleCycles<>(graph); List> originalCycles = cycleFinder.findSimpleCycles(); int originalCycleCount = originalCycles.size(); // Identify edges that are part of at least one cycle Set edgesInCycles = new HashSet<>(); for (List cycle : originalCycles) { for (int i = 0; i < cycle.size(); i++) { V source = cycle.get(i); V target = cycle.get((i + 1) % cycle.size()); E edge = graph.getEdge(source, target); edgesInCycles.add(edge); } } // Calculate cycle elimination count for each edge Map edgeCycleEliminationCount = new HashMap<>(); for (E edge : edgesInCycles) { // Create a subgraph without this edge Graph subgraph = new AsSubgraph<>(graph, graph.vertexSet(), new HashSet<>(graph.edgeSet())); subgraph.removeEdge(edge); // Calculate how many cycles would be eliminated JohnsonSimpleCycles subgraphCycleFinder = new JohnsonSimpleCycles<>(subgraph); List> remainingCycles = subgraphCycleFinder.findSimpleCycles(); int cyclesEliminated = originalCycleCount - remainingCycles.size(); edgeCycleEliminationCount.put(edge, cyclesEliminated); } // Find edges that eliminate the most cycles int maxCycleElimination = 0; List maxEliminationEdges = new ArrayList<>(); for (Map.Entry entry : edgeCycleEliminationCount.entrySet()) { if (entry.getValue() > maxCycleElimination) { maxCycleElimination = entry.getValue(); maxEliminationEdges.clear(); maxEliminationEdges.add(entry.getKey()); } else if (entry.getValue() == maxCycleElimination) { 
maxEliminationEdges.add(entry.getKey()); } } // If no cycles are eliminated (shouldn't happen), return empty set if (maxEliminationEdges.isEmpty() || maxCycleElimination == 0) { return Collections.emptySet(); } // If multiple edges eliminate the same number of cycles, choose the one with the lowest weight if (maxEliminationEdges.size() > 1) { double minWeight = Double.MAX_VALUE; List minWeightEdges = new ArrayList<>(); for (E edge : maxEliminationEdges) { double weight = graph.getEdgeWeight(edge); if (weight < minWeight) { minWeight = weight; minWeightEdges.clear(); minWeightEdges.add(edge); } else if (weight == minWeight) { minWeightEdges.add(edge); } } return new HashSet<>(minWeightEdges); } // Return the single edge that eliminates the most cycles return new HashSet<>(maxEliminationEdges); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/dsm/SparseGraphCircularReferenceChecker.java ================================================ package org.hjug.dsm; import java.util.HashMap; import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.AsSubgraph; import org.jgrapht.opt.graph.sparse.SparseIntDirectedWeightedGraph; @Slf4j public class SparseGraphCircularReferenceChecker { private final Map> uniqueSubGraphs = new HashMap<>(); /** * Detects cycles in the graph that is passed in * and returns the unique cycles in the graph as a map of subgraphs * * @param graph * @return a Map of unique cycles in the graph */ public Map> getCycles(SparseIntDirectedWeightedGraph graph) { if (!uniqueSubGraphs.isEmpty()) { return uniqueSubGraphs; } // use CycleDetector.findCycles()? 
Map> cycles = detectCycles(graph); cycles.forEach((vertex, subGraph) -> { int vertexCount = subGraph.vertexSet().size(); int edgeCount = subGraph.edgeSet().size(); if (vertexCount > 1 && edgeCount > 1 && !isDuplicateSubGraph(subGraph, vertex)) { uniqueSubGraphs.put(vertex, subGraph); log.debug("Vertex: {} vertex count: {} edge count: {}", vertex, vertexCount, edgeCount); } }); return uniqueSubGraphs; } private boolean isDuplicateSubGraph(AsSubgraph subGraph, Integer vertex) { if (!uniqueSubGraphs.isEmpty()) { for (AsSubgraph renderedSubGraph : uniqueSubGraphs.values()) { if (renderedSubGraph.vertexSet().size() == subGraph.vertexSet().size() && renderedSubGraph.edgeSet().size() == subGraph.edgeSet().size() && renderedSubGraph.vertexSet().contains(vertex)) { return true; } } } return false; } private Map> detectCycles(SparseIntDirectedWeightedGraph graph) { Map> cyclesForEveryVertexMap = new HashMap<>(); CycleDetector cycleDetector = new CycleDetector<>(graph); cycleDetector.findCycles().forEach(v -> { AsSubgraph subGraph = new AsSubgraph<>(graph, cycleDetector.findCyclesContainingVertex(v)); cyclesForEveryVertexMap.put(v, subGraph); }); return cyclesForEveryVertexMap; } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/dsm/SparseIntDWGEdgeRemovalCalculator.java ================================================ package org.hjug.dsm; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.CopyOnWriteArrayList; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.jgrapht.Graph; import org.jgrapht.Graphs; import org.jgrapht.alg.connectivity.KosarajuStrongConnectivityInspector; import org.jgrapht.alg.util.Triple; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.opt.graph.sparse.SparseIntDirectedWeightedGraph; class 
SparseIntDWGEdgeRemovalCalculator { private final Graph graph; SparseIntDirectedWeightedGraph sparseGraph; List> sparseEdges; List sparseEdgesAboveDiagonal; private final double sumOfEdgeWeightsAboveDiagonal; int vertexCount; Map vertexToInt; Map intToVertex; SparseIntDWGEdgeRemovalCalculator( Graph graph, SparseIntDirectedWeightedGraph sparseGraph, List> sparseEdges, List sparseEdgesAboveDiagonal, double sumOfEdgeWeightsAboveDiagonal, int vertexCount, Map vertexToInt, Map intToVertex) { this.graph = graph; this.sparseGraph = sparseGraph; this.sparseEdges = new CopyOnWriteArrayList<>(sparseEdges); this.sparseEdgesAboveDiagonal = new CopyOnWriteArrayList<>(sparseEdgesAboveDiagonal); this.sumOfEdgeWeightsAboveDiagonal = sumOfEdgeWeightsAboveDiagonal; this.vertexCount = vertexCount; this.vertexToInt = new ConcurrentHashMap<>(vertexToInt); this.intToVertex = new ConcurrentHashMap<>(intToVertex); } public List getImpactOfSparseEdgesAboveDiagonalIfRemoved() { return sparseEdgesAboveDiagonal.parallelStream() .map(this::calculateSparseEdgeToRemoveInfo) .sorted(Comparator.comparing(EdgeToRemoveInfo::getPayoff) .thenComparing(EdgeToRemoveInfo::getRemovedEdgeWeight)) .collect(Collectors.toList()); } private EdgeToRemoveInfo calculateSparseEdgeToRemoveInfo(Integer edgeToRemove) { // clone graph and remove edge int source = sparseGraph.getEdgeSource(edgeToRemove); int target = sparseGraph.getEdgeTarget(edgeToRemove); double weight = sparseGraph.getEdgeWeight(edgeToRemove); Triple removedEdge = Triple.of(source, target, weight); List> tempUpdatedEdgeList = new ArrayList<>(sparseEdges); tempUpdatedEdgeList.remove(removedEdge); List> updatedEdgeList = new CopyOnWriteArrayList<>(tempUpdatedEdgeList); SparseIntDirectedWeightedGraph improvedGraph = new SparseIntDirectedWeightedGraph(vertexCount, updatedEdgeList); // find edges above diagonal List sortedSparseVertices = orderVertices(improvedGraph); List updatedEdges = getSparseEdgesAboveDiagonal(improvedGraph, sortedSparseVertices); 
// calculate new graph statistics int newEdgeCount = updatedEdges.size(); double newEdgeWeightSum = updatedEdges.stream().mapToDouble(improvedGraph::getEdgeWeight).sum(); DefaultWeightedEdge defaultWeightedEdge = graph.getEdge(intToVertex.get(source), intToVertex.get(target)); double payoff = (sumOfEdgeWeightsAboveDiagonal - newEdgeWeightSum) / weight; return new EdgeToRemoveInfo(defaultWeightedEdge, (int) weight, newEdgeCount, payoff); } private List orderVertices(SparseIntDirectedWeightedGraph sparseGraph) { List> sccs = new CopyOnWriteArrayList<>(findStronglyConnectedSparseGraphComponents(sparseGraph)); // List sparseIntSortedActivities = topologicalSortSparseGraph(sccs, sparseGraph); List sparseIntSortedActivities = topologicalParallelSortSparseGraph(sccs, sparseGraph); // reversing corrects rendering of the DSM // with sources as rows and targets as columns // was needed after AI solution was generated and iterated Collections.reverse(sparseIntSortedActivities); return new CopyOnWriteArrayList<>(sparseIntSortedActivities); } /** * Kosaraju SCC detector avoids stack overflow. 
* It is used by JGraphT's CycleDetector, and makes sense to use it here as well for consistency * * @param graph * @return */ private List> findStronglyConnectedSparseGraphComponents(Graph graph) { KosarajuStrongConnectivityInspector kosaraju = new KosarajuStrongConnectivityInspector<>(graph); return kosaraju.stronglyConnectedSets(); } private List topologicalSortSparseGraph(List> sccs, Graph graph) { List sortedActivities = new ArrayList<>(); Set visited = new HashSet<>(); sccs.parallelStream() .flatMap(Set::parallelStream) .filter(activity -> !visited.contains(activity)) .forEach(activity -> topologicalSortUtilSparseGraph(activity, visited, sortedActivities, graph)); Collections.reverse(sortedActivities); return sortedActivities; } private void topologicalSortUtilSparseGraph( Integer activity, Set visited, List sortedActivities, Graph graph) { visited.add(activity); for (Integer neighbor : Graphs.successorListOf(graph, activity)) { if (!visited.contains(neighbor)) { topologicalSortUtilSparseGraph(neighbor, visited, sortedActivities, graph); } } sortedActivities.add(activity); } private List getSparseEdgesAboveDiagonal( SparseIntDirectedWeightedGraph sparseGraph, List sortedActivities) { ConcurrentLinkedQueue sparseEdgesAboveDiagonal = new ConcurrentLinkedQueue<>(); int size = sortedActivities.size(); IntStream.range(0, size).parallel().forEach(i -> { for (int j = i + 1; j < size; j++) { Integer edge = sparseGraph.getEdge(sortedActivities.get(i), sortedActivities.get(j)); if (edge != null) { sparseEdgesAboveDiagonal.add(edge); } } }); return new ArrayList<>(sparseEdgesAboveDiagonal); } private List topologicalParallelSortSparseGraph(List> sccs, Graph graph) { ConcurrentLinkedQueue sortedActivities = new ConcurrentLinkedQueue<>(); Set visited = new ConcurrentSkipListSet<>(); sccs.parallelStream() .flatMap(Set::parallelStream) .filter(activity -> !visited.contains(activity)) .forEach(activity -> topologicalSortUtilSparseGraph(activity, visited, sortedActivities, 
graph)); ArrayList sortedActivitiesList = new ArrayList<>(sortedActivities); Collections.reverse(sortedActivitiesList); return sortedActivitiesList; } private void topologicalSortUtilSparseGraph( Integer activity, Set visited, ConcurrentLinkedQueue sortedActivities, Graph graph) { visited.add(activity); Graphs.successorListOf(graph, activity).parallelStream() .filter(neighbor -> !visited.contains(neighbor)) .forEach(neighbor -> topologicalSortUtilSparseGraph(neighbor, visited, sortedActivities, graph)); sortedActivities.add(activity); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/SuperTypeToken.java ================================================ package org.hjug.feedback; import java.lang.reflect.*; public abstract class SuperTypeToken { private final Type type; protected SuperTypeToken() { Type superclass = getClass().getGenericSuperclass(); if (superclass instanceof ParameterizedType) { this.type = ((ParameterizedType) superclass).getActualTypeArguments()[0]; } else { throw new RuntimeException("Missing type parameter."); } } public Type getType() { return type; } public Class getClassFromTypeToken() { return (Class) getClassFromTypeToken(type); } // ((ParameterizedType) type).getActualTypeArguments()[0] - returns String in List static Class getClassFromTypeToken(Type type) { if (type instanceof Class) { return (Class) type; } else if (type instanceof ParameterizedType) { return (Class) ((ParameterizedType) type).getRawType(); } else if (type instanceof GenericArrayType) { Type componentType = ((GenericArrayType) type).getGenericComponentType(); return java.lang.reflect.Array.newInstance(getClassFromTypeToken(componentType), 0) .getClass(); } else if (type instanceof TypeVariable) { // Type variables don't have a direct class representation return Object.class; // Fallback } else if (type instanceof WildcardType) { Type[] upperBounds = ((WildcardType) type).getUpperBounds(); return 
getClassFromTypeToken(upperBounds[0]); // Use the first upper bound } throw new IllegalArgumentException("Unsupported Type: " + type); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/arc/EdgeInfo.java ================================================ package org.hjug.feedback.arc; import lombok.Data; import org.jgrapht.graph.DefaultWeightedEdge; @Data public class EdgeInfo { private final DefaultWeightedEdge edge; private final int presentInCycleCount; private final boolean removeSource; private final boolean removeTarget; private final int weight; } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/arc/EdgeInfoCalculator.java ================================================ package org.hjug.feedback.arc; import java.util.*; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import org.jgrapht.Graph; import org.jgrapht.graph.AsSubgraph; import org.jgrapht.graph.DefaultWeightedEdge; @RequiredArgsConstructor public class EdgeInfoCalculator { private final Graph graph; private final Collection edgesToRemove; private final Set vertexesToRemove; private final Map> cycles; public Collection calculateEdgeInformation() { List edgeInfos = new ArrayList<>(); for (DefaultWeightedEdge edge : edgesToRemove) { int presentInCycleCount = (int) cycles.values().stream() .filter(cycle -> cycle.containsEdge(edge)) .count(); EdgeInfo edgeInfo = new EdgeInfo( edge, presentInCycleCount, vertexesToRemove.contains(graph.getEdgeSource(edge)), vertexesToRemove.contains(graph.getEdgeTarget(edge)), (int) graph.getEdgeWeight(edge)); edgeInfos.add(edgeInfo); } return edgeInfos.stream() .sorted(Comparator.comparing(EdgeInfo::getPresentInCycleCount) .reversed() .thenComparing(edgeInfo -> edgeInfo.isRemoveSource() ? 0 : 1) .thenComparing(edgeInfo -> edgeInfo.isRemoveTarget() ? 
0 : 1) .thenComparing(EdgeInfo::getWeight)) .collect(Collectors.toList()); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/arc/approximate/FeedbackArcSetResult.java ================================================ package org.hjug.feedback.arc.approximate; import java.util.List; import java.util.Set; /** * Result container for the Feedback Arc Set algorithm */ public class FeedbackArcSetResult { private final List vertexSequence; private final Set feedbackArcs; public FeedbackArcSetResult(List vertexSequence, Set feedbackArcs) { this.vertexSequence = vertexSequence; this.feedbackArcs = feedbackArcs; } public List getVertexSequence() { return vertexSequence; } public Set getFeedbackArcs() { return feedbackArcs; } public int getFeedbackArcCount() { return feedbackArcs.size(); } @Override public String toString() { return String.format( "FeedbackArcSetResult{vertexSequence=%s, feedbackArcCount=%d}", vertexSequence, feedbackArcs.size()); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/arc/approximate/FeedbackArcSetSolver.java ================================================ package org.hjug.feedback.arc.approximate; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.jgrapht.Graph; /** * Parallel implementation of Algorithm GR for the Feedback Arc Set problem * Based on Eades, Lin, and Smyth's fast and effective heuristic * DOI: https://doi.org/10.1016/0020-0190(93)90079-O * https://researchportal.murdoch.edu.au/esploro/outputs/journalArticle/A-fast-and-effective-heuristic-for/991005543112107891 * Generated by Perplexity.ai's Research model */ public class FeedbackArcSetSolver { private final Graph graph; private final ConcurrentHashMap inDegreeMap; private final ConcurrentHashMap 
outDegreeMap; private final ConcurrentHashMap> vertexBins; public FeedbackArcSetSolver(Graph graph) { this.graph = graph; this.inDegreeMap = new ConcurrentHashMap<>(); this.outDegreeMap = new ConcurrentHashMap<>(); this.vertexBins = new ConcurrentHashMap<>(); initializeDegrees(); } /** * Initialize degree maps using parallel streams for better performance */ private void initializeDegrees() { graph.vertexSet().parallelStream().forEach(vertex -> { int inDegree = graph.inDegreeOf(vertex); int outDegree = graph.outDegreeOf(vertex); inDegreeMap.put(vertex, new AtomicInteger(inDegree)); outDegreeMap.put(vertex, new AtomicInteger(outDegree)); // Calculate delta value for bin sorting int delta = outDegree - inDegree; vertexBins.computeIfAbsent(delta, k -> new CopyOnWriteArrayList<>()).add(vertex); }); } /** * Executes Algorithm GR to find a feedback arc set * @return FeedbackArcSetResult containing the vertex sequence and feedback arcs */ public FeedbackArcSetResult solve() { List s1 = new CopyOnWriteArrayList<>(); // Left sequence List s2 = new CopyOnWriteArrayList<>(); // Right sequence Set remainingVertices = ConcurrentHashMap.newKeySet(); remainingVertices.addAll(graph.vertexSet()); Set feedbackArcs = ConcurrentHashMap.newKeySet(); while (!remainingVertices.isEmpty()) { // Process sinks in parallel List sinks = findSinks(remainingVertices); sinks.parallelStream().forEach(sink -> { s2.add(0, sink); removeVertex(sink, remainingVertices, feedbackArcs); }); if (remainingVertices.isEmpty()) break; // Process sources in parallel List sources = findSources(remainingVertices); sources.parallelStream().forEach(source -> { s1.add(source); removeVertex(source, remainingVertices, feedbackArcs); }); if (remainingVertices.isEmpty()) break; // Find vertex with maximum delta value Optional maxDeltaVertex = findMaxDeltaVertex(remainingVertices); if (maxDeltaVertex.isPresent()) { V vertex = maxDeltaVertex.get(); s1.add(vertex); removeVertex(vertex, remainingVertices, feedbackArcs); } } 
// Combine sequences List finalSequence = new ArrayList<>(s1); finalSequence.addAll(s2); // Calculate feedback arcs based on final sequence Set finalFeedbackArcs = calculateFeedbackArcs(finalSequence); return new FeedbackArcSetResult<>(finalSequence, finalFeedbackArcs); } /** * Find all sink vertices (vertices with out-degree 0) using parallel processing */ private List findSinks(Set vertices) { return vertices.parallelStream() .filter(v -> outDegreeMap.get(v).get() == 0) .collect(Collectors.toList()); } /** * Find all source vertices (vertices with in-degree 0) using parallel processing */ private List findSources(Set vertices) { return vertices.parallelStream() .filter(v -> inDegreeMap.get(v).get() == 0) .collect(Collectors.toList()); } /** * Find vertex with maximum delta value (out-degree - in-degree) */ private Optional findMaxDeltaVertex(Set vertices) { return vertices.parallelStream() .max(Comparator.comparingInt( v -> outDegreeMap.get(v).get() - inDegreeMap.get(v).get())); } /** * Remove vertex and update degrees of adjacent vertices */ private void removeVertex(V vertex, Set remainingVertices, Set feedbackArcs) { remainingVertices.remove(vertex); // Update degrees of adjacent vertices in parallel graph.incomingEdgesOf(vertex).parallelStream().forEach(edge -> { V source = graph.getEdgeSource(edge); if (remainingVertices.contains(source)) { outDegreeMap.get(source).decrementAndGet(); } }); graph.outgoingEdgesOf(vertex).parallelStream().forEach(edge -> { V target = graph.getEdgeTarget(edge); if (remainingVertices.contains(target)) { inDegreeMap.get(target).decrementAndGet(); } }); } /** * Calculate feedback arcs based on the final vertex sequence */ private Set calculateFeedbackArcs(List sequence) { Map vertexPosition = new HashMap<>(); for (int i = 0; i < sequence.size(); i++) { vertexPosition.put(sequence.get(i), i); } return graph.edgeSet().parallelStream() .filter(edge -> { V source = graph.getEdgeSource(edge); V target = graph.getEdgeTarget(edge); return 
/**
 * Immutable result holder for the exact minimum feedback arc set algorithm:
 * the chosen arc set together with its objective (total weight) value.
 *
 * @param <E> edge type of the underlying graph
 */
public class FeedbackArcSetResult<E> {

    // Edges whose removal renders the graph acyclic.
    private final Set<E> feedbackArcSet;
    // Objective value reported by the solver for this arc set.
    private final double objectiveValue;

    public FeedbackArcSetResult(Set<E> feedbackArcSet, double objectiveValue) {
        this.feedbackArcSet = feedbackArcSet;
        this.objectiveValue = objectiveValue;
    }

    /** @return the selected feedback arcs */
    public Set<E> getFeedbackArcSet() {
        return feedbackArcSet;
    }

    /** @return the solver's objective value for this arc set */
    public double getObjectiveValue() {
        return objectiveValue;
    }

    /** @return the number of arcs in the feedback arc set */
    public int size() {
        return feedbackArcSet.size();
    }

    @Override
    public String toString() {
        int arcCount = feedbackArcSet.size();
        return String.format(
                "FeedbackArcSetResult{arcSet=%s, objective=%.2f, size=%d}",
                feedbackArcSet, objectiveValue, arcCount);
    }
}
"An Exact Method for the Minimum Feedback Arc Set Problem" * https://dl.acm.org/doi/10.1145/3446429 * https://doi.org/10.1145/3446429 * Generated by Perplexity.ai's Research model */ public class MinimumFeedbackArcSetSolver { private final Graph graph; private final Map edgeWeights; private final Class edgeClass; private final ConcurrentHashMap, Boolean> cycleMatrix; private final int maxIterations; public MinimumFeedbackArcSetSolver(Graph graph, Map edgeWeights, SuperTypeToken edgeTypeToken) { this.graph = graph; this.edgeWeights = edgeWeights != null ? edgeWeights : createUniformWeights(); this.cycleMatrix = new ConcurrentHashMap<>(); this.maxIterations = 1000; this.edgeClass = edgeTypeToken.getClassFromTypeToken(); } /** * Creates uniform weights for all edges when no weights are provided [2] */ private Map createUniformWeights() { Map weights = new ConcurrentHashMap<>(); graph.edgeSet().parallelStream().forEach(edge -> weights.put(edge, 1.0)); return weights; } /** * Main solving method implementing the lazy constraint generation algorithm [2] */ public FeedbackArcSetResult solve() { Set bestFeedbackArcSet = ConcurrentHashMap.newKeySet(); double bestObjectiveValue; // Initialize with a heuristic solution [2] Set initialSolution = computeInitialHeuristicSolution(); bestFeedbackArcSet.addAll(initialSolution); bestObjectiveValue = calculateObjectiveValue(initialSolution); AtomicInteger iteration = new AtomicInteger(0); AtomicBoolean optimalityProved = new AtomicBoolean(false); while (iteration.get() < maxIterations && !optimalityProved.get()) { // Solve relaxed problem with current cycle matrix [2] Set relaxedSolution = solveRelaxedProblem(); // Check if solution is acyclic [12][16] if (isAcyclic(createGraphWithoutEdges(relaxedSolution))) { // Found optimal solution double objectiveValue = calculateObjectiveValue(relaxedSolution); if (objectiveValue < bestObjectiveValue) { bestFeedbackArcSet.clear(); bestFeedbackArcSet.addAll(relaxedSolution); bestObjectiveValue = 
objectiveValue; } optimalityProved.set(true); break; } // Find cycles and extend cycle matrix [2] Set> newCycles = findCyclesInSolution(relaxedSolution); if (newCycles.isEmpty()) { break; // No more cycles found } // Add new cycles to matrix using parallel processing [18] newCycles.parallelStream().forEach(cycle -> { Set cycleEdges = new HashSet<>(cycle); cycleMatrix.put(cycleEdges, Boolean.TRUE); }); iteration.incrementAndGet(); } return new FeedbackArcSetResult<>(bestFeedbackArcSet, bestObjectiveValue); } /** * Computes initial heuristic solution using greedy approach [2] */ private Set computeInitialHeuristicSolution() { Set feedbackArcs = ConcurrentHashMap.newKeySet(); Graph tempGraph = createGraphCopy(); // Use parallel processing to identify cycles [18] while (hasCycles(tempGraph)) { // Find strongly connected components [17][21] KosarajuStrongConnectivityInspector inspector = new KosarajuStrongConnectivityInspector<>(tempGraph); List> sccs = inspector.stronglyConnectedSets(); // Process non-trivial SCCs in parallel [18] Optional edgeToRemove = sccs.parallelStream() .filter(scc -> scc.size() > 1) .flatMap(scc -> getEdgesInSCC(tempGraph, scc).stream()) .min(Comparator.comparingDouble(edge -> edgeWeights.getOrDefault(edge, 1.0))); if (edgeToRemove.isPresent()) { E edge = edgeToRemove.get(); feedbackArcs.add(edge); tempGraph.removeEdge(edge); } else { break; } } return feedbackArcs; } /** * Solves the relaxed integer programming problem [2] */ private Set solveRelaxedProblem() { // Simplified relaxed problem solver // In practice, this would use an integer programming solver Set solution = ConcurrentHashMap.newKeySet(); // Use greedy approach based on current cycle matrix [2] Map edgeCycleCounts = new ConcurrentHashMap<>(); // Count how many cycles each edge participates in [18] cycleMatrix.keySet().parallelStream() .forEach(cycle -> cycle.forEach(edge -> edgeCycleCounts.merge(edge, 1L, Long::sum))); // Select edges with highest cycle participation [2] while 
(!cycleMatrix.isEmpty() && !isAllCyclesCovered(solution)) { Optional bestEdge = edgeCycleCounts.entrySet().parallelStream() .filter(entry -> !solution.contains(entry.getKey())) .max(Map.Entry.comparingByValue() .thenComparing(entry -> 1.0 / edgeWeights.getOrDefault(entry.getKey(), 1.0))) .map(Map.Entry::getKey); if (bestEdge.isPresent()) { solution.add(bestEdge.get()); } else { break; } } return solution; } /** * Finds cycles in the current solution using breadth-first search [2][27] */ private Set> findCyclesInSolution(Set solution) { Set> cycles = ConcurrentHashMap.newKeySet(); Graph remainingGraph = createGraphWithoutEdges(solution); // Use parallel processing to find cycles [18] solution.parallelStream().forEach(edge -> { V source = graph.getEdgeSource(edge); V target = graph.getEdgeTarget(edge); // Find path from target back to source in remaining graph [27] List pathBackToSource = findShortestPath(remainingGraph, target, source); if (!pathBackToSource.isEmpty()) { List cycle = new ArrayList<>(pathBackToSource); cycle.add(edge); cycles.add(cycle); } }); return cycles; } /** * Finds shortest path using breadth-first search [27] */ private List findShortestPath(Graph graph, V start, V target) { if (!graph.containsVertex(start) || !graph.containsVertex(target)) { return List.of(); } Queue queue = new ConcurrentLinkedQueue<>(); Map predecessorEdge = new ConcurrentHashMap<>(); Set visited = ConcurrentHashMap.newKeySet(); queue.offer(start); visited.add(start); while (!queue.isEmpty()) { V current = queue.poll(); if (current.equals(target)) { // Reconstruct path [27] List path = new ArrayList<>(); V node = target; while (predecessorEdge.containsKey(node)) { E edge = predecessorEdge.get(node); path.add(0, edge); node = graph.getEdgeSource(edge); } return path; } // Explore neighbors using parallel processing [18] graph.outgoingEdgesOf(current).parallelStream() .map(graph::getEdgeTarget) .filter(neighbor -> !visited.contains(neighbor)) .forEach(neighbor -> { if 
(visited.add(neighbor)) { predecessorEdge.put(neighbor, graph.getEdge(current, neighbor)); queue.offer(neighbor); } }); } return List.of(); } /** * Checks if graph is acyclic using cycle detector [12][16] */ private boolean isAcyclic(Graph graph) { CycleDetector detector = new CycleDetector<>(graph); return !detector.detectCycles(); } /** * Checks if graph has cycles [12][16] */ private boolean hasCycles(Graph graph) { CycleDetector detector = new CycleDetector<>(graph); return detector.detectCycles(); } /** * Creates a copy of the graph without specified edges [11] */ private Graph createGraphWithoutEdges(Set excludedEdges) { Graph newGraph = new DefaultDirectedGraph<>(edgeClass); // Add all vertices [11] graph.vertexSet().forEach(newGraph::addVertex); // Add edges not in excluded set [18] graph.edgeSet().stream().filter(edge -> !excludedEdges.contains(edge)).forEach(edge -> { V source = graph.getEdgeSource(edge); V target = graph.getEdgeTarget(edge); newGraph.addEdge(source, target); }); return newGraph; } /** * Creates a complete copy of the graph [11] */ private Graph createGraphCopy() { Graph copy = new DefaultDirectedGraph<>(edgeClass); // Copy vertices and edges [11] graph.vertexSet().forEach(copy::addVertex); graph.edgeSet().forEach(edge -> { V source = graph.getEdgeSource(edge); V target = graph.getEdgeTarget(edge); copy.addEdge(source, target); }); return copy; } /** * Gets edges within a strongly connected component [17] */ private Set getEdgesInSCC(Graph graph, Set scc) { return graph.edgeSet().parallelStream() .filter(edge -> { V source = graph.getEdgeSource(edge); V target = graph.getEdgeTarget(edge); return scc.contains(source) && scc.contains(target); }) .collect(Collectors.toSet()); } /** * Checks if all cycles in the matrix are covered by the solution [2] */ private boolean isAllCyclesCovered(Set solution) { return cycleMatrix.keySet().parallelStream() .allMatch(cycle -> cycle.stream().anyMatch(solution::contains)); } /** * Calculates objective 
value for a solution [2] */ private double calculateObjectiveValue(Set solution) { return solution.parallelStream() .mapToDouble(edge -> edgeWeights.getOrDefault(edge, 1.0)) .sum(); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/arc/pageRank/DIAGRAM.md ================================================ # PageRank Feedback Arc Set (PageRankFAS) Algorithm Based on the paper *"Computing a Feedback Arc Set Using PageRank"* by Geladaris, Lionakis, and Tollis ([arXiv:2208.09234](https://arxiv.org/abs/2208.09234)). ## High-Level Algorithm Flow ```mermaid flowchart TD A["**Input:** Directed Graph G(V, E)"] --> B["Copy graph into
working graph G'"] B --> C{"Does G'
have cycles?"} C -- No --> D["**Output:** Feedback Arc Set
(set of removed edges)"] C -- Yes --> E["Find Strongly Connected
Components (SCCs)
using Kosaraju's algorithm"] E --> F["Filter to non-trivial SCCs
(size > 1)"] F --> G["Process each SCC"] G --> H["Extract subgraph
for this SCC"] H --> I["Build Line Digraph L(G)
from SCC subgraph"] I --> J["Run PageRank
on Line Digraph"] J --> K["Select edge with
highest PageRank score"] K --> L["Remove edge from G'
and add to FAS"] L --> C ``` ## Line Digraph Construction Each edge in the original graph becomes a **vertex** in the line digraph. Edges in the line digraph represent adjacency (consecutive traversal) in the original graph. ```mermaid flowchart TD subgraph Original["**Original SCC Subgraph**"] direction LR oA((A)) -->|e1| oB((B)) oB -->|e2| oC((C)) oC -->|e3| oA oA -->|e4| oC end Original --> Transform["Transform: each original edge
becomes a line digraph vertex"] subgraph Line["**Line Digraph L(G)**"] direction LR le1["e1 (A→B)"] -->|"B is source of e2"| le2["e2 (B→C)"] le2 -->|"C is source of e3"| le3["e3 (C→A)"] le2 -->|"C is source of... none extra"| le2x[ ] le3 -->|"A is source of e1"| le1 le3 -->|"A is source of e4"| le4["e4 (A→C)"] le4 -->|"C is source of e3"| le3 le1 -.-> le4x[ ] end style le2x display:none style le4x display:none ``` ## Line Digraph Edge Creation (DFS-Based — Algorithm 3) ```mermaid flowchart TD S["Start DFS from
arbitrary vertex v₀"] --> V["Visit vertex v,
mark as visited"] V --> OE["For each outgoing
edge e of v"] OE --> LV["Get LineVertex
for edge e"] LV --> PREV{"Previous
LineVertex
exists?"} PREV -- Yes --> ADD_EDGE["Add edge:
prevLineVertex → currentLineVertex
in Line Digraph"] PREV -- No --> CHECK ADD_EDGE --> CHECK{"Target of e
already visited?"} CHECK -- No --> REC["Recurse DFS on target
with currentLineVertex as prev"] CHECK -- Yes --> BACK["Add edges from currentLineVertex
to all LineVertices of
target's outgoing edges
(back-edge handling)"] REC --> OE BACK --> OE ``` ## PageRank Computation (Algorithm 4) ```mermaid flowchart TD INIT["Initialize all LineVertex scores
score(v) = 1 / N"] --> ITER{"Iteration
i < maxIterations?"} ITER -- No --> RESULT["Return PageRank scores
for all LineVertices"] ITER -- Yes --> NEWMAP["Create new score map
(all zeros)"] NEWMAP --> EACH["For each LineVertex v
(in parallel)"] EACH --> SINK{"v has outgoing
neighbors?"} SINK -- "No (sink)" --> SELF["newScore(v) += score(v)
(keep score on itself)"] SINK -- Yes --> DIST["Distribute score(v) equally
among outgoing neighbors:
each gets score(v) / outDegree(v)"] DIST --> MERGE["newScore(target) += share
using atomic merge"] SELF --> SWAP MERGE --> SWAP["Swap: currentScores = newScores"] SWAP --> ITER ``` ## Selecting the Feedback Edge ```mermaid flowchart LR PR["PageRank scores
on Line Digraph"] --> MAX["Find LineVertex with
**maximum** PageRank score"] MAX --> ORIG["Map back to
original edge via
LineVertex.getOriginalEdge()"] ORIG --> REMOVE["Remove edge from
working graph &
add to FAS"] ``` ## Class Relationships ```mermaid classDiagram class PageRankFAS~V, E~ { -Graph originalGraph -int pageRankIterations -Class edgeClass +computeFeedbackArcSet() Set~E~ -processStronglyConnectedComponent(graph, scc) E -createLineDigraph(graph) LineDigraph -createLineDigraphEdges(graph, lineDigraph, map) -createLineDigraphEdgesDFS(graph, lineDigraph, map, vertex, prev, visited) -computePageRank(lineDigraph) Map -applyOneIteration(vertices, lineDigraph, current, new) -findStronglyConnectedComponents(graph) List -hasCycles(graph) boolean -createGraphCopy(original) Graph -createSubgraph(graph, vertices) Graph } class LineDigraph~V, E~ { -Set vertices -Map adjacencyMap -Map incomingMap +addVertex(vertex) boolean +addEdge(source, target) boolean +vertexSet() Set +getOutgoingNeighbors(vertex) Set +getIncomingNeighbors(vertex) Set } class LineVertex~V, E~ { -V source -V target -E originalEdge +getSource() V +getTarget() V +getOriginalEdge() E } PageRankFAS --> LineDigraph : creates PageRankFAS --> LineVertex : creates LineDigraph o-- LineVertex : contains ``` ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/arc/pageRank/LineDigraph.java ================================================ package org.hjug.feedback.arc.pageRank; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; /** * Custom LineDigraph implementation that doesn't extend DefaultDirectedGraph. * Represents a directed graph where vertices are LineVertex objects representing * edges from the original graph, and edges represent adjacency relationships. 
*/ class LineDigraph { // Internal storage for vertices and adjacency relationships private final Set> vertices; private final Map, Set>> adjacencyMap; private final Map, Set>> incomingMap; /** * Constructor for LineDigraph */ public LineDigraph() { this.vertices = ConcurrentHashMap.newKeySet(); this.adjacencyMap = new ConcurrentHashMap<>(); this.incomingMap = new ConcurrentHashMap<>(); } /** * Add a vertex to the line digraph * @param vertex The LineVertex to add * @return true if the vertex was added, false if it already existed */ public boolean addVertex(LineVertex vertex) { if (vertices.add(vertex)) { adjacencyMap.putIfAbsent(vertex, ConcurrentHashMap.newKeySet()); incomingMap.putIfAbsent(vertex, ConcurrentHashMap.newKeySet()); return true; } return false; } /** * Remove a vertex from the line digraph * @param vertex The LineVertex to remove * @return true if the vertex was removed, false if it didn't exist */ public boolean removeVertex(LineVertex vertex) { if (vertices.remove(vertex)) { // Remove all outgoing edges Set> outgoing = adjacencyMap.remove(vertex); if (outgoing != null) { outgoing.forEach(target -> incomingMap.get(target).remove(vertex)); } // Remove all incoming edges Set> incoming = incomingMap.remove(vertex); if (incoming != null) { incoming.forEach(source -> adjacencyMap.get(source).remove(vertex)); } return true; } return false; } /** * Add an edge between two vertices in the line digraph * @param source The source LineVertex * @param target The target LineVertex * @return true if the edge was added, false if it already existed */ public boolean addEdge(LineVertex source, LineVertex target) { // Ensure both vertices exist addVertex(source); addVertex(target); // Add edge if it doesn't exist if (adjacencyMap.get(source).add(target)) { incomingMap.get(target).add(source); return true; } return false; } /** * Remove an edge between two vertices * @param source The source LineVertex * @param target The target LineVertex * @return true if the edge 
was removed, false if it didn't exist */ public boolean removeEdge(LineVertex source, LineVertex target) { if (containsVertex(source) && containsVertex(target)) { if (adjacencyMap.get(source).remove(target)) { incomingMap.get(target).remove(source); return true; } } return false; } /** * Check if the digraph contains a specific vertex * @param vertex The LineVertex to check * @return true if the vertex exists, false otherwise */ public boolean containsVertex(LineVertex vertex) { return vertices.contains(vertex); } /** * Check if there's an edge between two vertices * @param source The source LineVertex * @param target The target LineVertex * @return true if the edge exists, false otherwise */ public boolean containsEdge(LineVertex source, LineVertex target) { return containsVertex(source) && adjacencyMap.get(source).contains(target); } /** * Get all vertices in the line digraph * @return Set of all LineVertex objects */ public Set> vertexSet() { return new HashSet<>(vertices); } /** * Get the number of vertices * @return Number of vertices in the digraph */ public int vertexCount() { return vertices.size(); } /** * Get the number of edges * @return Total number of edges in the digraph */ public int edgeCount() { return adjacencyMap.values().stream().mapToInt(Set::size).sum(); } /** * Get all outgoing neighbors of a vertex * @param vertex The source LineVertex * @return Set of target LineVertex objects */ public Set> getOutgoingNeighbors(LineVertex vertex) { return adjacencyMap.getOrDefault(vertex, Collections.emptySet()).stream() .collect(Collectors.toSet()); } /** * Get all incoming neighbors of a vertex * @param vertex The target LineVertex * @return Set of source LineVertex objects */ public Set> getIncomingNeighbors(LineVertex vertex) { return incomingMap.getOrDefault(vertex, Collections.emptySet()).stream().collect(Collectors.toSet()); } /** * Get all neighbors (both incoming and outgoing) of a vertex * @param vertex The LineVertex * @return Set of all 
neighboring LineVertex objects */ public Set> getAllNeighbors(LineVertex vertex) { Set> neighbors = new HashSet<>(); neighbors.addAll(getOutgoingNeighbors(vertex)); neighbors.addAll(getIncomingNeighbors(vertex)); return neighbors; } /** * Get the out-degree of a vertex * @param vertex The LineVertex * @return Number of outgoing edges */ public int getOutDegree(LineVertex vertex) { return adjacencyMap.getOrDefault(vertex, Collections.emptySet()).size(); } /** * Get the in-degree of a vertex * @param vertex The LineVertex * @return Number of incoming edges */ public int getInDegree(LineVertex vertex) { return incomingMap.getOrDefault(vertex, Collections.emptySet()).size(); } /** * Get the total degree (in + out) of a vertex * @param vertex The LineVertex * @return Total degree of the vertex */ public int getTotalDegree(LineVertex vertex) { return getInDegree(vertex) + getOutDegree(vertex); } /** * Check if the digraph is empty * @return true if no vertices exist, false otherwise */ public boolean isEmpty() { return vertices.isEmpty(); } /** * Clear all vertices and edges from the digraph */ public void clear() { vertices.clear(); adjacencyMap.clear(); incomingMap.clear(); } /** * Get all vertices with no incoming edges (sources) * @return Set of source LineVertex objects */ public Set> getSources() { return vertices.stream().filter(vertex -> getInDegree(vertex) == 0).collect(Collectors.toSet()); } /** * Get all vertices with no outgoing edges (sinks) * @return Set of sink LineVertex objects */ public Set> getSinks() { return vertices.stream().filter(vertex -> getOutDegree(vertex) == 0).collect(Collectors.toSet()); } /** * Get vertices reachable from a given vertex (BFS traversal) * @param startVertex The starting LineVertex * @return Set of reachable LineVertex objects */ public Set> getReachableVertices(LineVertex startVertex) { Set> reachable = new HashSet<>(); Queue> queue = new LinkedList<>(); if (containsVertex(startVertex)) { queue.offer(startVertex); 
reachable.add(startVertex); while (!queue.isEmpty()) { LineVertex current = queue.poll(); for (LineVertex neighbor : getOutgoingNeighbors(current)) { if (reachable.add(neighbor)) { queue.offer(neighbor); } } } } return reachable; } /** * Check if there's a path from source to target * @param source The source LineVertex * @param target The target LineVertex * @return true if a path exists, false otherwise */ public boolean hasPath(LineVertex source, LineVertex target) { if (!containsVertex(source) || !containsVertex(target)) { return false; } if (source.equals(target)) { return true; } return getReachableVertices(source).contains(target); } /** * Perform a topological sort of the digraph (if acyclic) * @return List of vertices in topological order, or empty list if cyclic */ public List> topologicalSort() { List> result = new ArrayList<>(); Map, Integer> inDegreeMap = new HashMap<>(); Queue> queue = new LinkedList<>(); // Initialize in-degree map for (LineVertex vertex : vertices) { inDegreeMap.put(vertex, getInDegree(vertex)); if (getInDegree(vertex) == 0) { queue.offer(vertex); } } // Process vertices with zero in-degree while (!queue.isEmpty()) { LineVertex current = queue.poll(); result.add(current); for (LineVertex neighbor : getOutgoingNeighbors(current)) { int newInDegree = inDegreeMap.get(neighbor) - 1; inDegreeMap.put(neighbor, newInDegree); if (newInDegree == 0) { queue.offer(neighbor); } } } // Return empty list if graph has cycles return result.size() == vertices.size() ? 
result : Collections.emptyList(); } /** * Create a copy of this line digraph * @return A new LineDigraph with the same structure */ public LineDigraph copy() { LineDigraph copy = new LineDigraph<>(); // Add all vertices vertices.forEach(copy::addVertex); // Add all edges for (LineVertex source : vertices) { for (LineVertex target : getOutgoingNeighbors(source)) { copy.addEdge(source, target); } } return copy; } /** * Get statistics about the line digraph * @return Map containing various statistics */ public Map getStatistics() { Map stats = new HashMap<>(); stats.put("vertexCount", vertexCount()); stats.put("edgeCount", edgeCount()); stats.put("sourceCount", getSources().size()); stats.put("sinkCount", getSinks().size()); stats.put("isEmpty", isEmpty()); if (!isEmpty()) { double avgOutDegree = vertices.stream().mapToInt(this::getOutDegree).average().orElse(0.0); double avgInDegree = vertices.stream().mapToInt(this::getInDegree).average().orElse(0.0); stats.put("avgOutDegree", avgOutDegree); stats.put("avgInDegree", avgInDegree); stats.put("density", (double) edgeCount() / (vertexCount() * (vertexCount() - 1))); } return stats; } /** * Convert to string representation for debugging */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("LineDigraph{"); sb.append("vertices=").append(vertices.size()); sb.append(", edges=").append(edgeCount()); sb.append("}"); return sb.toString(); } /** * Get detailed string representation with all edges * @return Detailed string representation */ public String toDetailedString() { StringBuilder sb = new StringBuilder(); sb.append("LineDigraph Details:\n"); sb.append("Vertices: ").append(vertices.size()).append("\n"); sb.append("Edges: ").append(edgeCount()).append("\n\n"); for (LineVertex vertex : vertices) { sb.append(vertex).append(" -> "); Set> outgoing = getOutgoingNeighbors(vertex); if (outgoing.isEmpty()) { sb.append("[]"); } else { sb.append(outgoing); } sb.append("\n"); } return 
sb.toString(); } /** * Validate the internal consistency of the digraph * @return true if consistent, false otherwise */ public boolean validateConsistency() { // Check that every outgoing edge has a corresponding incoming edge for (LineVertex source : vertices) { for (LineVertex target : getOutgoingNeighbors(source)) { if (!getIncomingNeighbors(target).contains(source)) { return false; } } } // Check that every incoming edge has a corresponding outgoing edge for (LineVertex target : vertices) { for (LineVertex source : getIncomingNeighbors(target)) { if (!getOutgoingNeighbors(source).contains(target)) { return false; } } } return true; } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/arc/pageRank/PageRankFAS.java ================================================ package org.hjug.feedback.arc.pageRank; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import lombok.extern.slf4j.Slf4j; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.alg.connectivity.KosarajuStrongConnectivityInspector; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.DefaultDirectedGraph; /** * PageRankFAS - A PageRank-based algorithm for computing Feedback Arc Set * Based on the paper "Computing a Feedback Arc Set Using PageRank" by * Geladaris, Lionakis, and Tollis * Generated by Perplexity AI and modified. 
* Based on https://arxiv.org/abs/2208.09234 * https://doi.org/10.48550/arXiv.2208.09234 */ @Slf4j public class PageRankFAS { private static final int DEFAULT_PAGERANK_ITERATIONS = 5; private static final double CONVERGENCE_THRESHOLD = 1e-6; private final Graph originalGraph; private final int pageRankIterations; private final Class edgeClass; /** * Constructor for PageRankFAS algorithm * * @param graph The input directed graph * @param edgeTypeToken */ public PageRankFAS(Graph graph, SuperTypeToken edgeTypeToken) { this(graph, DEFAULT_PAGERANK_ITERATIONS, edgeTypeToken); } /** * Constructor with custom PageRank iterations * * @param graph The input directed graph * @param pageRankIterations Number of PageRank iterations * @param edgeTypeToken */ public PageRankFAS(Graph graph, int pageRankIterations, SuperTypeToken edgeTypeToken) { this.originalGraph = graph; this.pageRankIterations = pageRankIterations; this.edgeClass = edgeTypeToken.getClassFromTypeToken(); } /** * Main method to compute the Feedback Arc Set * @return Set of edges that form the feedback arc set */ public Set computeFeedbackArcSet() { Set feedbackArcSet = new HashSet<>(); // Create a working copy of the graph Graph workingGraph = createGraphCopy(originalGraph); // Continue until the graph becomes acyclic while (hasCycles(workingGraph)) { // Find strongly connected components List> sccs = findStronglyConnectedComponents(workingGraph); // Process each SCC sccs.stream() .filter(scc -> scc.size() > 1) // Only non-trivial SCCs can have cycles .forEach(scc -> { E edgeToRemove = processStronglyConnectedComponent(workingGraph, scc); if (edgeToRemove != null) { synchronized (feedbackArcSet) { feedbackArcSet.add(edgeToRemove); workingGraph.removeEdge(edgeToRemove); } } }); } return feedbackArcSet; } /** * Process a single strongly connected component * @param graph The working graph * @param scc The strongly connected component vertices * @return The edge with the highest PageRank score to remove */ private 
E processStronglyConnectedComponent(Graph graph, Set scc) { // Create subgraph for this SCC Graph sccGraph = createSubgraph(graph, scc); // Create line digraph using the new custom implementation LineDigraph lineDigraph = createLineDigraph(sccGraph); // Run PageRank on line digraph Map, Double> pageRankScores = computePageRank(lineDigraph); // Find the edge (line vertex) with highest PageRank score return pageRankScores.entrySet().parallelStream() .max(Map.Entry.comparingByValue()) .map(entry -> entry.getKey().getOriginalEdge()) .orElse(null); } /** * Create line digraph from the input graph using custom LineDigraph implementation * @param graph Input graph * @return LineDigraph representation */ private LineDigraph createLineDigraph(Graph graph) { LineDigraph lineDigraph = new LineDigraph<>(); // Create nodes in line digraph (one for each edge in original graph) Map> edgeToLineVertex = new ConcurrentHashMap<>(); graph.edgeSet().parallelStream().forEach(edge -> { V source = graph.getEdgeSource(edge); V target = graph.getEdgeTarget(edge); LineVertex lineVertex = new LineVertex<>(source, target, edge); edgeToLineVertex.put(edge, lineVertex); lineDigraph.addVertex(lineVertex); }); // Create edges in line digraph using DFS-based approach from the paper createLineDigraphEdges(graph, lineDigraph, edgeToLineVertex); return lineDigraph; } /** * Create edges in line digraph based on Algorithm 3 from the paper * Updated to use custom LineDigraph methods */ private void createLineDigraphEdges( Graph graph, LineDigraph lineDigraph, Map> edgeToLineVertex) { Set visited = ConcurrentHashMap.newKeySet(); // Start DFS from a random vertex if graph is not empty if (!graph.vertexSet().isEmpty()) { V startVertex = graph.vertexSet().iterator().next(); createLineDigraphEdgesDFS(graph, lineDigraph, edgeToLineVertex, startVertex, null, visited); } } /** * DFS-based creation of line digraph edges (Algorithm 3 implementation) * Updated to use custom LineDigraph.addEdge method */ private 
void createLineDigraphEdgesDFS( Graph graph, LineDigraph lineDigraph, Map> edgeToLineVertex, V vertex, LineVertex prevLineVertex, Set visited) { visited.add(vertex); // Get outgoing edges from current vertex Set outgoingEdges = graph.outgoingEdgesOf(vertex); for (E edge : outgoingEdges) { V target = graph.getEdgeTarget(edge); LineVertex currentLineVertex = edgeToLineVertex.get(edge); // if currentLineVertex is null, skip processing // for this edge since it will result in an NPE if (currentLineVertex == null) { continue; } // Add edge from previous line vertex to current (if prev exists) if (prevLineVertex != null) { lineDigraph.addEdge(prevLineVertex, currentLineVertex); } if (!visited.contains(target)) { // Continue DFS createLineDigraphEdgesDFS(graph, lineDigraph, edgeToLineVertex, target, currentLineVertex, visited); } else { // Target is already visited - add edges to all line vertices originating from target graph.outgoingEdgesOf(target).stream() .map(edgeToLineVertex::get) .filter(Objects::nonNull) .forEach(targetLineVertex -> lineDigraph.addEdge(currentLineVertex, targetLineVertex)); } } } /** * Compute PageRank scores on the line digraph (Algorithm 4 implementation) * @param lineDigraph The line digraph * @return Map of line vertices to their PageRank scores */ private Map, Double> computePageRank(LineDigraph lineDigraph) { Set> vertices = lineDigraph.vertexSet(); int numVertices = vertices.size(); if (numVertices == 0) return new HashMap<>(); // Initialize PageRank scores Map, Double> currentScores = new ConcurrentHashMap<>(Math.max(16, (int) (numVertices / 0.75f) + 1)); final double initialScore = 1.0 / numVertices; // No lambdas here, so nothing captures a non-final variable for (LineVertex v : vertices) { currentScores.put(v, initialScore); } // Run PageRank iterations for (int iteration = 0; iteration < pageRankIterations; iteration++) { // Fresh map each iteration; pre-seed zeros so all vertices exist in the map ConcurrentMap, Double> newScores = new 
ConcurrentHashMap<>(currentScores.size()); for (LineVertex v : vertices) { newScores.put(v, 0.0); } // Do one iteration in parallel; lambdas only see method parameters (effectively final) applyOneIteration(vertices, lineDigraph, currentScores, newScores); // Swap for next iteration (this reassigns local variables, not captured by lambdas) currentScores = newScores; } return currentScores; } private void applyOneIteration( Set> vertices, LineDigraph lineDigraph, Map, Double> currentScores, ConcurrentMap, Double> newScores) { vertices.parallelStream().forEach(vertex -> { double score = currentScores.get(vertex); Set> outgoing = lineDigraph.getOutgoingNeighbors(vertex); if (outgoing.isEmpty()) { // Sink: keep score on itself newScores.merge(vertex, score, Double::sum); } else { double scorePerEdge = score / outgoing.size(); // Inner loop kept sequential: nested parallel often hurts more than it helps for (LineVertex target : outgoing) { newScores.merge(target, scorePerEdge, Double::sum); } } }); } /** * Find strongly connected components using Kosaraju's algorithm */ private List> findStronglyConnectedComponents(Graph graph) { KosarajuStrongConnectivityInspector inspector = new KosarajuStrongConnectivityInspector<>(graph); return inspector.stronglyConnectedSets(); } /** * Check if graph has cycles */ private boolean hasCycles(Graph graph) { CycleDetector detector = new CycleDetector<>(graph); return detector.detectCycles(); } /** * Create a copy of the graph */ private Graph createGraphCopy(Graph original) { Graph copy = new DefaultDirectedGraph<>(edgeClass); // Add vertices original.vertexSet().forEach(copy::addVertex); // Add edges original.edgeSet().forEach(edge -> { V source = original.getEdgeSource(edge); V target = original.getEdgeTarget(edge); copy.addEdge(source, target, edge); }); return copy; } /** * Create subgraph containing only specified vertices and their edges */ private Graph createSubgraph(Graph graph, Set vertices) { Graph subgraph = new 
DefaultDirectedGraph<>(edgeClass); // Add vertices vertices.forEach(subgraph::addVertex); // Add edges between vertices in the set graph.edgeSet().stream() .filter(edge -> vertices.contains(graph.getEdgeSource(edge)) && vertices.contains(graph.getEdgeTarget(edge))) .forEach(edge -> { V source = graph.getEdgeSource(edge); V target = graph.getEdgeTarget(edge); subgraph.addEdge(source, target, edge); }); // ConcurrenModificationException // at org.hjug.feedback.arc.pageRank.PageRankFAS.createSubgraph (PageRankFAS.java:302) // at org.hjug.feedback.arc.pageRank.PageRankFAS.processStronglyConnectedComponent (PageRankFAS.java:92) // at org.hjug.feedback.arc.pageRank.PageRankFAS.lambda$computeFeedbackArcSet$1 (PageRankFAS.java:71) return subgraph; } /** * Get detailed statistics about the algorithm execution * @return Map containing execution statistics */ public Map getExecutionStatistics(Graph graph) { Map stats = new HashMap<>(); stats.put("originalVertices", graph.vertexSet().size()); stats.put("originalEdges", graph.edgeSet().size()); stats.put("pageRankIterations", pageRankIterations); // Analyze SCCs List> sccs = findStronglyConnectedComponents(graph); stats.put("sccCount", sccs.size()); stats.put( "trivialSCCs", sccs.stream().mapToInt(scc -> scc.size() == 1 ? 1 : 0).sum()); stats.put( "nonTrivialSCCs", sccs.stream().mapToInt(scc -> scc.size() > 1 ? 
1 : 0).sum()); // Find largest SCC int maxSCCSize = sccs.stream().mapToInt(Set::size).max().orElse(0); stats.put("largestSCCSize", maxSCCSize); return stats; } } /** * Represents a vertex in the line digraph (corresponds to an edge in original graph) */ class LineVertex { private final V source; private final V target; private final E originalEdge; public LineVertex(V source, V target, E originalEdge) { this.source = source; this.target = target; this.originalEdge = originalEdge; } public V getSource() { return source; } public V getTarget() { return target; } public E getOriginalEdge() { return originalEdge; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (!(obj instanceof LineVertex)) return false; LineVertex other = (LineVertex) obj; return Objects.equals(originalEdge, other.originalEdge); } @Override public int hashCode() { return Objects.hash(originalEdge); } @Override public String toString() { return String.format("LineVertex(%s->%s)", source, target); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/approximate/FeedbackVertexSetResult.java ================================================ package org.hjug.feedback.vertex.approximate; import java.util.Set; /** * Result container for the Feedback Vertex Set algorithm */ public class FeedbackVertexSetResult { private final Set feedbackVertices; public FeedbackVertexSetResult(Set feedbackVertices) { this.feedbackVertices = feedbackVertices; } public Set getFeedbackVertices() { return feedbackVertices; } public int size() { return feedbackVertices.size(); } @Override public String toString() { return String.format( "FeedbackVertexSetResult{vertices=%s, size=%d}", feedbackVertices, feedbackVertices.size()); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/approximate/FeedbackVertexSetSolver.java ================================================ package 
org.hjug.feedback.vertex.approximate; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; import org.jgrapht.Graph; import org.jgrapht.alg.connectivity.KosarajuStrongConnectivityInspector; import org.jgrapht.alg.interfaces.ShortestPathAlgorithm; import org.jgrapht.alg.interfaces.StrongConnectivityAlgorithm; import org.jgrapht.alg.shortestpath.DijkstraShortestPath; import org.jgrapht.graph.AsSubgraph; import org.jgrapht.graph.AsWeightedGraph; /** * Parallel implementation of the Feedback Vertex Set algorithm * Based on "Approximating Minimum Feedback Sets and Multicuts in Directed Graphs" * DOI:10.1007/PL00009191 * https://www.researchgate.net/publication/227278349_Approximating_Minimum_Feedback_Sets_and_Multicuts_in_Directed_Graphs * Generated by Perplexity.ai's Research model */ public class FeedbackVertexSetSolver { private final Graph graph; private final Set specialVertices; private final Map vertexWeights; private final Map fractionalSolution; private final double epsilon; private final ForkJoinPool forkJoinPool; public FeedbackVertexSetSolver( Graph graph, Set specialVertices, Map vertexWeights, double epsilon) { this.graph = graph; this.specialVertices = specialVertices != null ? specialVertices : new HashSet<>(graph.vertexSet()); this.vertexWeights = vertexWeights != null ? 
vertexWeights : createUniformWeights(); this.epsilon = epsilon; this.forkJoinPool = ForkJoinPool.commonPool(); this.fractionalSolution = computeFractionalSolution(); } /** * Creates uniform weights for all vertices when no weights are provided[3] */ private Map createUniformWeights() { Map weights = new ConcurrentHashMap<>(); graph.vertexSet().parallelStream().forEach(v -> weights.put(v, 1.0)); return weights; } /** * Computes the fractional solution using the combinatorial algorithm from the paper[1] */ private Map computeFractionalSolution() { Map y = new ConcurrentHashMap<>(); graph.vertexSet().parallelStream().forEach(v -> y.put(v, 0.0)); AtomicInteger iteration = new AtomicInteger(0); while (hasInterestingCycle()) { // Compute cycle counts for each vertex in parallel[9] Map cycleCounts = computeCycleCounts(); // Find vertex minimizing w(v)/f(v) using parallel streams[10] Optional minVertex = graph.vertexSet().parallelStream() .filter(v -> cycleCounts.getOrDefault(v, 0L) > 0) .min(Comparator.comparingDouble(v -> vertexWeights.get(v) / cycleCounts.get(v))); if (!minVertex.isPresent()) break; V vertex = minVertex.get(); double increment = vertexWeights.get(vertex) / cycleCounts.get(vertex); // Update fractional solution atomically y.compute(vertex, (k, val) -> Math.min(1.0, val + increment * (1 + epsilon))); iteration.incrementAndGet(); if (iteration.get() > graph.vertexSet().size() * 10) break; // Safety check } return y; } /** * Computes cycle counts for vertices using strongly connected components[9][12] */ private Map computeCycleCounts() { Map counts = new ConcurrentHashMap<>(); StrongConnectivityAlgorithm scAlg = new KosarajuStrongConnectivityInspector<>(graph); scAlg.stronglyConnectedSets().parallelStream() .filter(this::isInterestingComponent) .forEach(scc -> { scc.parallelStream().forEach(v -> counts.merge(v, 1L, Long::sum)); }); return counts; } /** * Checks if a strongly connected component contains special vertices and forms cycles[1] */ private 
boolean isInterestingComponent(Set scc) { boolean containsSpecial = scc.stream().anyMatch(specialVertices::contains); boolean hasCycle = scc.size() > 1 || (scc.size() == 1 && graph.containsEdge( scc.iterator().next(), scc.iterator().next())); return containsSpecial && hasCycle; } /** * Checks if the graph contains interesting cycles[1] */ private boolean hasInterestingCycle() { StrongConnectivityAlgorithm scAlg = new KosarajuStrongConnectivityInspector<>(graph); return scAlg.stronglyConnectedSets().parallelStream().anyMatch(this::isInterestingComponent); } /** * Main solving method implementing the recursive decomposition algorithm[1] */ public FeedbackVertexSetResult solve() { return solveRecursive(graph, specialVertices); } /** * Recursive solver using graph decomposition and parallel processing[1][25] */ private FeedbackVertexSetResult solveRecursive(Graph currentGraph, Set currentSpecial) { if (!hasInterestingCycleInSubgraph(currentGraph, currentSpecial)) { return new FeedbackVertexSetResult<>(new HashSet<>()); } // Select source vertex from special vertices V source = currentSpecial.iterator().next(); // Compute distances using transformed edge weights[20][21] Map distances = computeDistances(currentGraph, source); // Find all distinct distance values List distValues = distances.values().parallelStream().distinct().sorted().collect(Collectors.toList()); // Evaluate cut candidates in parallel[10] List> candidates = distValues.parallelStream() .map(dist -> evaluateCut(currentGraph, distances, dist)) .filter(Objects::nonNull) .collect(Collectors.toList()); if (candidates.isEmpty()) { // Fallback: select vertex with maximum degree Optional maxDegreeVertex = currentGraph.vertexSet().parallelStream() .max(Comparator.comparingInt(v -> currentGraph.inDegreeOf(v) + currentGraph.outDegreeOf(v))); if (maxDegreeVertex.isPresent()) { Set solution = new HashSet<>(); solution.add(maxDegreeVertex.get()); return new FeedbackVertexSetResult<>(solution); } return new 
FeedbackVertexSetResult<>(new HashSet<>()); } // Select best cut candidate CutCandidate bestCandidate = candidates.parallelStream() .min(Comparator.comparingDouble(c -> c.ratio)) .orElseThrow(); // Create subgraphs using AsSubgraph[24] Set leftVertices = createLeftPartition(currentGraph, distances, bestCandidate.distance); Set rightVertices = createRightPartition(currentGraph, distances, bestCandidate.distance); // Recursive solve using ForkJoinPool[25] CompletableFuture> leftFuture = CompletableFuture.supplyAsync( () -> { if (!leftVertices.isEmpty()) { Graph leftGraph = new AsSubgraph<>(currentGraph, leftVertices); Set leftSpecial = intersection(currentSpecial, leftVertices); return solveRecursive(leftGraph, leftSpecial); } return new FeedbackVertexSetResult<>(new HashSet<>()); }, forkJoinPool); CompletableFuture> rightFuture = CompletableFuture.supplyAsync( () -> { if (!rightVertices.isEmpty()) { Graph rightGraph = new AsSubgraph<>(currentGraph, rightVertices); Set rightSpecial = intersection(currentSpecial, rightVertices); return solveRecursive(rightGraph, rightSpecial); } return new FeedbackVertexSetResult<>(new HashSet<>()); }, forkJoinPool); // Combine results try { FeedbackVertexSetResult leftResult = leftFuture.get(); FeedbackVertexSetResult rightResult = rightFuture.get(); Set solution = new HashSet<>(bestCandidate.cut); solution.addAll(leftResult.getFeedbackVertices()); solution.addAll(rightResult.getFeedbackVertices()); return new FeedbackVertexSetResult<>(solution); } catch (InterruptedException | ExecutionException e) { Thread.currentThread().interrupt(); throw new RuntimeException("Parallel execution failed", e); } } /** * Computes shortest path distances using Dijkstra algorithm with transformed weights[20][26] */ private Map computeDistances(Graph graph, V source) { // Transform to weighted graph using fractional solution values[26] Function weightFunction = edge -> { V target = graph.getEdgeTarget(edge); return 
fractionalSolution.getOrDefault(target, 0.0); }; AsWeightedGraph weightedGraph = new AsWeightedGraph<>(graph, weightFunction, false, false); // Compute shortest paths using Dijkstra[20] DijkstraShortestPath dijkstra = new DijkstraShortestPath<>(weightedGraph); ShortestPathAlgorithm.SingleSourcePaths paths = dijkstra.getPaths(source); Map distances = new ConcurrentHashMap<>(); graph.vertexSet().parallelStream().forEach(v -> { double distance = paths.getWeight(v); if (Double.isInfinite(distance)) { distance = Double.MAX_VALUE; } distances.put(v, distance + fractionalSolution.getOrDefault(source, 0.0)); }); return distances; } /** * Evaluates a cut candidate based on the ratio of actual weight to fractional weight[1] */ private CutCandidate evaluateCut(Graph graph, Map distances, double cutDistance) { Set cut = graph.vertexSet().parallelStream() .filter(v -> Math.abs(distances.get(v) - cutDistance) < 1e-10) .collect(Collectors.toSet()); if (cut.isEmpty()) return null; double actualWeight = cut.parallelStream() .mapToDouble(v -> vertexWeights.getOrDefault(v, 1.0)) .sum(); double fractionalWeight = cut.parallelStream() .mapToDouble(v -> fractionalSolution.getOrDefault(v, 0.0)) .sum(); if (fractionalWeight <= 1e-10) return null; return new CutCandidate<>(cut, actualWeight / fractionalWeight, cutDistance); } /** * Creates left partition of vertices[1] */ private Set createLeftPartition(Graph graph, Map distances, double cutDistance) { return graph.vertexSet().parallelStream() .filter(v -> distances.get(v) < cutDistance - 1e-10) .collect(Collectors.toSet()); } /** * Creates right partition of vertices[1] */ private Set createRightPartition(Graph graph, Map distances, double cutDistance) { return graph.vertexSet().parallelStream() .filter(v -> distances.get(v) > cutDistance + 1e-10) .collect(Collectors.toSet()); } /** * Checks for interesting cycles in a subgraph[9] */ private boolean hasInterestingCycleInSubgraph(Graph subgraph, Set special) { if 
(subgraph.vertexSet().isEmpty()) return false; StrongConnectivityAlgorithm scAlg = new KosarajuStrongConnectivityInspector<>(subgraph); return scAlg.stronglyConnectedSets().parallelStream().anyMatch(scc -> { boolean containsSpecial = scc.stream().anyMatch(special::contains); boolean hasCycle = scc.size() > 1 || (scc.size() == 1 && subgraph.containsEdge( scc.iterator().next(), scc.iterator().next())); return containsSpecial && hasCycle; }); } /** * Computes intersection of two sets using parallel streams[10] */ private Set intersection(Set set1, Set set2) { return set1.parallelStream().filter(set2::contains).collect(Collectors.toSet()); } /** * Cut candidate data structure[1] */ private static class CutCandidate { final Set cut; final double ratio; final double distance; CutCandidate(Set cut, double ratio, double distance) { this.cut = cut; this.ratio = ratio; this.distance = distance; } } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/DIAGRAM.md ================================================ # Kernelized Directed Feedback Vertex Set (DFVS) Algorithm Based on: *"Wannabe Bounded Treewidth Graphs Admit a Polynomial Kernel for DFVS"* (Lokshtanov et al.) 
https://doi.org/10.1145/3711669 ## Class Architecture ```mermaid classDiagram direction TB class DirectedFeedbackVertexSetSolver~V,E~ { -Graph graph -Set~V~ modulator -Map vertexWeights -int eta -Set~V~ remainder -Map zones -Map kDfvsRepresentatives -int k +solve() DirectedFeedbackVertexSetResult +solve(int k) DirectedFeedbackVertexSetResult -computeZoneDecomposition(k) -computeKDfvsRepresentatives(k) -solveWithReductionRules(k) } class EnhancedParameterComputer~V,E~ { -TreewidthComputer treewidthComputer -FeedbackVertexSetComputer fvsComputer -ModulatorComputer modulatorComputer +computeOptimalParameters(graph, maxSize) EnhancedParameters +computeParameters(graph, modulator) EnhancedParameters } class ParameterComputer~V,E~ { -TreewidthComputer treewidthComputer -FeedbackVertexSetComputer fvsComputer +computeParameters(graph) Parameters +computeParametersWithOptimalModulator(graph, maxSize) Parameters } class FeedbackVertexSetComputer~V,E~ { +computeK(graph) int +greedyFeedbackVertexSet(graph) Set~V~ -stronglyConnectedComponentsBasedFVS(graph) Set~V~ -degreeBasedFeedbackVertexSet(graph) Set~V~ -localSearchFeedbackVertexSet(graph) Set~V~ } class TreewidthComputer~V,E~ { +computeEta(graph, modulator) int -minDegreeEliminationTreewidth(graph) int -fillInHeuristicTreewidth(graph) int -maxCliqueTreewidth(graph) int -greedyTriangulationTreewidth(graph) int } class ModulatorComputer~V,E~ { +computeModulator(graph, targetTw, maxSize) ModulatorResult -computeGreedyDegreeModulator() Set~V~ -computeFeedbackVertexSetModulator() Set~V~ -computeTreewidthDecompositionModulator() Set~V~ -computeHighDegreeVertexModulator() Set~V~ -computeBottleneckVertexModulator() Set~V~ } class DirectedFeedbackVertexSetResult~V~ { -Set~V~ feedbackVertices +getFeedbackVertices() Set~V~ +size() int } EnhancedParameterComputer --> TreewidthComputer : uses EnhancedParameterComputer --> FeedbackVertexSetComputer : uses EnhancedParameterComputer --> ModulatorComputer : uses ParameterComputer --> 
TreewidthComputer : uses ParameterComputer --> FeedbackVertexSetComputer : uses ModulatorComputer --> TreewidthComputer : uses ModulatorComputer --> FeedbackVertexSetComputer : uses DirectedFeedbackVertexSetSolver --> DirectedFeedbackVertexSetResult : produces ``` ## Algorithm Overview — Three-Phase Kernelization ```mermaid flowchart TD Start(["`**Input:** Directed graph G, modulator M, treewidth η, weights`"]) --> SCC SCC["`**Compute default k** Kosaraju SCC count as lower bound`"] SCC --> P1 subgraph P1["Phase 1 — Zone Decomposition"] direction TB P1A["`**Remove modulator** from graph G' = G ∖ M`"] P1A --> P1B["`**Compute minimal FVS** S in G' (greedy, up to k vertices)`"] P1B --> P1C{"|S| > k?"} P1C -- Yes --> NO_INST(["`**NO-instance** return empty`"]) P1C -- No --> P1D["`**Compute flow-blocker F** For each modulator pair (u,v): find min vertex cut ≤ k in G'`"] P1D --> P1E["`**Compute remainder R** R = S ∪ F Bound: |R| ≤ 2k(η+1)(|M|²+1)`"] P1E --> P1F["`**Partition into zones** Vertices not in M or R → connected components = zones`"] end P1 --> P2 subgraph P2["Phase 2 — k-DFVS Representative Marking"] direction TB P2A["`**For each zone Z** (in parallel):`"] P2A --> P2B["`Compute **SCCs** within zone subgraph`"] P2B --> P2C["`From each non-trivial SCC, select **highest-degree vertex** as representative`"] P2C --> P2D["`Bound representative size: |rep| ≤ (k · |M|)^(η²)`"] end P2 --> P3 subgraph P3["Phase 3 — Reduction Rules & Solve"] direction TB P3A["`**Apply Reduction Rules 5 & 6** For each zone:`"] P3A --> P3B["`Identify **non-representative** zone vertices`"] P3B --> P3C["`Remove edges between **modulator ↔ non-representative** vertices`"] P3C --> P3D["`**Add bypass edges** through representatives to preserve cycle structure`"] P3D --> P3E["`**Solve kernelized instance** Collect all representatives + high-degree remainder vertices`"] end P3 --> Result(["`**Output:** DirectedFeedbackVertexSetResult containing the DFVS`"]) style P1 
fill:#1a3a5c,stroke:#4a9eff,color:#fff style P2 fill:#3a1a5c,stroke:#9a4aff,color:#fff style P3 fill:#5c3a1a,stroke:#ff9a4a,color:#fff style Start fill:#0d7377,stroke:#14ffec,color:#fff style Result fill:#0d7377,stroke:#14ffec,color:#fff style NO_INST fill:#7a1a1a,stroke:#ff4a4a,color:#fff ``` ## Bypass Edge Creation Detail ```mermaid flowchart TD BE_Start(["`Edge to remove: **source → target**`"]) --> M1 M1{"`**Method 1:** Find single representative R where source→R and R→target?`"} M1 -- Found --> M1A["`Add edges: source→R, R→target`"] M1A --> Done M1 -- Not found --> M2 M2{"`**Method 2:** Find chain of representatives via BFS source→R₁→…→Rₙ→target?`"} M2 -- Found --> M2A["`Add edges along bypass chain`"] M2A --> Done M2 -- Not found --> M3 M3["`**Method 3:** Minimal bypass Find sourceReachable ∩ reps Find targetReachable ∩ reps`"] M3 --> M3A{Same rep?} M3A -- Yes --> M3B["`source→rep→target`"] M3A -- No --> M3C["`source→srcRep→tgtRep→target`"] M3B --> Done M3C --> Done Done(["`Bypass complete *(rollback on failure)*`"]) style BE_Start fill:#0d7377,stroke:#14ffec,color:#fff style Done fill:#0d7377,stroke:#14ffec,color:#fff ``` ## Parameter Computation Pipeline ```mermaid flowchart LR G(["`**Input Graph G**`"]) --> PC subgraph PC["EnhancedParameterComputer"] direction TB FVS["`**FeedbackVertexSetComputer** 4 parallel algorithms: • Greedy max-degree • SCC-based • Degree-scored • Local search → min result = **k**`"] MC["`**ModulatorComputer** 5 parallel strategies: • Greedy degree • FVS-based • Treewidth decomposition • High-degree vertex • Bottleneck vertex → best modulator = **M**`"] TWC["`**TreewidthComputer** 4 parallel heuristics: • Min-degree elimination • Fill-in heuristic • Max-clique • Greedy triangulation → min result = **η**`"] FVS --> PARAMS MC --> TWC TWC --> PARAMS PARAMS["`**Parameters** k, M, η, quality`"] end PC --> SOLVER(["`**DirectedFeedbackVertexSetSolver** solve(k) with M and η`"]) style G fill:#0d7377,stroke:#14ffec,color:#fff style SOLVER 
fill:#0d7377,stroke:#14ffec,color:#fff style PC fill:#1a1a3a,stroke:#4a4aff,color:#fff ``` ## Key Concepts | Symbol | Meaning | |--------|---------| | **G** | Input directed graph | | **M** (modulator) | Set of vertices whose removal yields a bounded-treewidth graph | | **η** (eta) | Treewidth of G ∖ M (undirected) | | **k** | Size of the minimum directed feedback vertex set | | **S** | Minimal FVS of G ∖ M | | **F** | Flow-blocker — min vertex cuts between modulator pairs | | **R** | Remainder = S ∪ F | | **Zones** | Connected components of V ∖ (M ∪ R) | | **Representatives** | Highest-degree vertices from each non-trivial SCC per zone | | **Kernel bound** | (k · \|M\|)^O(η²) | ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/DirectedFeedbackVertexSetResult.java ================================================ package org.hjug.feedback.vertex.kernelized; import java.util.Set; /** * Result container for the Directed Feedback Vertex Set algorithm[1] */ public class DirectedFeedbackVertexSetResult { private final Set feedbackVertices; public DirectedFeedbackVertexSetResult(Set feedbackVertices) { this.feedbackVertices = feedbackVertices; } public Set getFeedbackVertices() { return feedbackVertices; } public int size() { return feedbackVertices.size(); } @Override public String toString() { return String.format( "DirectedFeedbackVertexSetResult{vertices=%s, size=%d}", feedbackVertices, feedbackVertices.size()); } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/DirectedFeedbackVertexSetSolver.java ================================================ package org.hjug.feedback.vertex.kernelized; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; 
import org.jgrapht.alg.connectivity.KosarajuStrongConnectivityInspector; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.AsSubgraph; import org.jgrapht.graph.DefaultDirectedGraph; /** * Parallel implementation of the Directed Feedback Vertex Set algorithm * Based on Lokshtanov et al. "Kernel for Directed Feedback Vertex Set" * Generated by Perplexity.ai's Research model * from paper "Wannabe Bounded Treewidth Graphs Admit a Polynomial Kernel for Directed Feedback Vertex Set" * ... * ... * */ public class DirectedFeedbackVertexSetSolver { private final Graph graph; private final Class edgeClass; private final Set modulator; private final Map vertexWeights; private final int eta; // Treewidth parameter private final ForkJoinPool forkJoinPool; // Zone decomposition components private Set remainder; private Map> zones; private Map, Set> kDfvsRepresentatives; private int k; public DirectedFeedbackVertexSetSolver( Graph graph, Set modulator, Map vertexWeights, int eta, SuperTypeToken edgeTypeToken) { this.graph = graph; this.modulator = modulator != null ? modulator : new HashSet<>(); this.vertexWeights = vertexWeights != null ? 
vertexWeights : createUniformWeights(); this.eta = eta; this.forkJoinPool = ForkJoinPool.commonPool(); this.zones = new ConcurrentHashMap<>(); this.kDfvsRepresentatives = new ConcurrentHashMap<>(); this.edgeClass = edgeTypeToken.getClassFromTypeToken(); } /** * Creates uniform weights for all vertices when no weights are provided[1] */ private Map createUniformWeights() { Map weights = new ConcurrentHashMap<>(); graph.vertexSet().parallelStream().forEach(v -> weights.put(v, 1.0)); return weights; } /** * Use # of Strongly Connected components as a default k value * SCC size is a lower bound of k (the lower the better) */ public DirectedFeedbackVertexSetResult solve() { KosarajuStrongConnectivityInspector kosaraju = new KosarajuStrongConnectivityInspector<>(graph); return solve(kosaraju.stronglyConnectedSets().size()); } /** * Main solving method implementing the three-phase kernelization algorithm[1] */ public DirectedFeedbackVertexSetResult solve(int k) { this.k = k; // Phase 1: Zone Decomposition computeZoneDecomposition(k); // Phase 2: k-DFVS Representative Marking computeKDfvsRepresentatives(k); // Phase 3: Apply Reduction Rules and Solve return solveWithReductionRules(k); } /** * Phase 1: Computes zone decomposition as described in Section 3[1] */ private void computeZoneDecomposition(int k) { // Compute solution S in graph without modulator Set graphWithoutModulator = graph.vertexSet().stream().filter(v -> !modulator.contains(v)).collect(Collectors.toSet()); Graph subgraph = new AsSubgraph<>(graph, graphWithoutModulator); Set solutionS = computeMinimalFeedbackVertexSet(subgraph, k); if (solutionS.size() > k) { // Instance is NO-instance this.remainder = new HashSet<>(); this.zones.clear(); return; } // Compute flow-blocker F using parallel processing[18] Set flowBlockerF = computeFlowBlocker(solutionS, k); // Compute LCA-closure to derive remainder R this.remainder = computeRemainder(solutionS, flowBlockerF, k); // Partition remaining vertices into zones[1] 
partitionIntoZones(); } /** * Computes flow-blocker F as described in Phase II of Section 3[1] */ private Set computeFlowBlocker(Set solutionS, int k) { Set flowBlocker = ConcurrentHashMap.newKeySet(); // For every ordered pair of vertices in modulator modulator.parallelStream().forEach(u -> { modulator.parallelStream().forEach(v -> { if (!u.equals(v) && !graph.containsEdge(u, v)) { Set minCut = computeMinimumVertexCut(u, v, solutionS, k); if (minCut.size() <= k) { flowBlocker.addAll(minCut); } } }); }); return flowBlocker; } /** * Computes minimum vertex cut between two vertices[1] */ private Set computeMinimumVertexCut(V source, V target, Set excludeSet, int k) { // Simplified implementation using max-flow approach Set cut = new HashSet<>(); // Use parallel BFS to find vertex cut Queue queue = new ConcurrentLinkedQueue<>(); Set visited = ConcurrentHashMap.newKeySet(); Map parent = new ConcurrentHashMap<>(); queue.offer(source); visited.add(source); while (!queue.isEmpty() && cut.size() <= k) { V current = queue.poll(); if (current.equals(target)) { // Reconstruct path and find bottleneck V node = target; while (!node.equals(source) && parent.containsKey(node)) { if (!modulator.contains(node) && !excludeSet.contains(node)) { cut.add(node); } node = parent.get(node); } break; } // Explore neighbors in parallel[18] graph.outgoingEdgesOf(current).parallelStream() .map(graph::getEdgeTarget) .filter(neighbor -> !visited.contains(neighbor)) .forEach(neighbor -> { if (visited.add(neighbor)) { parent.put(neighbor, current); queue.offer(neighbor); } }); } return cut; } /** * Computes remainder R using LCA-closure as described in Phase III[1] */ private Set computeRemainder(Set solutionS, Set flowBlockerF, int k) { Set remainder = new HashSet<>(solutionS); remainder.addAll(flowBlockerF); // Bound size according to Observation 2[1] int maxRemainderSize = 2 * k * (eta + 1) * (modulator.size() * modulator.size() + 1); if (remainder.size() > maxRemainderSize) { // Trim to most 
important vertices based on degree remainder = remainder.stream() .sorted(Comparator.comparingInt(v -> -(graph.inDegreeOf(v) + graph.outDegreeOf(v)))) .limit(maxRemainderSize) .collect(Collectors.toSet()); } return remainder; } /** * Partitions remaining vertices into zones[1] */ private void partitionIntoZones() { Set remainingVertices = graph.vertexSet().stream() .filter(v -> !modulator.contains(v) && !remainder.contains(v)) .collect(Collectors.toSet()); // Use connected components to partition into zones AtomicInteger zoneId = new AtomicInteger(0); Set processed = ConcurrentHashMap.newKeySet(); remainingVertices.parallelStream().forEach(vertex -> { if (!processed.contains(vertex)) { Set component = computeConnectedComponent(vertex, remainingVertices); component.forEach(processed::add); zones.put(zoneId.getAndIncrement(), component); } }); } /** * Computes connected component containing the given vertex */ private Set computeConnectedComponent(V startVertex, Set candidateVertices) { Set component = new HashSet<>(); Queue queue = new ArrayDeque<>(); queue.offer(startVertex); component.add(startVertex); while (!queue.isEmpty()) { V current = queue.poll(); // Add all adjacent vertices in candidate set graph.edgesOf(current).stream() .flatMap(edge -> Stream.of(graph.getEdgeSource(edge), graph.getEdgeTarget(edge))) .filter(candidateVertices::contains) .filter(v -> !component.contains(v)) .forEach(v -> { component.add(v); queue.offer(v); }); } return component; } /** * Phase 2: Computes k-DFVS representatives as described in Section 4[1] */ private void computeKDfvsRepresentatives(int k) { zones.entrySet().parallelStream().forEach(entry -> { Set zone = entry.getValue(); Set representative = computeKDfvsRepresentativeForZone(zone, k); kDfvsRepresentatives.put(zone, representative); }); } /** * Computes k-DFVS representative for a single zone using the important separators approach[1] */ private Set computeKDfvsRepresentativeForZone(Set zone, int k) { Set representative 
= ConcurrentHashMap.newKeySet(); // Compute strongly connected components in zone Graph zoneSubgraph = new AsSubgraph<>(graph, zone); KosarajuStrongConnectivityInspector sccInspector = new KosarajuStrongConnectivityInspector<>(zoneSubgraph); // For each non-trivial SCC, add important vertices to representative sccInspector.stronglyConnectedSets().parallelStream() .filter(scc -> scc.size() > 1 || hasSelfLoop(scc.iterator().next())) .forEach(scc -> { // Add vertices with highest degree from each SCC scc.stream() .max(Comparator.comparingInt(v -> graph.inDegreeOf(v) + graph.outDegreeOf(v))) .ifPresent(representative::add); }); // Bound size according to Lemma 4.2[1] int maxRepresentativeSize = (int) Math.pow(k * modulator.size(), eta * eta); if (representative.size() > maxRepresentativeSize) { return representative.stream() .sorted(Comparator.comparingDouble(v -> -vertexWeights.getOrDefault(v, 1.0))) .limit(maxRepresentativeSize) .collect(Collectors.toSet()); } return representative; } /** * Checks if a vertex has a self-loop */ private boolean hasSelfLoop(V vertex) { return graph.containsEdge(vertex, vertex); } /** * Phase 3: Applies reduction rules and solves the reduced instance[1] */ private DirectedFeedbackVertexSetResult solveWithReductionRules(int k) { Set feedbackVertexSet = ConcurrentHashMap.newKeySet(); // Apply reduction rules to limit interaction between modulator and zones applyReductionRules(); // Solve on the kernelized instance Set kernelSolution = solveKernelizedInstance(k); feedbackVertexSet.addAll(kernelSolution); return new DirectedFeedbackVertexSetResult<>(feedbackVertexSet); } /** * Applies reduction rules as described in Section 5[1] */ private void applyReductionRules() { // Apply rules to remove arcs between modulator and non-representative zone vertices kDfvsRepresentatives.entrySet().parallelStream().forEach(entry -> { Set zone = entry.getKey(); Set representative = entry.getValue(); Set nonRepresentative = zone.stream().filter(v -> 
!representative.contains(v)).collect(Collectors.toSet()); // Remove edges between modulator and non-representative vertices applyReductionRulesForZone(nonRepresentative, representative); }); } /** * Applies reduction rules for a specific zone */ private void applyReductionRulesForZone(Set nonRepresentative, Set representative) { // Reduction Rule 5 & 6: Remove arcs between modulator and non-representative vertices[1] nonRepresentative.parallelStream().forEach(vertex -> { modulator.parallelStream().forEach(modulatorVertex -> { // Remove incoming edges from modulator if (graph.containsEdge(modulatorVertex, vertex)) { // Mark for removal (in actual implementation, would remove) addBypassEdges(modulatorVertex, vertex, representative); } // Remove outgoing edges to modulator if (graph.containsEdge(vertex, modulatorVertex)) { // Mark for removal (in actual implementation, would remove) addBypassEdges(vertex, modulatorVertex, representative); } }); }); } /** * Adds bypass edges through representatives when removing direct edges[1] */ private void addBypassEdges(V source, V target, Set representatives) { if (source == null || target == null || representatives == null || representatives.isEmpty()) { return; } // Avoid self-loops and direct edges if (source.equals(target) || graph.containsEdge(source, target)) { return; } // Track added edges for potential rollback Set addedEdges = new HashSet<>(); boolean bypassAdded = false; try { // Method 1: Find a single representative that can serve as bypass Optional directBypass = representatives.parallelStream() .filter(rep -> !rep.equals(source) && !rep.equals(target)) .filter(rep -> hasPath(source, rep) && hasPath(rep, target)) .findFirst(); if (directBypass.isPresent()) { V rep = directBypass.get(); // Add edge from source to representative if not exists if (!graph.containsEdge(source, rep)) { E edge1 = graph.addEdge(source, rep); if (edge1 != null) { addedEdges.add(edge1); } } // Add edge from representative to target if not 
exists if (!graph.containsEdge(rep, target)) { E edge2 = graph.addEdge(rep, target); if (edge2 != null) { addedEdges.add(edge2); } } bypassAdded = true; } else { // Method 2: Find a chain of representatives that can form a bypass path List bypassChain = findBypassChain(source, target, representatives); if (!bypassChain.isEmpty()) { // Add edges along the bypass chain V current = source; for (V next : bypassChain) { if (!graph.containsEdge(current, next)) { E edge = graph.addEdge(current, next); if (edge != null) { addedEdges.add(edge); } } current = next; } // Add final edge to target if (!graph.containsEdge(current, target)) { E edge = graph.addEdge(current, target); if (edge != null) { addedEdges.add(edge); } } bypassAdded = true; } } // Method 3: If no direct bypass found, try to create minimal bypass structure if (!bypassAdded) { createMinimalBypass(source, target, representatives, addedEdges); } } catch (Exception e) { // Rollback any added edges on failure for (E edge : addedEdges) { try { graph.removeEdge(edge); } catch (Exception rollbackException) { // Log but don't throw - we're already in error handling } } // Optionally log the error or handle it based on your error handling strategy System.err.println("Failed to add bypass edges from " + source + " to " + target + ": " + e.getMessage()); } } /** * Finds a chain of representative vertices that can form a bypass path */ private List findBypassChain(V source, V target, Set representatives) { if (representatives.size() <= 1) { return Collections.emptyList(); } // Use BFS to find shortest chain through representatives Map predecessor = new HashMap<>(); Queue queue = new LinkedList<>(); Set visited = new HashSet<>(); // Start from representatives reachable from source for (V rep : representatives) { if (!rep.equals(source) && !rep.equals(target) && hasPath(source, rep)) { queue.offer(rep); visited.add(rep); predecessor.put(rep, null); // Mark as starting point } } // BFS through representatives while 
(!queue.isEmpty()) { V current = queue.poll(); // Check if we can reach target from current representative if (hasPath(current, target)) { // Reconstruct path List chain = new ArrayList<>(); V node = current; while (node != null) { chain.add(0, node); // Add to front to reverse order node = predecessor.get(node); } return chain; } // Explore adjacent representatives for (V nextRep : representatives) { if (!visited.contains(nextRep) && !nextRep.equals(current) && !nextRep.equals(source) && !nextRep.equals(target)) { if (hasPath(current, nextRep)) { queue.offer(nextRep); visited.add(nextRep); predecessor.put(nextRep, current); } } } } return Collections.emptyList(); } /** * Creates a minimal bypass structure when direct bypass is not available */ private void createMinimalBypass(V source, V target, Set representatives, Set addedEdges) { // Find representatives reachable from source Set sourceReachable = representatives.parallelStream() .filter(rep -> !rep.equals(source) && !rep.equals(target)) .filter(rep -> hasPath(source, rep)) .collect(Collectors.toSet()); // Find representatives that can reach target Set targetReachable = representatives.parallelStream() .filter(rep -> !rep.equals(source) && !rep.equals(target)) .filter(rep -> hasPath(rep, target)) .collect(Collectors.toSet()); if (sourceReachable.isEmpty() || targetReachable.isEmpty()) { return; } // Strategy: Connect source-reachable to target-reachable representatives V sourceRep = sourceReachable.iterator().next(); V targetRep = targetReachable.iterator().next(); // If they're the same representative, we have a complete bypass if (sourceRep.equals(targetRep)) { if (!graph.containsEdge(source, sourceRep)) { E edge1 = graph.addEdge(source, sourceRep); if (edge1 != null) { addedEdges.add(edge1); } } if (!graph.containsEdge(sourceRep, target)) { E edge2 = graph.addEdge(sourceRep, target); if (edge2 != null) { addedEdges.add(edge2); } } } else { // Connect through both representatives if 
(!graph.containsEdge(source, sourceRep)) { E edge1 = graph.addEdge(source, sourceRep); if (edge1 != null) { addedEdges.add(edge1); } } if (!graph.containsEdge(sourceRep, targetRep)) { E edge2 = graph.addEdge(sourceRep, targetRep); if (edge2 != null) { addedEdges.add(edge2); } } if (!graph.containsEdge(targetRep, target)) { E edge3 = graph.addEdge(targetRep, target); if (edge3 != null) { addedEdges.add(edge3); } } } } /** * Enhanced path checking with caching for better performance */ private final Map pathCache = new ConcurrentHashMap<>(); // updated implementation private boolean hasPath(V source, V target) { if (source.equals(target)) { return true; } // Use cache to avoid redundant path computations String cacheKey = source.toString() + "->" + target.toString(); return pathCache.computeIfAbsent(cacheKey, k -> { try { // Use DFS with depth limit to avoid infinite loops in cyclic graphs return hasPathDFS(source, target, new HashSet<>(), MAX_PATH_LENGTH); } catch (Exception e) { return false; } }); } private boolean hasPathDFS(V source, V target, Set visited, int maxDepth) { if (maxDepth <= 0) { return false; } if (source.equals(target)) { return true; } if (visited.contains(source)) { return false; } visited.add(source); try { for (E edge : graph.outgoingEdgesOf(source)) { V neighbor = graph.getEdgeTarget(edge); if (hasPathDFS(neighbor, target, new HashSet<>(visited), maxDepth - 1)) { return true; } } } catch (Exception e) { // Handle case where vertex might have been removed return false; } finally { visited.remove(source); } return false; } /** * Clears the path cache when graph structure changes significantly */ private void clearPathCache() { pathCache.clear(); } /** * Validates the bypass edges to ensure they don't create unwanted cycles */ private boolean validateBypassEdges(V source, V target, Set representatives) { // Check if adding bypass would create problematic cycles // This is a simplified check - in practice, might need more sophisticated validation 
for (V rep : representatives) { if (hasPath(target, rep) && hasPath(rep, source)) { // Adding bypass through this representative would create a cycle // involving source -> rep -> target -> ... -> rep -> source return false; } } return true; } /** * Alternative implementation that respects the kernelization structure from the paper */ private void addBypassEdgesKernelized(V source, V target, Set representatives) { // This follows the reduction rules from Section 5.1 of the paper // Specifically implements Reduction Rules 1, 3, and 4 if (!validateBypassEdges(source, target, representatives)) { return; } // Find paths through zone representatives (following the paper's zone decomposition) for (V representative : representatives) { if (representative.equals(source) || representative.equals(target)) { continue; } // Check if there's a path from source to representative and representative to target // where all internal vertices are in the same zone (Z\ΓDFVS from the paper) if (hasPathThroughZone(source, representative) && hasPathThroughZone(representative, target)) { // Add bypass edges as per Reduction Rule 1 if (!graph.containsEdge(source, representative)) { graph.addEdge(source, representative); } if (!graph.containsEdge(representative, target)) { graph.addEdge(representative, target); } break; // One bypass is sufficient } } } /** * Checks if there's a path through the same zone (implements zone-aware path checking) */ private boolean hasPathThroughZone(V source, V target) { // Simplified implementation - in practice, would need to track zone membership return hasPath(source, target); } /** * Checks if there's a path between two vertices * original implementation */ /*private boolean hasPath(V source, V target) { if (source.equals(target)) return true; Set visited = new HashSet<>(); Queue queue = new ArrayDeque<>(); queue.offer(source); visited.add(source); while (!queue.isEmpty()) { V current = queue.poll(); for (E edge : graph.outgoingEdgesOf(current)) { V 
neighbor = graph.getEdgeTarget(edge); if (neighbor.equals(target)) return true; if (!visited.contains(neighbor)) { visited.add(neighbor); queue.offer(neighbor); } } } return false; }*/ /** * Solves the kernelized instance using parallel processing[18] */ private Set solveKernelizedInstance(int k) { Set solution = ConcurrentHashMap.newKeySet(); // Add all representatives to solution (simplified approach) kDfvsRepresentatives.values().parallelStream().forEach(solution::addAll); // Add high-degree vertices from remainder if needed if (solution.size() < k) { remainder.stream() .sorted(Comparator.comparingInt(v -> -(graph.inDegreeOf(v) + graph.outDegreeOf(v)))) .limit(k - solution.size()) .forEach(solution::add); } return solution; } /** * Computes minimal feedback vertex set for a subgraph */ private Set computeMinimalFeedbackVertexSet(Graph subgraph, int k) { Set feedbackSet = new HashSet<>(); CycleDetector cycleDetector = new CycleDetector<>(subgraph); // Greedy approach: remove vertices with highest degree until acyclic Graph workingGraph = new DefaultDirectedGraph<>(edgeClass); subgraph.vertexSet().forEach(workingGraph::addVertex); subgraph.edgeSet().forEach(edge -> { V source = subgraph.getEdgeSource(edge); V target = subgraph.getEdgeTarget(edge); workingGraph.addEdge(source, target); }); while (cycleDetector.detectCycles() && feedbackSet.size() < k) { // Find vertex with highest degree in remaining graph V maxDegreeVertex = workingGraph.vertexSet().stream() .max(Comparator.comparingInt(v -> workingGraph.inDegreeOf(v) + workingGraph.outDegreeOf(v))) .orElse(null); if (maxDegreeVertex != null) { feedbackSet.add(maxDegreeVertex); workingGraph.removeVertex(maxDegreeVertex); cycleDetector = new CycleDetector<>(workingGraph); } else { break; } } return feedbackSet; } /* * Code to CALCULATE MAX_PATH_LENGTH is below * May not be necessary. * Not currently used - causes NPEs */ /** * Computes the maximum path length for path-finding operations in the DFVS solver. 
* This value is used to prevent infinite loops in cyclic graphs and to bound the * computational complexity of path-checking operations. * * The value is computed based on: * 1. Graph size (number of vertices) * 2. Parameter k (solution size) * 3. Treewidth considerations from the kernelization algorithm * 4. Theoretical bounds from the paper * * @return the maximum path length to use in DFS and path-checking operations */ private int computeMaxPathLength() { int n = graph.vertexSet().size(); // Base case: very small graphs if (n <= 1) { return 1; } // For empty or trivial cases if (k <= 0) { return Math.min(n, 10); } // Theoretical considerations from the paper: // - The kernelization algorithm produces graphs of size (k*ℓ)^O(η²) // - In practice, meaningful paths for cycle detection are much shorter // - We need to balance completeness with performance // Method 1: Based on graph density and structure int densityBasedLimit = computeDensityBasedLimit(n); // Method 2: Based on parameter k and theoretical bounds int parameterBasedLimit = computeParameterBasedLimit(k, n); // Method 3: Based on strongly connected components int sccBasedLimit = computeSCCBasedLimit(n); // Method 4: Based on treewidth considerations (if available) int treewidthBasedLimit = computeTreewidthBasedLimit(n, k); // Take the minimum of all approaches to ensure efficiency int computedLimit = Math.min( Math.min(densityBasedLimit, parameterBasedLimit), Math.min(sccBasedLimit, treewidthBasedLimit)); // Apply safety bounds int minLimit = Math.max(k + 1, 5); // At least k+1 for meaningful cycle detection int maxLimit = Math.min(n, 1000); // Never exceed graph size or reasonable upper bound return Math.max(minLimit, Math.min(computedLimit, maxLimit)); } /** * Computes path length limit based on graph density */ private int computeDensityBasedLimit(int n) { int m = graph.edgeSet().size(); if (n <= 1) return 1; // Density = m / (n * (n-1)) for directed graphs double density = (double) m / (n * (n - 
1)); if (density > 0.5) { // Dense graph: shorter paths are sufficient return Math.min(n / 2, 50); } else if (density > 0.1) { // Medium density return Math.min(2 * n / 3, 100); } else { // Sparse graph: may need longer paths return Math.min(n, 200); } } /** * Computes path length limit based on parameter k and theoretical bounds */ private int computeParameterBasedLimit(int k, int n) { // From the paper: after kernelization, meaningful structures are bounded // In practice, cycles in minimal feedback vertex set problems are often short if (k >= n / 2) { // Large k relative to n: graph is almost acyclic return Math.min(n, 20); } // Heuristic: paths longer than O(k * log n) are unlikely to be critical // This is based on the observation that feedback vertex sets create // a bounded structure in the remaining graph int theoreticalLimit = k * (int) Math.ceil(Math.log(n + 1) / Math.log(2)); return Math.min(theoreticalLimit + k, n); } /** * Computes path length limit based on strongly connected component analysis */ private int computeSCCBasedLimit(int n) { // Quick heuristic: if we can detect SCC structure efficiently try { // Estimate SCC sizes - in well-structured graphs, large SCCs are rare // This is a simplified version - could be made more sophisticated Set> sccs = estimateStronglyConnectedComponents(); if (sccs.isEmpty()) { return Math.min(n, 10); // Likely acyclic } int maxSCCSize = sccs.stream().mapToInt(Set::size).max().orElse(1); // Path length should be at most twice the largest SCC size return Math.min(2 * maxSCCSize, n); } catch (Exception e) { // Fallback if SCC analysis fails return Math.min(n / 2, 100); } } /** * Computes path length limit based on treewidth considerations */ private int computeTreewidthBasedLimit(int n, int k) { // From the paper: the algorithm works with treewidth-η modulators // Graphs with small treewidth have bounded path lengths for meaningful cycles // Heuristic estimation of effective treewidth influence // In practice, graphs 
arising in DFVS often have some tree-like structure if (k == 0) { return 1; // Graph should be acyclic } // Conservative estimate: assume moderate treewidth // Path lengths in bounded-treewidth graphs are typically small int treewidthEstimate = Math.min(k + 3, (int) Math.sqrt(n)); // Bound based on treewidth: paths in tree-decomposition are limited return Math.min(n, 3 * treewidthEstimate + k); } /** * Fast estimation of strongly connected components for path length computation */ private Set> estimateStronglyConnectedComponents() { // Simplified SCC detection for bound computation // This is a heuristic - not a complete SCC algorithm Set> sccs = new HashSet<>(); Set visited = new HashSet<>(); for (V vertex : graph.vertexSet()) { if (!visited.contains(vertex)) { Set component = new HashSet<>(); // Simple reachability check within reasonable bounds exploreComponent( vertex, component, visited, 0, Math.min(20, graph.vertexSet().size())); if (component.size() > 1) { sccs.add(component); } } } return sccs; } /** * Helper method for component exploration with depth limit */ private void exploreComponent(V vertex, Set component, Set visited, int depth, int maxDepth) { if (depth >= maxDepth || visited.contains(vertex)) { return; } visited.add(vertex); component.add(vertex); try { for (E edge : graph.outgoingEdgesOf(vertex)) { V neighbor = graph.getEdgeTarget(edge); if (!visited.contains(neighbor)) { exploreComponent(neighbor, component, visited, depth + 1, maxDepth); } } } catch (Exception e) { // Handle potential graph modification during traversal } } /** * Static method to get a reasonable default MAX_PATH_LENGTH * when graph context is not available */ public static int getDefaultMaxPathLength() { return 50; // Conservative default for most practical cases } /** * Adaptive method that updates MAX_PATH_LENGTH based on runtime performance */ private int getAdaptiveMaxPathLength() { // Start with computed value int baseLimit = computeMaxPathLength(); // Adjust based on 
previous performance if tracking is enabled if (pathComputationStats != null && pathComputationStats.getAverageTime() > 0) { double avgTime = pathComputationStats.getAverageTime(); if (avgTime > 100) { // ms - too slow return Math.max(baseLimit / 2, 10); } else if (avgTime < 10) { // ms - can afford larger limit return Math.min(baseLimit * 2, graph.vertexSet().size()); } } return baseLimit; } /** * Context-aware MAX_PATH_LENGTH computation * This version considers the specific operation being performed */ private int getContextAwareMaxPathLength(PathContext context) { int baseLimit = computeMaxPathLength(); switch (context) { case CYCLE_DETECTION: // Cycle detection needs sufficient depth but can be more conservative return Math.min(baseLimit, graph.vertexSet().size() / 2); case BYPASS_CREATION: // Bypass creation might need shorter paths for efficiency return Math.min(baseLimit / 2, 20); case SOLUTION_VERIFICATION: // Verification should be thorough but bounded return Math.min(baseLimit, 100); case REPRESENTATIVE_COMPUTATION: // Representative computation from the paper - can use larger bounds return baseLimit; default: return baseLimit; } } /** * Enum for different path computation contexts */ private enum PathContext { CYCLE_DETECTION, BYPASS_CREATION, SOLUTION_VERIFICATION, REPRESENTATIVE_COMPUTATION } /** * Simple performance tracking for adaptive behavior */ private static class PathComputationStats { private long totalTime = 0; private int callCount = 0; public void recordTime(long time) { totalTime += time; callCount++; } public double getAverageTime() { return callCount > 0 ? 
(double) totalTime / callCount : 0; } } // Instance variable for tracking performance (optional) private PathComputationStats pathComputationStats = new PathComputationStats(); /** * Main method to get MAX_PATH_LENGTH - delegates to appropriate implementation */ private int getMaxPathLength() { return getAdaptiveMaxPathLength(); } // Constant declaration that uses the computed value // private final int MAX_PATH_LENGTH = computeMaxPathLength(); // set to constant for now - computeMaxPathLength() causes NPEs private final int MAX_PATH_LENGTH = 10; } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/EnhancedParameterComputer.java ================================================ package org.hjug.feedback.vertex.kernelized; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; /** * Enhanced parameter computer with integrated modulator calculation * Generated by Perplexity.ai's Research model */ public class EnhancedParameterComputer { private final TreewidthComputer treewidthComputer; private final FeedbackVertexSetComputer fvsComputer; private final ModulatorComputer modulatorComputer; private final ExecutorService executorService; public EnhancedParameterComputer(SuperTypeToken edgeTypeToken) { this.treewidthComputer = new TreewidthComputer<>(); this.fvsComputer = new FeedbackVertexSetComputer<>(edgeTypeToken); this.modulatorComputer = new ModulatorComputer<>(edgeTypeToken); this.executorService = Executors.newWorkStealingPool(); } public EnhancedParameterComputer(SuperTypeToken edgeTypeToken, int parallelismLevel) { this.treewidthComputer = new TreewidthComputer<>(parallelismLevel); this.fvsComputer = new FeedbackVertexSetComputer<>(edgeTypeToken, parallelismLevel); this.modulatorComputer = new ModulatorComputer<>(edgeTypeToken, 
parallelismLevel); this.executorService = Executors.newWorkStealingPool(parallelismLevel); } /** * Computes parameters with automatic modulator optimization */ public EnhancedParameters computeOptimalParameters(Graph graph, int maxModulatorSize) { return computeOptimalParameters(graph, maxModulatorSize, 3); // Default target treewidth } /** * Computes parameters with specific target treewidth */ public EnhancedParameters computeOptimalParameters( Graph graph, int maxModulatorSize, int targetTreewidth) { // Compute k (feedback vertex set size) - this doesn't depend on modulator CompletableFuture kFuture = CompletableFuture.supplyAsync(() -> fvsComputer.computeK(graph), executorService); // Compute optimal modulator CompletableFuture> modulatorFuture = CompletableFuture.supplyAsync( () -> modulatorComputer.computeModulator(graph, targetTreewidth, maxModulatorSize), executorService); // Wait for both computations try { int k = kFuture.get(); ModulatorComputer.ModulatorResult modulatorResult = modulatorFuture.get(); return new EnhancedParameters<>( k, modulatorResult.getModulator(), modulatorResult.getResultingTreewidth(), modulatorResult.getQualityScore()); } catch (Exception e) { throw new RuntimeException("Parameter computation failed", e); } } /** * Computes parameters with given modulator */ public EnhancedParameters computeParameters(Graph graph, Set modulator) { int k = fvsComputer.computeK(graph); int eta = treewidthComputer.computeEta(graph, modulator); double quality = computeParameterQuality(k, modulator.size(), eta); return new EnhancedParameters<>(k, modulator, eta, quality); } /** * Finds multiple good modulators and returns the best parameters */ public List> computeMultipleParameterOptions( Graph graph, int maxModulatorSize, int numOptions) { List>> futures = new ArrayList<>(); // Try different target treewidths for (int targetTreewidth = 1; targetTreewidth <= Math.min(5, maxModulatorSize); targetTreewidth++) { final int tw = targetTreewidth; 
futures.add(CompletableFuture.supplyAsync( () -> computeOptimalParameters(graph, maxModulatorSize, tw), executorService)); } // Try different modulator size limits for (int maxSize = Math.min(3, maxModulatorSize); maxSize <= maxModulatorSize; maxSize += Math.max(1, maxModulatorSize / 4)) { final int size = maxSize; futures.add(CompletableFuture.supplyAsync(() -> computeOptimalParameters(graph, size, 3), executorService)); } return futures.stream() .map(CompletableFuture::join) .distinct() .sorted((p1, p2) -> Double.compare(p1.getQualityScore(), p2.getQualityScore())) .limit(numOptions) .collect(java.util.stream.Collectors.toList()); } /** * Validates that a modulator actually achieves the desired treewidth */ public boolean validateModulator(Graph graph, Set modulator, int targetTreewidth) { int actualTreewidth = treewidthComputer.computeEta(graph, modulator); return actualTreewidth <= targetTreewidth; } /** * Computes parameter quality score */ private double computeParameterQuality(int k, int modulatorSize, int eta) { // Lower is better: prioritize small k, then small modulator, then small eta return k * 10.0 + modulatorSize * 5.0 + eta * 1.0; } public void shutdown() { treewidthComputer.shutdown(); fvsComputer.shutdown(); modulatorComputer.shutdown(); if (executorService != null && !executorService.isShutdown()) { executorService.shutdown(); } } /** * Enhanced parameters container with modulator information */ public static class EnhancedParameters { private final int k; // feedback vertex set size private final Set modulator; // treewidth modulator private final int eta; // treewidth after modulator removal private final double qualityScore; // overall quality score public EnhancedParameters(int k, Set modulator, int eta, double qualityScore) { this.k = k; this.modulator = new HashSet<>(modulator); this.eta = eta; this.qualityScore = qualityScore; } public int getK() { return k; } public Set getModulator() { return new HashSet<>(modulator); } public int 
getModulatorSize() { return modulator.size(); } public int getEta() { return eta; } public double getQualityScore() { return qualityScore; } /** * Total parameter for the DFVS kernelization: k + ℓ */ public int getTotalParameter() { return k + modulator.size(); } /** * Kernel size bound: (k·ℓ)^O(η²) */ public double getKernelSizeBound() { if (k == 0 || modulator.size() == 0) return 1.0; return Math.pow(k * modulator.size(), eta * eta); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (!(obj instanceof EnhancedParameters)) return false; EnhancedParameters other = (EnhancedParameters) obj; return k == other.k && eta == other.eta && modulator.equals(other.modulator); } @Override public int hashCode() { return Objects.hash(k, modulator, eta); } @Override public String toString() { return String.format( "EnhancedParameters{k=%d, |M|=%d, η=%d, quality=%.2f, kernelBound=%.0f}", k, modulator.size(), eta, qualityScore, getKernelSizeBound()); } } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/FeedbackVertexSetComputer.java ================================================ package org.hjug.feedback.vertex.kernelized; import java.util.*; import java.util.concurrent.*; import java.util.stream.Collectors; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.alg.connectivity.KosarajuStrongConnectivityInspector; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.DefaultDirectedGraph; /** * Multithreaded feedback vertex set computer implementing multiple algorithms * for approximating minimum directed feedback vertex sets. 
* Generated by Perplexity.ai's Research model */ public class FeedbackVertexSetComputer { private final Class edgeClass; private final ExecutorService executorService; private final Map, Set> greedyFeedbackVertexSetCache; public FeedbackVertexSetComputer(SuperTypeToken edgeTypeToken) { this.edgeClass = edgeTypeToken.getClassFromTypeToken(); this.executorService = ForkJoinPool.commonPool(); this.greedyFeedbackVertexSetCache = new ConcurrentHashMap<>(); } public FeedbackVertexSetComputer(SuperTypeToken edgeTypeToken, int parallelismLevel) { this.edgeClass = edgeTypeToken.getClassFromTypeToken(); this.executorService = Executors.newWorkStealingPool(parallelismLevel); this.greedyFeedbackVertexSetCache = new ConcurrentHashMap<>(); } /** * Computes k: the size of minimum directed feedback vertex set */ public int computeK(Graph graph) { if (!hasCycles(graph)) { return 0; } // Run multiple approximation algorithms in parallel List>> algorithms = Arrays.asList( () -> greedyFeedbackVertexSet(graph), () -> stronglyConnectedComponentsBasedFVS(graph), () -> degreeBasedFeedbackVertexSet(graph), () -> localSearchFeedbackVertexSet(graph)); try { List>> results = executorService.invokeAll(algorithms, 60, TimeUnit.SECONDS); return results.parallelStream() .map(this::getFutureValue) .filter(Objects::nonNull) .filter(fvs -> isValidFeedbackVertexSet(graph, fvs)) .mapToInt(Set::size) .min() .orElse(computeFallbackK(graph)); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return computeFallbackK(graph); } } /** * Greedy feedback vertex set algorithm */ Set greedyFeedbackVertexSet(Graph graph) { return greedyFeedbackVertexSetCache.computeIfAbsent(graph, g -> { Set feedbackSet = ConcurrentHashMap.newKeySet(); Graph workingGraph = copyGraph(g); while (hasCycles(workingGraph)) { // Find vertex with maximum degree in current SCCs V maxDegreeVertex = findVertexInCyclesWithMaxDegree(workingGraph); if (maxDegreeVertex == null) break; feedbackSet.add(maxDegreeVertex); 
workingGraph.removeVertex(maxDegreeVertex); } return feedbackSet; }); } /** * SCC-based feedback vertex set algorithm */ private Set stronglyConnectedComponentsBasedFVS(Graph graph) { Set feedbackSet = ConcurrentHashMap.newKeySet(); Graph workingGraph = copyGraph(graph); while (hasCycles(workingGraph)) { KosarajuStrongConnectivityInspector inspector = new KosarajuStrongConnectivityInspector<>(workingGraph); List> sccs = inspector.stronglyConnectedSets(); // Process non-trivial SCCs in parallel Optional vertexToRemove = sccs.parallelStream() .filter(scc -> scc.size() > 1) .flatMap(Collection::stream) .max(Comparator.comparingInt(v -> workingGraph.inDegreeOf(v) + workingGraph.outDegreeOf(v))); if (vertexToRemove.isPresent()) { V vertex = vertexToRemove.get(); feedbackSet.add(vertex); workingGraph.removeVertex(vertex); } else { break; } } return feedbackSet; } /** * Degree-based feedback vertex set algorithm */ private Set degreeBasedFeedbackVertexSet(Graph graph) { Set feedbackSet = ConcurrentHashMap.newKeySet(); Graph workingGraph = copyGraph(graph); while (hasCycles(workingGraph)) { // Calculate degree scores in parallel Map degreeScores = workingGraph.vertexSet().parallelStream() .collect(Collectors.toConcurrentMap(v -> v, v -> calculateDegreeScore(workingGraph, v))); Optional bestVertex = degreeScores.entrySet().parallelStream() .filter(entry -> entry.getValue() > 0) .max(Map.Entry.comparingByValue()) .map(Map.Entry::getKey); if (bestVertex.isPresent()) { V vertex = bestVertex.get(); feedbackSet.add(vertex); workingGraph.removeVertex(vertex); } else { break; } } return feedbackSet; } /** * Local search improvement for feedback vertex set */ private Set localSearchFeedbackVertexSet(Graph graph) { Set currentSolution = greedyFeedbackVertexSet(graph); boolean improved = true; int maxIterations = 100; int iteration = 0; while (improved && iteration < maxIterations) { improved = false; iteration++; // Try to improve by removing and adding vertices for (V vertex : new 
HashSet<>(currentSolution)) { Set candidateSolution = new HashSet<>(currentSolution); candidateSolution.remove(vertex); if (isValidFeedbackVertexSet(graph, candidateSolution)) { currentSolution = candidateSolution; improved = true; break; } // Try swapping with non-solution vertices for (V replacement : graph.vertexSet()) { if (!currentSolution.contains(replacement)) { Set swapSolution = new HashSet<>(currentSolution); swapSolution.remove(vertex); swapSolution.add(replacement); if (isValidFeedbackVertexSet(graph, swapSolution) && swapSolution.size() < currentSolution.size()) { currentSolution = swapSolution; improved = true; break; } } } if (improved) break; } } return currentSolution; } /** * Finds vertex in cycles with maximum degree */ private V findVertexInCyclesWithMaxDegree(Graph graph) { KosarajuStrongConnectivityInspector inspector = new KosarajuStrongConnectivityInspector<>(graph); return inspector.stronglyConnectedSets().parallelStream() .filter(scc -> scc.size() > 1 || hasSelfLoop(graph, scc.iterator().next())) .flatMap(Collection::stream) .max(Comparator.comparingInt(v -> graph.inDegreeOf(v) + graph.outDegreeOf(v))) .orElse(null); } /** * Calculates degree-based score for vertex selection */ private double calculateDegreeScore(Graph graph, V vertex) { int inDegree = graph.inDegreeOf(vertex); int outDegree = graph.outDegreeOf(vertex); // Check if vertex is in any SCC with size > 1 KosarajuStrongConnectivityInspector inspector = new KosarajuStrongConnectivityInspector<>(graph); boolean inNonTrivialSCC = inspector.stronglyConnectedSets().stream().anyMatch(scc -> scc.size() > 1 && scc.contains(vertex)); if (!inNonTrivialSCC && !hasSelfLoop(graph, vertex)) { return 0.0; // Not in any cycle } return (inDegree + outDegree) + (inDegree * outDegree * 0.5) + (hasSelfLoop(graph, vertex) ? 
1.0 : 0.0); } /** * Checks if a vertex has a self-loop */ private boolean hasSelfLoop(Graph graph, V vertex) { return graph.containsEdge(vertex, vertex); } /** * Checks if the graph has cycles */ private boolean hasCycles(Graph graph) { CycleDetector detector = new CycleDetector<>(graph); return detector.detectCycles(); } /** * Validates if a set is a feedback vertex set */ private boolean isValidFeedbackVertexSet(Graph graph, Set feedbackSet) { Graph testGraph = copyGraph(graph); feedbackSet.forEach(testGraph::removeVertex); return !hasCycles(testGraph); } /** * Creates a copy of the graph */ @SuppressWarnings("unchecked") private Graph copyGraph(Graph original) { // TODO: consider using SparseIntDirectedGraph to improve copy performance Graph copy = new DefaultDirectedGraph<>(edgeClass); // Add vertices original.vertexSet().forEach(copy::addVertex); // Add edges original.edgeSet().forEach(edge -> { V source = original.getEdgeSource(edge); V target = original.getEdgeTarget(edge); // adding a large number of edges takes time copy.addEdge(source, target); }); return copy; } /** * Fallback computation for k */ private int computeFallbackK(Graph graph) { // Simple fallback: count self-loops + rough estimate long selfLoops = graph.vertexSet().parallelStream() .filter(v -> graph.containsEdge(v, v)) .count(); KosarajuStrongConnectivityInspector inspector = new KosarajuStrongConnectivityInspector<>(graph); long nonTrivialSCCs = inspector.stronglyConnectedSets().parallelStream() .filter(scc -> scc.size() > 1) .count(); return (int) Math.max(1, selfLoops + Math.max(1, nonTrivialSCCs / 2)); } private Set getFutureValue(Future> future) { try { return future.get(); } catch (Exception e) { return null; } } public void shutdown() { if (executorService != null && !executorService.isShutdown()) { executorService.shutdown(); } } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/ModulatorComputer.java 
================================================ package org.hjug.feedback.vertex.kernelized; import com.google.common.util.concurrent.AtomicDouble; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.Graphs; import org.jgrapht.alg.connectivity.ConnectivityInspector; import org.jgrapht.graph.DefaultEdge; import org.jgrapht.graph.DefaultUndirectedGraph; /** * Multithreaded modulator computer that finds treewidth-η modulators * based on the algorithms described in the DFVS paper. * Generated by Perplexity.ai's Research model */ public class ModulatorComputer { private final TreewidthComputer treewidthComputer; private final FeedbackVertexSetComputer fvsComputer; private final ExecutorService executorService; private final Map, Map> betweennessCentralityCache; public ModulatorComputer(SuperTypeToken edgeTypeToken) { this.treewidthComputer = new TreewidthComputer<>(); this.fvsComputer = new FeedbackVertexSetComputer<>(edgeTypeToken); this.executorService = ForkJoinPool.commonPool(); this.betweennessCentralityCache = new ConcurrentHashMap<>(); } public ModulatorComputer(SuperTypeToken edgeTypeToken, int parallelismLevel) { this.treewidthComputer = new TreewidthComputer<>(parallelismLevel); this.fvsComputer = new FeedbackVertexSetComputer<>(edgeTypeToken, parallelismLevel); this.executorService = Executors.newWorkStealingPool(parallelismLevel); this.betweennessCentralityCache = new ConcurrentHashMap<>(); } /** * Computes an optimal treewidth-η modulator using multiple strategies */ public ModulatorResult computeModulator(Graph graph, int targetTreewidth, int maxModulatorSize) { if (maxModulatorSize <= 0) { return new ModulatorResult<>(new HashSet<>(), treewidthComputer.computeEta(graph, new HashSet<>()), 0); } // Run multiple modulator finding strategies in parallel 
List>> strategies = Arrays.asList( () -> computeGreedyDegreeModulator(graph, targetTreewidth, maxModulatorSize), () -> computeFeedbackVertexSetModulator(graph, targetTreewidth, maxModulatorSize), () -> computeTreewidthDecompositionModulator(graph, targetTreewidth, maxModulatorSize), () -> computeHighDegreeVertexModulator(graph, targetTreewidth, maxModulatorSize), () -> computeBottleneckVertexModulator(graph, targetTreewidth, maxModulatorSize)); try { List>> results = executorService.invokeAll(strategies, 60, TimeUnit.SECONDS); return results.parallelStream() .map(this::getFutureValue) .filter(Objects::nonNull) .filter(modulator -> modulator.size() <= maxModulatorSize && !modulator.isEmpty()) .map(modulator -> new ModulatorResult<>( modulator, treewidthComputer.computeEta(graph, modulator), computeModulatorQuality(graph, modulator, targetTreewidth))) .filter(result -> result.getResultingTreewidth() <= targetTreewidth) .min(Comparator.comparingDouble(ModulatorResult::getQualityScore)) .orElse(computeFallbackModulator(graph, targetTreewidth, maxModulatorSize)); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return computeFallbackModulator(graph, targetTreewidth, maxModulatorSize); } } /** * Computes modulator using iterative vertex removal based on degree */ private Set computeGreedyDegreeModulator(Graph graph, int targetTreewidth, int maxSize) { Set modulator = ConcurrentHashMap.newKeySet(); Graph workingGraph = convertToUndirected(graph); while (modulator.size() < maxSize) { int currentTreewidth = treewidthComputer.computeEta(graph, modulator); if (currentTreewidth <= targetTreewidth) { break; } Optional> bestVertex = computeVertexRemovalScore(workingGraph, targetTreewidth).entrySet().parallelStream() .max(Map.Entry.comparingByValue()); if (bestVertex == null || bestVertex.isEmpty()) break; modulator.add(bestVertex.get().getKey()); workingGraph.removeVertex(bestVertex.get().getKey()); } return modulator; } /** * Uses feedback vertex set as 
starting point for modulator */ private Set computeFeedbackVertexSetModulator(Graph graph, int targetTreewidth, int maxSize) { Set modulator = new HashSet<>(); // Start with feedback vertex set vertices (they're often good modulator candidates) Set fvs = fvsComputer.greedyFeedbackVertexSet(graph); // Add FVS vertices up to budget Iterator fvsIter = fvs.iterator(); while (fvsIter.hasNext() && modulator.size() < maxSize) { V vertex = fvsIter.next(); modulator.add(vertex); int currentTreewidth = treewidthComputer.computeEta(graph, modulator); if (currentTreewidth <= targetTreewidth) { break; } } // If still not good enough, add high-degree vertices if (modulator.size() < maxSize) { List remainingVertices = graph.vertexSet().stream() .filter(v -> !modulator.contains(v)) .sorted((v1, v2) -> Integer.compare( graph.inDegreeOf(v2) + graph.outDegreeOf(v2), graph.inDegreeOf(v1) + graph.outDegreeOf(v1))) .collect(Collectors.toList()); for (V vertex : remainingVertices) { if (modulator.size() >= maxSize) break; modulator.add(vertex); int currentTreewidth = treewidthComputer.computeEta(graph, modulator); if (currentTreewidth <= targetTreewidth) { break; } } } return modulator; } /** * Uses treewidth decomposition analysis to find modulator */ private Set computeTreewidthDecompositionModulator(Graph graph, int targetTreewidth, int maxSize) { Set modulator = ConcurrentHashMap.newKeySet(); Graph undirected = convertToUndirected(graph); // Identify vertices that appear in many high-width bags Map bagAppearances = new ConcurrentHashMap<>(); Map centralityScores = computeBetweennessCentralityParallel(undirected); // Compute vertex importance based on structural properties Map vertexImportance = undirected.vertexSet().parallelStream() .collect(Collectors.toConcurrentMap( v -> v, v -> computeStructuralImportance(undirected, v, centralityScores.getOrDefault(v, 0.0)))); // Greedily select vertices with highest importance List sortedVertices = vertexImportance.entrySet().stream() 
.sorted(Map.Entry.comparingByValue().reversed()) .map(Map.Entry::getKey) .collect(Collectors.toList()); for (V vertex : sortedVertices) { if (modulator.size() >= maxSize) break; modulator.add(vertex); int currentTreewidth = treewidthComputer.computeEta(graph, modulator); if (currentTreewidth <= targetTreewidth) { break; } } return modulator; } /** * Focuses on highest degree vertices first */ private Set computeHighDegreeVertexModulator(Graph graph, int targetTreewidth, int maxSize) { Set modulator = new HashSet<>(); List verticesByDegree = graph.vertexSet().stream() .sorted((v1, v2) -> Integer.compare( graph.inDegreeOf(v2) + graph.outDegreeOf(v2), graph.inDegreeOf(v1) + graph.outDegreeOf(v1))) .collect(Collectors.toList()); for (V vertex : verticesByDegree) { if (modulator.size() >= maxSize) break; modulator.add(vertex); int currentTreewidth = treewidthComputer.computeEta(graph, modulator); if (currentTreewidth <= targetTreewidth) { break; } } return modulator; } /** * Identifies bottleneck vertices that connect different components */ private Set computeBottleneckVertexModulator(Graph graph, int targetTreewidth, int maxSize) { Set modulator = ConcurrentHashMap.newKeySet(); Graph undirected = convertToUndirected(graph); // Find articulation points and vertices with high betweenness centrality Set articulationPoints = findArticulationPoints(undirected); Map centralityScores = computeBetweennessCentralityParallel(undirected); // Combine articulation points with high centrality vertices Set candidates = new HashSet<>(articulationPoints); candidates.addAll(centralityScores.entrySet().stream() .sorted(Map.Entry.comparingByValue().reversed()) .limit(Math.max(10, maxSize * 2)) .map(Map.Entry::getKey) .collect(Collectors.toSet())); // Greedily select best candidates for (V vertex : candidates) { if (modulator.size() >= maxSize) break; modulator.add(vertex); int currentTreewidth = treewidthComputer.computeEta(graph, modulator); if (currentTreewidth <= targetTreewidth) { 
break; } } return modulator; } /** * Computes vertex removal scores based on their impact on achieving the target treewidth. * * This method evaluates vertices based on multiple criteria: * 1. Direct treewidth reduction impact * 2. Degree-based scoring relative to target treewidth * 3. Structural importance (betweenness centrality, clustering coefficient) * 4. Connectivity disruption potential * 5. Distance from target treewidth achievement * * @param targetTreewidth the desired treewidth after vertex removal * @return concurrent map of vertices to their removal scores (higher = more beneficial to remove) */ public ConcurrentHashMap computeVertexRemovalScore(Graph graph, int targetTreewidth) { Set vertices = graph.vertexSet(); int n = vertices.size(); if (n == 0 || targetTreewidth < 0) { return new ConcurrentHashMap<>(); } // Initialize concurrent data structures ConcurrentHashMap scores = new ConcurrentHashMap<>(); ConcurrentHashMap degrees = new ConcurrentHashMap<>(); ConcurrentHashMap structuralImportance = new ConcurrentHashMap<>(); // Custom thread pool for optimal performance ForkJoinPool customThreadPool = new ForkJoinPool(Math.min(Runtime.getRuntime().availableProcessors(), Math.max(1, n / 100))); try { CompletableFuture computation = CompletableFuture.runAsync( () -> { // Phase 1: Compute basic metrics in parallel computeBasicMetricsParallel(graph, vertices, degrees, targetTreewidth); // Phase 2: Compute structural importance in parallel computeStructuralImportanceParallel(graph, vertices, structuralImportance, targetTreewidth); // Phase 3: Compute comprehensive scores in parallel computeComprehensiveScoresParallel( graph, vertices, scores, degrees, structuralImportance, targetTreewidth); // Phase 4: Apply target treewidth specific adjustments applyTargetTreewidthAdjustmentsParallel(graph, vertices, scores, targetTreewidth); }, customThreadPool); computation.get(); } catch (InterruptedException | ExecutionException e) { Thread.currentThread().interrupt(); 
throw new RuntimeException("Parallel vertex scoring computation failed", e); } finally { shutdownThreadPool(customThreadPool); } return scores; } /** * Computes basic graph metrics in parallel for vertex scoring. */ private void computeBasicMetricsParallel( Graph graph, Set vertices, ConcurrentHashMap degrees, int targetTreewidth) { // Compute degrees in parallel vertices.parallelStream().forEach(vertex -> { int degree = graph.inDegreeOf(vertex) + graph.outDegreeOf(vertex); degrees.put(vertex, degree); }); } /** * Computes structural importance metrics in parallel. */ private void computeStructuralImportanceParallel( Graph graph, Set vertices, ConcurrentHashMap structuralImportance, int targetTreewidth) { // Compute structural metrics in parallel vertices.parallelStream().forEach(vertex -> { double importance = 0.0; // Factor 1: Local clustering coefficient impact importance += computeLocalClusteringImpact(graph, vertex, targetTreewidth); // Factor 2: Connectivity importance importance += computeConnectivityImportance(graph, vertex, targetTreewidth); // Factor 3: Neighborhood density impact importance += computeNeighborhoodDensityImpact(graph, vertex, targetTreewidth); structuralImportance.put(vertex, importance); }); } /** * Computes comprehensive removal scores incorporating all factors and target treewidth. 
*/ private void computeComprehensiveScoresParallel( Graph graph, Set vertices, ConcurrentHashMap scores, ConcurrentHashMap degrees, ConcurrentHashMap structuralImportance, int targetTreewidth) { // Compute statistics for normalization DoubleSummaryStatistics degreeStats = degrees.values().parallelStream() .mapToDouble(Integer::doubleValue) .summaryStatistics(); DoubleSummaryStatistics importanceStats = structuralImportance.values().parallelStream() .mapToDouble(Double::doubleValue) .summaryStatistics(); // Compute comprehensive scores in parallel vertices.parallelStream().forEach(vertex -> { double score = 0.0; int degree = degrees.get(vertex); double importance = structuralImportance.get(vertex); // Component 1: Degree-based score relative to target treewidth score += computeDegreeBasedScore(degree, targetTreewidth, degreeStats); // Component 2: Structural importance score score += computeNormalizedImportanceScore(importance, importanceStats); // Component 3: Target treewidth proximity score score += computeTargetProximityScore(graph, vertex, degree, targetTreewidth); // Component 4: Treewidth reduction potential score += computeTreewidthReductionPotential(graph, vertex, targetTreewidth); // Component 5: Graph connectivity preservation penalty score -= computeConnectivityPreservationPenalty(graph, vertex, targetTreewidth); scores.put(vertex, score); }); } /** * Computes degree-based score considering the target treewidth. * Higher degree vertices that exceed target treewidth get higher scores. */ private double computeDegreeBasedScore(int degree, int targetTreewidth, DoubleSummaryStatistics degreeStats) { // Normalize degree double normalizedDegree = degreeStats.getMax() > degreeStats.getMin() ? 
(degree - degreeStats.getMin()) / (degreeStats.getMax() - degreeStats.getMin()) : 0.0; // Base score from normalized degree double baseScore = normalizedDegree; // Boost score if degree significantly exceeds target treewidth if (degree > targetTreewidth) { double excess = (double) (degree - targetTreewidth) / Math.max(1, targetTreewidth); baseScore *= (1.0 + excess); // Amplify score for high-degree vertices } // Penalty if degree is already below or at target else if (degree <= targetTreewidth) { double deficit = (double) (targetTreewidth - degree) / Math.max(1, targetTreewidth); baseScore *= (1.0 - deficit * 0.5); // Reduce score but don't eliminate } return baseScore * 0.3; // Weight: 30% of total score } /** * Computes local clustering coefficient impact on treewidth. */ private double computeLocalClusteringImpact(Graph graph, V vertex, int targetTreewidth) { Set neighbors = getNeighbors(vertex, graph); if (neighbors.size() < 2) { return 0.0; } // Count edges among neighbors AtomicInteger edgeCount = new AtomicInteger(0); List neighborList = new ArrayList<>(neighbors); neighborList.parallelStream().forEach(n1 -> { int index1 = neighborList.indexOf(n1); neighborList.stream() .skip(index1 + 1) .filter(n2 -> graph.containsEdge(n1, n2) || graph.containsEdge(n2, n1)) .forEach(n2 -> edgeCount.incrementAndGet()); }); int maxPossibleEdges = neighbors.size() * (neighbors.size() - 1) / 2; double clusteringCoefficient = maxPossibleEdges > 0 ? (double) edgeCount.get() / maxPossibleEdges : 0.0; // High clustering + high degree suggests clique-like structures that increase treewidth double impact = clusteringCoefficient * Math.min(1.0, (double) neighbors.size() / (targetTreewidth + 1)); return impact; } /** * Computes connectivity importance based on how removal affects graph connectivity. 
*/ private double computeConnectivityImportance(Graph graph, V vertex, int targetTreewidth) { Set neighbors = getNeighbors(vertex, graph); if (neighbors.size() <= 1) { return 0.1; // Low importance for low-degree vertices } // Estimate impact on connectivity double connectivityScore = 0.0; // Factor 1: Bridge potential (connecting different components) connectivityScore += estimateBridgePotential(graph, vertex, neighbors, targetTreewidth); // Factor 2: Articulation point potential connectivityScore += estimateArticulationPotential(graph, vertex, neighbors, targetTreewidth); return Math.min(1.0, connectivityScore); } /** * Estimates if vertex acts as a bridge relative to target treewidth constraints. */ private double estimateBridgePotential( Graph graph, V vertex, Set neighbors, int targetTreewidth) { if (neighbors.size() < 2) { return 0.0; } // Simple heuristic: check if neighbors are well-connected without this vertex AtomicInteger interNeighborConnections = new AtomicInteger(0); neighbors.parallelStream().forEach(n1 -> { long connections = neighbors.parallelStream() .filter(n2 -> !n1.equals(n2)) .filter(n2 -> graph.containsEdge(n1, n2) || graph.containsEdge(n2, n1)) .count(); interNeighborConnections.addAndGet((int) connections); }); double expectedConnections = neighbors.size() * (neighbors.size() - 1) / 2.0; double actualConnectionRatio = expectedConnections > 0 ? interNeighborConnections.get() / (2.0 * expectedConnections) : 0.0; // If neighbors are poorly connected, vertex is more important as bridge double bridgeScore = 1.0 - actualConnectionRatio; // Scale by target treewidth considerations double targetFactor = Math.min(1.0, (double) neighbors.size() / Math.max(1, targetTreewidth)); return bridgeScore * targetFactor; } /** * Estimates articulation point potential. 
*/
    private double estimateArticulationPotential(
            Graph graph, V vertex, Set neighbors, int targetTreewidth) {
        // Simplified articulation point detection (no DFS low-link computation;
        // this is a cheap proxy, not an exact test)
        if (neighbors.size() < 2) {
            return 0.0;
        }

        // High-degree vertices in sparse neighborhoods are likely articulation points
        double degreeRatio = Math.min(1.0, (double) neighbors.size() / Math.max(1, targetTreewidth));
        double sparsityFactor = computeNeighborhoodSparsity(graph, neighbors);

        return degreeRatio * sparsityFactor;
    }

    /**
     * Computes neighborhood density impact.
     *
     * <p>Measures how dense the neighborhood of {@code vertex} is (existing
     * neighbor-pair edges over possible pairs, direction ignored) and scales the
     * density by neighborhood size relative to {@code targetTreewidth}. Dense,
     * oversized neighborhoods contribute more to treewidth.
     *
     * @return density scaled by a size factor capped at 2.0; a flat 0.2 when the
     *         neighborhood is no larger than the target treewidth
     */
    private double computeNeighborhoodDensityImpact(Graph graph, V vertex, int targetTreewidth) {
        Set neighbors = getNeighbors(vertex, graph);
        if (neighbors.size() <= targetTreewidth) {
            return 0.2; // Low impact if neighborhood already small
        }

        // Count edges in the neighborhood; skip(index1 + 1) visits each pair once.
        // NOTE(review): indexOf() inside the parallel forEach is O(n^2) overall.
        AtomicInteger neighborhoodEdges = new AtomicInteger(0);
        List neighborList = new ArrayList<>(neighbors);

        neighborList.parallelStream().forEach(n1 -> {
            int index1 = neighborList.indexOf(n1);
            long edgeCount = neighborList.stream()
                    .skip(index1 + 1)
                    .parallel()
                    .filter(n2 -> graph.containsEdge(n1, n2) || graph.containsEdge(n2, n1))
                    .count();
            neighborhoodEdges.addAndGet((int) edgeCount);
        });

        int maxPossibleEdges = neighbors.size() * (neighbors.size() - 1) / 2;
        double density = maxPossibleEdges > 0 ? (double) neighborhoodEdges.get() / maxPossibleEdges : 0.0;

        // High density neighborhoods contribute more to treewidth
        double sizeFactor = (double) neighbors.size() / Math.max(1, targetTreewidth);

        return density * Math.min(2.0, sizeFactor);
    }

    /**
     * Computes neighborhood sparsity factor.
*/
    private double computeNeighborhoodSparsity(Graph graph, Set neighbors) {
        if (neighbors.size() < 2) {
            return 1.0;
        }

        // Count neighbor-pair edges (direction ignored); skip(index1 + 1) visits each pair once
        AtomicInteger edgeCount = new AtomicInteger(0);
        List neighborList = new ArrayList<>(neighbors);

        neighborList.parallelStream().forEach(n1 -> {
            int index1 = neighborList.indexOf(n1);
            long connections = neighborList.stream()
                    .skip(index1 + 1)
                    .parallel()
                    .filter(n2 -> graph.containsEdge(n1, n2) || graph.containsEdge(n2, n1))
                    .count();
            edgeCount.addAndGet((int) connections);
        });

        int maxPossibleEdges = neighbors.size() * (neighbors.size() - 1) / 2;
        double density = maxPossibleEdges > 0 ? (double) edgeCount.get() / maxPossibleEdges : 0.0;

        return 1.0 - density; // Higher sparsity = higher score
    }

    /**
     * Computes normalized importance score.
     *
     * <p>Min-max normalizes {@code importance} against the population statistics
     * and applies this component's weight.
     *
     * @param importance raw importance value for one vertex
     * @param importanceStats min/max statistics over all vertices' importance values
     * @return normalized value times 0.25; 0.0 when all importance values are equal
     */
    private double computeNormalizedImportanceScore(double importance, DoubleSummaryStatistics importanceStats) {
        if (importanceStats.getMax() <= importanceStats.getMin()) {
            return 0.0;
        }

        double normalized =
                (importance - importanceStats.getMin()) / (importanceStats.getMax() - importanceStats.getMin());

        return normalized * 0.25; // Weight: 25% of total score
    }

    /**
     * Computes score based on proximity to target treewidth achievement.
     *
     * @param degree the vertex's total (in + out) degree, precomputed by the caller
     * @return up to 0.25 for vertices whose local contribution exceeds the target; otherwise 0.0
     */
    private double computeTargetProximityScore(Graph graph, V vertex, int degree, int targetTreewidth) {
        Set neighbors = getNeighbors(vertex, graph);

        // Estimate local treewidth contribution
        // (neighbors.size() can be smaller than degree when parallel edges exist)
        double localTreewidthContribution = Math.max(degree, neighbors.size());

        // Score based on how much this vertex exceeds the target
        if (localTreewidthContribution > targetTreewidth) {
            double excess = (localTreewidthContribution - targetTreewidth) / Math.max(1, targetTreewidth);
            return Math.min(1.0, excess) * 0.25; // Weight: 25% of total score
        }

        return 0.0;
    }

    /**
     * Estimates the potential for treewidth reduction by removing this vertex.
*/
    private double computeTreewidthReductionPotential(Graph graph, V vertex, int targetTreewidth) {
        Set neighbors = getNeighbors(vertex, graph);

        if (neighbors.isEmpty()) {
            return 0.1; // Isolated vertices have low reduction potential
        }

        // Estimate reduction potential based on vertex properties.
        // The three factors below are blended with fixed weights 0.4 / 0.4 / 0.2.
        double potential = 0.0;

        // Factor 1: High-degree vertices in dense neighborhoods
        double degreeContribution = Math.min(1.0, (double) neighbors.size() / (targetTreewidth + 1));
        potential += degreeContribution * 0.4;

        // Factor 2: Vertices that create large cliques when eliminated
        double cliqueFormationPotential = computeCliqueFormationPotential(graph, vertex, neighbors, targetTreewidth);
        potential += cliqueFormationPotential * 0.4;

        // Factor 3: Vertices in high-treewidth substructures
        double substructurePotential = computeSubstructurePotential(graph, vertex, neighbors, targetTreewidth);
        potential += substructurePotential * 0.2;

        return Math.min(1.0, potential) * 0.15; // Weight: 15% of total score
    }

    /**
     * Computes potential for clique formation when vertex is eliminated.
*/
    private double computeCliqueFormationPotential(
            Graph graph, V vertex, Set neighbors, int targetTreewidth) {
        if (neighbors.size() <= targetTreewidth) {
            return 0.2; // Low potential if neighborhood already small
        }

        // Estimate how many edges would need to be added to make neighborhood a clique
        // (vertex elimination connects all remaining neighbor pairs).
        // skip(index1 + 1) visits each unordered pair exactly once.
        AtomicInteger existingEdges = new AtomicInteger(0);
        List neighborList = new ArrayList<>(neighbors);

        neighborList.parallelStream().forEach(n1 -> {
            int index1 = neighborList.indexOf(n1);
            long edgeCount = neighborList.stream()
                    .skip(index1 + 1)
                    .parallel()
                    .filter(n2 -> graph.containsEdge(n1, n2) || graph.containsEdge(n2, n1))
                    .count();
            existingEdges.addAndGet((int) edgeCount);
        });

        int maxPossibleEdges = neighbors.size() * (neighbors.size() - 1) / 2;
        int missingEdges = maxPossibleEdges - existingEdges.get();

        // Higher missing edges = higher potential for treewidth increase if not removed
        double missingRatio = maxPossibleEdges > 0 ? (double) missingEdges / maxPossibleEdges : 0.0;

        // Scale by size relative to target treewidth
        double sizeFactor = Math.min(2.0, (double) neighbors.size() / Math.max(1, targetTreewidth));

        return missingRatio * sizeFactor;
    }

    /**
     * Computes substructure potential impact.
     *
     * @return the fraction of this vertex's neighbors whose own total degree
     *         exceeds the target treewidth, in [0, 1]
     */
    private double computeSubstructurePotential(
            Graph graph, V vertex, Set neighbors, int targetTreewidth) {
        // Simple heuristic: vertices with many high-degree neighbors
        return neighbors.parallelStream()
                        .mapToInt(neighbor -> graph.inDegreeOf(neighbor) + graph.outDegreeOf(neighbor))
                        .filter(degree -> degree > targetTreewidth)
                        .count()
                / (double) Math.max(1, neighbors.size());
    }

    /**
     * Computes penalty for removing vertices that are crucial for connectivity.
*/
    private double computeConnectivityPreservationPenalty(Graph graph, V vertex, int targetTreewidth) {
        Set neighbors = getNeighbors(vertex, graph);

        // Penalty for removing vertices that maintain important connections
        double penalty = 0.0;

        // Factor 1: Bridge vertices get higher penalty
        if (isBridgeVertex(graph, vertex, neighbors)) {
            penalty += 0.3;
        }

        // Factor 2: Articulation points get penalty
        if (isLikelyArticulationPoint(graph, vertex, neighbors)) {
            penalty += 0.2;
        }

        // Factor 3: Vertices connecting different high-degree components
        penalty += computeComponentConnectionPenalty(graph, vertex, neighbors, targetTreewidth);

        return Math.min(0.5, penalty); // Cap penalty at 50% of score
    }

    /**
     * Applies target treewidth specific adjustments to scores.
     *
     * <p>Mutates {@code scores} in place: boosts vertices whose degree is well
     * above the target, rescales every score by how far the vertex exceeds the
     * target, and clamps results to [0, 10].
     *
     * @param scores map of vertex to score, updated in place
     */
    private void applyTargetTreewidthAdjustmentsParallel(
            Graph graph, Set vertices, ConcurrentHashMap scores, int targetTreewidth) {

        // Compute current graph statistics
        // NOTE(review): scoreStats is computed but never read below — confirm whether
        // it was meant to feed a normalization step, otherwise it can be removed.
        DoubleSummaryStatistics scoreStats = scores.values().parallelStream()
                .mapToDouble(Double::doubleValue)
                .summaryStatistics();

        // Apply adjustments in parallel
        vertices.parallelStream().forEach(vertex -> {
            double currentScore = scores.get(vertex);
            double adjustedScore = currentScore;

            // Adjustment 1: Boost vertices that significantly exceed target treewidth
            int degree = graph.inDegreeOf(vertex) + graph.outDegreeOf(vertex);
            if (degree > targetTreewidth * 1.5) {
                adjustedScore *= 1.3; // 30% boost for high-degree vertices
            }

            // Adjustment 2: Normalize relative to target treewidth
            double targetNormalizedFactor =
                    1.0 + (double) Math.max(0, degree - targetTreewidth) / Math.max(1, targetTreewidth);
            adjustedScore *= targetNormalizedFactor;

            // Adjustment 3: Apply final bounds
            adjustedScore = Math.max(0.0, Math.min(10.0, adjustedScore));

            scores.put(vertex, adjustedScore);
        });
    }

    /**
     * Helper method to get all neighbors of a vertex.
*/
    private Set getNeighbors(V vertex, Graph graph) {
        // Concurrent set: populated from parallel streams below
        Set neighbors = ConcurrentHashMap.newKeySet();

        // Add in-neighbors (self-loops excluded by the filter)
        graph.incomingEdgesOf(vertex).parallelStream()
                .map(graph::getEdgeSource)
                .filter(neighbor -> !neighbor.equals(vertex))
                .forEach(neighbors::add);

        // Add out-neighbors
        graph.outgoingEdgesOf(vertex).parallelStream()
                .map(graph::getEdgeTarget)
                .filter(neighbor -> !neighbor.equals(vertex))
                .forEach(neighbors::add);

        return neighbors;
    }

    /**
     * Simple bridge vertex detection heuristic.
     *
     * @return true when fewer than 30% of the possible neighbor-pair connections
     *         exist, i.e. the neighborhood would likely fall apart without this vertex
     */
    private boolean isBridgeVertex(Graph graph, V vertex, Set neighbors) {
        if (neighbors.size() < 2) {
            return false;
        }

        // Check if removal would significantly disconnect the neighborhood.
        // The nested stream counts each undirected connection twice (ordered pairs).
        long interNeighborConnections = neighbors.parallelStream()
                        .mapToLong(n1 -> neighbors.parallelStream()
                                .filter(n2 -> !n1.equals(n2))
                                .filter(n2 -> graph.containsEdge(n1, n2) || graph.containsEdge(n2, n1))
                                .count())
                        .sum()
                / 2; // Divide by 2 to avoid double counting

        double expectedConnections = neighbors.size() * (neighbors.size() - 1) / 2.0;

        return interNeighborConnections < expectedConnections * 0.3; // Less than 30% connected
    }

    /**
     * Simple articulation point detection heuristic.
     *
     * @return true for vertices with at least three neighbors that also pass the bridge test
     */
    private boolean isLikelyArticulationPoint(Graph graph, V vertex, Set neighbors) {
        return neighbors.size() >= 3 && isBridgeVertex(graph, vertex, neighbors);
    }

    /**
     * Computes penalty for removing vertices that connect different components.
*/
    private double computeComponentConnectionPenalty(
            Graph graph, V vertex, Set neighbors, int targetTreewidth) {
        if (neighbors.size() < 2) {
            return 0.0;
        }

        // Count high-degree neighbors (potential component representatives)
        long highDegreeNeighbors = neighbors.parallelStream()
                .mapToInt(neighbor -> graph.inDegreeOf(neighbor) + graph.outDegreeOf(neighbor))
                .filter(degree -> degree > targetTreewidth)
                .count();

        if (highDegreeNeighbors >= 2) {
            // Vertex connects multiple high-degree components
            return Math.min(0.3, highDegreeNeighbors * 0.1);
        }

        return 0.0;
    }

    /**
     * Utility method to safely shutdown thread pool.
     *
     * <p>Requests an orderly shutdown, waits up to 60 seconds, then forces
     * shutdown. Restores the interrupt flag if interrupted while waiting.
     */
    private void shutdownThreadPool(ForkJoinPool threadPool) {
        threadPool.shutdown();
        try {
            if (!threadPool.awaitTermination(60, TimeUnit.SECONDS)) {
                threadPool.shutdownNow();
            }
        } catch (InterruptedException e) {
            threadPool.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Alternative method for adaptive scoring based on current vs target treewidth.
     * TODO: Revisit?
     *
     * <p>Scales the base removal scores up by the relative gap between the
     * current and target treewidth, clamped to 10.0. Returns the base scores
     * unchanged when the current treewidth already meets the target.
     */
    public ConcurrentHashMap computeAdaptiveVertexRemovalScore(
            Graph graph, int targetTreewidth, int currentTreewidth) {
        ConcurrentHashMap baseScores = computeVertexRemovalScore(graph, targetTreewidth);

        if (currentTreewidth <= targetTreewidth) {
            return baseScores; // Already at or below target
        }

        // Apply adaptive scaling based on the gap between current and target treewidth
        double scalingFactor = (double) (currentTreewidth - targetTreewidth) / Math.max(1, targetTreewidth);

        baseScores.entrySet().parallelStream().forEach(entry -> {
            double adjustedScore = entry.getValue() * (1.0 + scalingFactor);
            entry.setValue(Math.min(10.0, adjustedScore));
        });

        return baseScores;
    }

    /**
     * Computes structural importance of a vertex
     *
     * <p>Blends total degree, a pre-computed centrality value (weighted x10) and
     * the number of triangles through the vertex (weighted x0.5).
     */
    private double computeStructuralImportance(Graph graph, V vertex, double centrality) {
        int degree = graph.degreeOf(vertex);
        Set neighbors = Graphs.neighborSetOf(graph, vertex);

        // Count triangles involving this vertex.
        // Ordered neighbor pairs are counted, hence the /2.
        long triangles = neighbors.parallelStream()
                        .mapToLong(n1 -> neighbors.stream()
                                .filter(n2 -> !n1.equals(n2) && graph.containsEdge(n1, n2))
                                .count())
                        .sum()
                / 2;

        return degree + centrality * 10 + triangles * 0.5;
    }

    /**
     * Computes betweenness centrality for all vertices
     *
     * <p>NOTE(review): appears to be the superseded implementation (see
     * {@code computeBetweennessCentrality} below). It only runs BFS from the
     * first 50 vertices of the parallel stream and does not rescale for the
     * truncation, so values are not comparable across graph sizes.
     */
    private Map originalComputeBetweennessCentrality(Graph graph) {
        Map centrality = new ConcurrentHashMap<>();
        List vertices = new ArrayList<>(graph.vertexSet());

        // Initialize all centralities to 0
        vertices.parallelStream().forEach(v -> centrality.put(v, 0.0));

        // For efficiency, sample pairs of vertices for large graphs
        // sampleSize and random were not used...
        int sampleSize = Math.min(vertices.size() * (vertices.size() - 1) / 2, 1000);
        Random random = new Random(42); // Fixed seed for reproducibility

        vertices.parallelStream().limit(Math.min(50, vertices.size())).forEach(source -> {
            // Per-source Brandes bookkeeping (thread-local; each source is independent)
            Map> predecessors = new HashMap<>();
            Map distances = new HashMap<>();
            Map pathCounts = new HashMap<>();
            Stack stack = new Stack<>();

            // BFS from source
            Queue queue = new ArrayDeque<>();
            queue.offer(source);
            distances.put(source, 0);
            pathCounts.put(source, 1);

            while (!queue.isEmpty()) {
                V current = queue.poll();
                stack.push(current);

                for (V neighbor : Graphs.neighborListOf(graph, current)) {
                    if (!distances.containsKey(neighbor)) {
                        distances.put(neighbor, distances.get(current) + 1);
                        pathCounts.put(neighbor, 0);
                        queue.offer(neighbor);
                    }
                    if (distances.get(neighbor) == distances.get(current) + 1) {
                        pathCounts.put(neighbor, pathCounts.get(neighbor) + pathCounts.get(current));
                        predecessors
                                .computeIfAbsent(neighbor, k -> new ArrayList<>())
                                .add(current);
                    }
                }
            }

            // Accumulate centrality values (reverse BFS order via the stack)
            Map dependency = new HashMap<>();
            vertices.forEach(v -> dependency.put(v, 0.0));

            while (!stack.isEmpty()) {
                V vertex = stack.pop();
                if (predecessors.containsKey(vertex)) {
                    for (V predecessor : predecessors.get(vertex)) {
                        double contribution = (pathCounts.get(predecessor) / (double) pathCounts.get(vertex))
                                * (1.0 + dependency.get(vertex));
                        dependency.put(predecessor, dependency.get(predecessor) + contribution);
                    }
                }
                if (!vertex.equals(source)) {
                    // NOTE(review): centrality is a ConcurrentHashMap; merge() would
                    // make this lock-free instead of synchronizing on the whole map.
                    synchronized (centrality) {
                        centrality.put(vertex, centrality.get(vertex) + dependency.get(vertex));
                    }
                }
            }
        });

        return centrality;
    }

    /**
     * Computes approximated betweenness centrality using random sampling.
     *
     * This implementation is based on Brandes' approximation algorithm that uses
     * random sampling of source vertices to approximate betweenness centrality values.
     * Instead of computing shortest paths from all vertices, we sample only a subset
     * to achieve significant speedup while maintaining reasonable accuracy.
     *
     * @return a map containing approximate betweenness centrality values for each vertex
     */
    public Map computeBetweennessCentrality(Graph graph) {
        Set vertices = graph.vertexSet();
        int n = vertices.size();

        if (n <= 2) {
            // For very small graphs, return exact computation
            return computeExactBetweennessCentrality(graph);
        }

        // Calculate sample size based on graph characteristics and desired accuracy
        // Using the formula from Riondato & Kornaropoulos and Brandes & Pich research
        double epsilon = 0.1; // Desired approximation error (can be made configurable)
        double delta = 0.1; // Probability of exceeding error bound (can be made configurable)

        // Compute sample size - various strategies exist in literature:
        // 1. Fixed percentage of nodes (simple but effective)
        // 2. Based on graph diameter and error bounds (more theoretical)
        // 3. Adaptive sampling based on convergence
        int sampleSize = Math.min(n, Math.max(10, (int) Math.ceil(
                Math.log(2.0 / delta) / (2 * epsilon * epsilon)
                        * Math.log(n) // Additional factor based on network size
                )));

        // For very large graphs, cap the sample size to ensure efficiency
        if (n > 10000) {
            sampleSize = Math.min(sampleSize, n / 10); // At most 10% of vertices
        }

        System.out.println("Computing approximated betweenness centrality with " + sampleSize + " samples out of "
                + n + " vertices");

        // Initialize betweenness centrality scores
        Map betweenness = new HashMap<>();
        vertices.forEach(v -> betweenness.put(v, 0.0));

        // Random number generator for sampling
        Random random = ThreadLocalRandom.current();

        // Convert vertices to list for random sampling
        List vertexList = new ArrayList<>(vertices);

        // Sample source vertices and compute contributions
        Set sampledSources = sampleSourceVertices(graph, vertexList, sampleSize, random);

        // Compute betweenness contributions from sampled sources
        for (V source : sampledSources) {
            Map contributions = computeSingleSourceBetweennessContributions(graph, source);

            // Add contributions to total betweenness (scaled by sampling factor)
            double scalingFactor = (double) n / sampleSize;
            for (Map.Entry entry : contributions.entrySet()) {
                V vertex = entry.getKey();
                double contribution = entry.getValue() * scalingFactor;
                betweenness.merge(vertex, contribution, Double::sum);
            }
        }

        return betweenness;
    }

    /**
     * Samples source vertices using different strategies based on graph characteristics.
 * @param graph the graph whose characteristics pick the sampling strategy
     * @param vertexList list of all vertices
     * @param sampleSize number of vertices to sample
     * @param random random number generator
     * @return set of sampled source vertices
     */
    private Set sampleSourceVertices(
            Graph graph, List vertexList, int sampleSize, Random random) {
        Set sampledSources = new HashSet<>();

        // Strategy 1: Degree-weighted sampling (Brandes & Pich approach)
        // Higher degree vertices are more likely to be selected as they lie on more paths
        if (shouldUseDegreeWeightedSampling(graph)) {
            sampledSources = degreeWeightedSampling(graph, vertexList, sampleSize, random);
        }
        // Strategy 2: Uniform random sampling (simpler, often effective)
        else {
            sampledSources = uniformRandomSampling(vertexList, sampleSize, random);
        }

        return sampledSources;
    }

    /**
     * Determines whether to use degree-weighted sampling based on graph characteristics.
     *
     * @return true for graphs with more than 100 vertices
     */
    private boolean shouldUseDegreeWeightedSampling(Graph graph) {
        // Use degree-weighted sampling for larger, more complex networks
        return graph.vertexSet().size() > 100;
    }

    /**
     * Performs degree-weighted random sampling of source vertices.
     * Vertices with higher degrees have higher probability of being selected.
*/
    private Set degreeWeightedSampling(
            Graph graph, List vertexList, int sampleSize, Random random) {
        Set sampledSources = new HashSet<>();

        // Calculate degree weights
        Map degrees = new HashMap<>();
        int totalDegree = 0;

        for (V vertex : vertexList) {
            int degree = graph.inDegreeOf(vertex) + graph.outDegreeOf(vertex);
            degrees.put(vertex, degree);
            totalDegree += degree;
        }

        // If all vertices have degree 0, fall back to uniform sampling
        if (totalDegree == 0) {
            return uniformRandomSampling(vertexList, sampleSize, random);
        }

        // Sample vertices with probability proportional to their degree
        // (roulette-wheel selection; already-sampled vertices are skipped, which
        // slightly skews the wheel but keeps selection degree-biased)
        while (sampledSources.size() < sampleSize && sampledSources.size() < vertexList.size()) {
            double randomValue = random.nextDouble() * totalDegree;
            double cumulativeWeight = 0;

            for (V vertex : vertexList) {
                if (sampledSources.contains(vertex)) continue;
                cumulativeWeight += degrees.get(vertex);
                if (randomValue <= cumulativeWeight) {
                    sampledSources.add(vertex);
                    break;
                }
            }

            // Prevent infinite loop in edge cases
            if (sampledSources.size() == vertexList.size()) break;
        }

        return sampledSources;
    }

    /**
     * Performs uniform random sampling of source vertices.
     */
    private Set uniformRandomSampling(List vertexList, int sampleSize, Random random) {
        Set sampledSources = new HashSet<>();

        // Rejection sampling: redraw until an unseen vertex is found.
        // (Not reservoir sampling, despite what an earlier comment claimed; fine while
        // sampleSize is well below vertexList.size(), degenerates as they approach each other.)
        for (int i = 0; i < Math.min(sampleSize, vertexList.size()); i++) {
            V vertex;
            do {
                vertex = vertexList.get(random.nextInt(vertexList.size()));
            } while (sampledSources.contains(vertex));
            sampledSources.add(vertex);
        }

        return sampledSources;
    }

    /**
     * Computes betweenness centrality contributions from a single source vertex.
     * This is the core Brandes algorithm for single-source shortest paths.
* * @param graph * @param source the source vertex * @return map of betweenness contributions for each vertex */ private Map computeSingleSourceBetweennessContributions(Graph graph, V source) { Map contributions = new HashMap<>(); Map> predecessors = new HashMap<>(); Map sigma = new HashMap<>(); // Number of shortest paths Map distance = new HashMap<>(); Map delta = new HashMap<>(); // Dependency values // Initialize graph.vertexSet().forEach(v -> { predecessors.put(v, new ArrayList<>()); sigma.put(v, 0.0); distance.put(v, -1); delta.put(v, 0.0); contributions.put(v, 0.0); }); sigma.put(source, 1.0); distance.put(source, 0); // BFS to find shortest paths and count them Queue queue = new LinkedList<>(); Stack stack = new Stack<>(); queue.offer(source); while (!queue.isEmpty()) { V vertex = queue.poll(); stack.push(vertex); // Examine outgoing edges for (DefaultEdge edge : graph.outgoingEdgesOf(vertex)) { V neighbor = graph.getEdgeTarget(edge); // First time visiting neighbor if (distance.get(neighbor) < 0) { queue.offer(neighbor); distance.put(neighbor, distance.get(vertex) + 1); } // Shortest path to neighbor via vertex if (distance.get(neighbor).equals(distance.get(vertex) + 1)) { sigma.put(neighbor, sigma.get(neighbor) + sigma.get(vertex)); predecessors.get(neighbor).add(vertex); } } } // Accumulation phase - compute dependencies while (!stack.isEmpty()) { V vertex = stack.pop(); for (V predecessor : predecessors.get(vertex)) { double contribution = (sigma.get(predecessor) / sigma.get(vertex)) * (1 + delta.get(vertex)); delta.put(predecessor, delta.get(predecessor) + contribution); } if (!vertex.equals(source)) { contributions.put(vertex, delta.get(vertex)); } } return contributions; } /** * Computes exact betweenness centrality for small graphs or when high precision is needed. 
 * @return map of exact betweenness centrality values
     */
    private Map computeExactBetweennessCentrality(Graph graph) {
        Map betweenness = new HashMap<>();
        Set vertices = graph.vertexSet();

        // Initialize all betweenness values to 0
        vertices.forEach(v -> betweenness.put(v, 0.0));

        // Compute contributions from each vertex as source (full Brandes, no sampling)
        for (V source : vertices) {
            Map contributions = computeSingleSourceBetweennessContributions(graph, source);
            for (Map.Entry entry : contributions.entrySet()) {
                V vertex = entry.getKey();
                betweenness.merge(vertex, entry.getValue(), Double::sum);
            }
        }

        return betweenness;
    }

    /**
     * Alternative adaptive sampling approach that adjusts sample size based on convergence.
     * This can provide better accuracy guarantees but is more computationally expensive.
     *
     * <p>NOTE(review): each batch's contributions are scaled by n/sampleCount at the
     * time the batch is added, but earlier batches (scaled by a larger factor) are
     * never rescaled as sampleCount grows — confirm whether the totals should be
     * renormalized per iteration.
     */
    public Map computeBetweennessCentralityAdaptive(Graph graph) {
        Set vertices = graph.vertexSet();
        int n = vertices.size();

        Map betweenness = new HashMap<>();
        vertices.forEach(v -> betweenness.put(v, 0.0));

        List vertexList = new ArrayList<>(vertices);
        Random random = ThreadLocalRandom.current();

        int minSamples = Math.max(10, n / 100);
        int maxSamples = Math.min(n, n / 2);

        Map previousBetweenness = new HashMap<>(betweenness);
        double convergenceThreshold = 0.01; // 1% change threshold

        for (int sampleCount = minSamples; sampleCount <= maxSamples; sampleCount += minSamples) {
            // Sample additional vertices
            Set newSamples = uniformRandomSampling(vertexList, minSamples, random);

            // Compute contributions from new samples
            for (V source : newSamples) {
                Map contributions = computeSingleSourceBetweennessContributions(graph, source);
                double scalingFactor = (double) n / sampleCount;
                for (Map.Entry entry : contributions.entrySet()) {
                    V vertex = entry.getKey();
                    double contribution = entry.getValue() * scalingFactor;
                    betweenness.merge(vertex, contribution, Double::sum);
                }
            }

            // Check for convergence
            if (hasConverged(betweenness, previousBetweenness, convergenceThreshold)) {
                System.out.println("Converged after " + sampleCount + " samples");
                break;
            }

            previousBetweenness = new HashMap<>(betweenness);
        }

        return betweenness;
    }

    /**
     * Checks if betweenness centrality values have converged.
     *
     * @return true when every vertex's value changed by at most {@code threshold}
     *         relative to its previous value (values previously zero must stay
     *         at or below {@code threshold} in absolute terms)
     */
    private boolean hasConverged(Map current, Map previous, double threshold) {
        for (V vertex : current.keySet()) {
            double currentValue = current.get(vertex);
            double previousValue = previous.getOrDefault(vertex, 0.0);

            if (previousValue > 0) {
                double relativeChange = Math.abs(currentValue - previousValue) / previousValue;
                if (relativeChange > threshold) {
                    return false;
                }
            } else if (currentValue > threshold) {
                return false; // Significant change from zero
            }
        }
        return true;
    }

    /**
     * Finds articulation points in the graph
     *
     * <p>Brute force: for every vertex, rebuilds the graph without it as an
     * undirected copy and compares connected-component counts — O(V * (V + E)).
     * Exact but expensive; fine for small graphs.
     */
    private Set findArticulationPoints(Graph graph) {
        Set articulationPoints = ConcurrentHashMap.newKeySet();

        for (V vertex : graph.vertexSet()) {
            // Check if removing this vertex increases number of connected components
            Graph testGraph = new DefaultUndirectedGraph<>(DefaultEdge.class);

            // Copy graph without the test vertex
            graph.vertexSet().stream().filter(v -> !v.equals(vertex)).forEach(testGraph::addVertex);
            graph.edgeSet().forEach(edge -> {
                V source = graph.getEdgeSource(edge);
                V target = graph.getEdgeTarget(edge);
                if (!source.equals(vertex) && !target.equals(vertex)) {
                    testGraph.addEdge(source, target);
                }
            });

            // Count connected components (weak connectivity for directed graphs)
            ConnectivityInspector originalInspector = new ConnectivityInspector<>(graph);
            ConnectivityInspector testInspector = new ConnectivityInspector<>(testGraph);

            if (testInspector.connectedSets().size() > originalInspector.connectedSets().size()) {
                articulationPoints.add(vertex);
            }
        }

        return articulationPoints;
    }

    /**
     * Computes approximated betweenness centrality using random sampling.
     *
     * This implementation is based on Brandes' approximation algorithm that uses
     * random sampling of source vertices to approximate betweenness centrality values.
* Instead of computing shortest paths from all vertices, we sample only a subset * to achieve significant speedup while maintaining reasonable accuracy. * * @return a map containing approximate betweenness centrality values for each vertex */ private Map computeBetweennessCentralityParallel(Graph graph) { return betweennessCentralityCache.computeIfAbsent(graph, g -> { Set vertices = g.vertexSet(); int n = vertices.size(); if (n <= 2) { // For very small graphs, return exact computation return computeExactBetweennessCentralityParallel(g); } // Calculate sample size based on graph characteristics and desired accuracy double epsilon = 0.1; // Desired approximation error double delta = 0.1; // Probability of exceeding error bound int initialSampleSize = Math.min( n, Math.max(10, (int) Math.ceil(Math.log(2.0 / delta) / (2 * epsilon * epsilon) * Math.log(n)))); int sampleSize; // For very large graphs, cap the sample size if (n > 10000) { sampleSize = Math.min(initialSampleSize, n / 10); } else { sampleSize = initialSampleSize; } System.out.println("Computing approximated betweenness centrality with " + sampleSize + " samples out of " + n + " vertices (parallel)"); // Initialize concurrent betweenness centrality scores ConcurrentHashMap betweenness = new ConcurrentHashMap<>(); vertices.parallelStream().forEach(v -> betweenness.put(v, 0.0)); // Thread-safe random number generator ThreadLocalRandom random = ThreadLocalRandom.current(); // Convert vertices to concurrent list for thread-safe access List vertexList = new CopyOnWriteArrayList<>(vertices); // Custom ForkJoinPool for better control over parallelization ForkJoinPool customThreadPool = new ForkJoinPool( Math.min( Runtime.getRuntime().availableProcessors(), Math.max(1, sampleSize / 10)) // Scale threads based on sample size ); try { CompletableFuture computation = CompletableFuture.runAsync( () -> { // Sample source vertices in parallel Set sampledSources = sampleSourceVerticesParallel(g, vertexList, sampleSize, 
random); // Scaling factor for approximation double scalingFactor = (double) n / sampleSize; // Process sampled sources in parallel and accumulate results sampledSources.parallelStream().forEach(source -> { ConcurrentHashMap contributions = computeSingleSourceBetweennessContributionsParallel(g, source); // Atomically update betweenness values with scaling contributions.entrySet().parallelStream().forEach(entry -> { V vertex = entry.getKey(); double scaledContribution = entry.getValue() * scalingFactor; betweenness.merge(vertex, scaledContribution, Double::sum); }); }); }, customThreadPool); // Wait for completion computation.get(); } catch (InterruptedException | ExecutionException e) { Thread.currentThread().interrupt(); throw new RuntimeException("Parallel betweenness centrality computation failed", e); } finally { customThreadPool.shutdown(); try { if (!customThreadPool.awaitTermination(60, TimeUnit.SECONDS)) { customThreadPool.shutdownNow(); } } catch (InterruptedException e) { customThreadPool.shutdownNow(); Thread.currentThread().interrupt(); } } return betweenness; }); } /** * Samples source vertices using parallel processing with different sampling strategies. */ private Set sampleSourceVerticesParallel( Graph graph, List vertexList, int sampleSize, ThreadLocalRandom random) { if (shouldUseDegreeWeightedSampling(graph)) { return degreeWeightedSamplingParallel(graph, vertexList, sampleSize, random); } else { return uniformRandomSamplingParallel(vertexList, sampleSize, random); } } /** * Performs degree-weighted random sampling using parallel streams. 
*/ private Set degreeWeightedSamplingParallel( Graph graph, List vertexList, int sampleSize, ThreadLocalRandom random) { // Calculate degrees in parallel ConcurrentMap degrees = vertexList.parallelStream() .collect(Collectors.toConcurrentMap( vertex -> vertex, vertex -> graph.inDegreeOf(vertex) + graph.outDegreeOf(vertex))); // Calculate total degree int totalDegree = degrees.values().parallelStream().mapToInt(Integer::intValue).sum(); if (totalDegree == 0) { return uniformRandomSamplingParallel(vertexList, sampleSize, random); } // Use concurrent set for thread-safe sampling Set sampledSources = ConcurrentHashMap.newKeySet(); AtomicInteger samplesNeeded = new AtomicInteger(sampleSize); // Parallel sampling with retry mechanism vertexList.parallelStream().filter(vertex -> samplesNeeded.get() > 0).forEach(vertex -> { if (samplesNeeded.get() <= 0 || sampledSources.contains(vertex)) { return; } // Thread-local random for each thread ThreadLocalRandom localRandom = ThreadLocalRandom.current(); double probability = (double) degrees.get(vertex) / totalDegree; // Adaptive probability to ensure we get enough samples double adjustedProbability = Math.min(1.0, probability * sampleSize * 2.0 / vertexList.size()); if (localRandom.nextDouble() < adjustedProbability && sampledSources.size() < sampleSize) { sampledSources.add(vertex); samplesNeeded.decrementAndGet(); } }); // Fill remaining slots with uniform sampling if needed if (sampledSources.size() < sampleSize) { Set additionalSamples = vertexList.parallelStream() .filter(vertex -> !sampledSources.contains(vertex)) .limit(sampleSize - sampledSources.size()) .collect(Collectors.toSet()); sampledSources.addAll(additionalSamples); } return sampledSources; } /** * Performs uniform random sampling using parallel streams. 
*/ private Set uniformRandomSamplingParallel(List vertexList, int sampleSize, ThreadLocalRandom random) { // Use parallel stream to shuffle and take first sampleSize elements return vertexList.parallelStream() .unordered() // Allow parallel processing without ordering constraints .distinct() // Ensure uniqueness .limit(sampleSize) .collect(Collectors.toConcurrentMap( vertex -> vertex, vertex -> ThreadLocalRandom.current().nextDouble())) .entrySet() .parallelStream() .sorted(Map.Entry.comparingByValue()) // Sort by random values .limit(sampleSize) .map(Map.Entry::getKey) .collect(Collectors.toSet()); } /** * Computes single-source betweenness contributions using parallel processing. */ private ConcurrentHashMap computeSingleSourceBetweennessContributionsParallel( Graph graph, V source) { Set vertices = graph.vertexSet(); ConcurrentHashMap contributions = new ConcurrentHashMap<>(); ConcurrentHashMap> predecessors = new ConcurrentHashMap<>(); ConcurrentHashMap sigma = new ConcurrentHashMap<>(); ConcurrentHashMap distance = new ConcurrentHashMap<>(); ConcurrentHashMap delta = new ConcurrentHashMap<>(); // Parallel initialization vertices.parallelStream().forEach(v -> { predecessors.put(v, new CopyOnWriteArrayList<>()); sigma.put(v, new AtomicDouble(0.0)); distance.put(v, new AtomicInteger(-1)); delta.put(v, new AtomicDouble(0.0)); contributions.put(v, 0.0); }); sigma.get(source).set(1.0); distance.get(source).set(0); // BFS with level-wise parallel processing ConcurrentLinkedQueue currentLevel = new ConcurrentLinkedQueue<>(); ConcurrentLinkedQueue nextLevel = new ConcurrentLinkedQueue<>(); ConcurrentLinkedQueue visitOrder = new ConcurrentLinkedQueue<>(); currentLevel.offer(source); while (!currentLevel.isEmpty()) { nextLevel.clear(); // Process current level for (V vertex : currentLevel) { visitOrder.offer(vertex); // Examine outgoing edges for (DefaultEdge edge : graph.outgoingEdgesOf(vertex)) { V neighbor = graph.getEdgeTarget(edge); int currentDist = 
distance.get(vertex).get(); // Atomic check and update for first visit if (distance.get(neighbor).compareAndSet(-1, currentDist + 1)) { nextLevel.offer(neighbor); } // Check if this is a shortest path if (distance.get(neighbor).get() == currentDist + 1) { sigma.get(neighbor).addAndGet(sigma.get(vertex).get()); predecessors.get(neighbor).add(vertex); } } } // Swap levels ConcurrentLinkedQueue temp = currentLevel; currentLevel = nextLevel; nextLevel = temp; } // Accumulation phase - process in reverse order List reversedOrder = new ArrayList<>(visitOrder); Collections.reverse(reversedOrder); // Process accumulation in parallel batches to maintain dependencies reversedOrder.parallelStream().forEach(vertex -> { if (!vertex.equals(source)) { // Process predecessors in parallel predecessors.get(vertex).parallelStream().forEach(predecessor -> { double sigmaRatio = sigma.get(predecessor).get() / sigma.get(vertex).get(); double contribution = sigmaRatio * (1 + delta.get(vertex).get()); delta.get(predecessor).addAndGet(contribution); }); contributions.put(vertex, delta.get(vertex).get()); } }); return contributions; } /** * Computes exact betweenness centrality for small graphs using parallel processing. */ private ConcurrentHashMap computeExactBetweennessCentralityParallel(Graph graph) { Set vertices = graph.vertexSet(); ConcurrentHashMap betweenness = new ConcurrentHashMap<>(); // Initialize in parallel vertices.parallelStream().forEach(v -> betweenness.put(v, 0.0)); // Compute contributions from each vertex as source in parallel vertices.parallelStream().forEach(source -> { ConcurrentHashMap contributions = computeSingleSourceBetweennessContributionsParallel(graph, source); // Atomically merge contributions contributions.entrySet().parallelStream().forEach(entry -> { betweenness.merge(entry.getKey(), entry.getValue(), Double::sum); }); }); return betweenness; } /** * Adaptive parallel sampling with convergence detection. 
*/
    public ConcurrentHashMap<V, Double> computeBetweennessCentralityAdaptiveParallel(Graph<V, DefaultEdge> graph) {
        Set<V> vertices = graph.vertexSet();
        int n = vertices.size();
        ConcurrentHashMap<V, Double> betweenness = new ConcurrentHashMap<>();
        vertices.parallelStream().forEach(v -> betweenness.put(v, 0.0));

        List<V> vertexList = new CopyOnWriteArrayList<>(vertices);
        AtomicInteger totalSamples = new AtomicInteger(0);

        int minSamples = Math.max(10, n / 100);
        int maxSamples = Math.min(n, n / 2);
        int batchSize = Math.max(1, minSamples / 4);

        ConcurrentHashMap<V, Double> previousBetweenness = new ConcurrentHashMap<>(betweenness);
        double convergenceThreshold = 0.01;

        // Adaptive sampling with convergence checking.
        // FIX: the original ran this IntStream with .parallel(); takeWhile requires an ordered
        // stream and a stateless predicate, but this predicate mutates shared state
        // (betweenness/previousBetweenness) and each batch's scaling depends on the running
        // total - concurrent batches therefore raced and the convergence check was meaningless.
        // Batches now run sequentially; the per-source work inside a batch stays parallel.
        IntStream.range(0, (maxSamples - minSamples) / batchSize + 1)
                .takeWhile(batchIndex -> {
                    int currentBatchStart = minSamples + batchIndex * batchSize;
                    int currentBatchSize = Math.min(batchSize, maxSamples - currentBatchStart);

                    if (currentBatchSize <= 0) return false;

                    // Sample a new batch of source vertices.
                    Set<V> newSamples =
                            uniformRandomSamplingParallel(vertexList, currentBatchSize, ThreadLocalRandom.current());

                    // Running total of samples taken so far, used to scale contributions.
                    int currentTotal = totalSamples.addAndGet(currentBatchSize);

                    // Compute contributions from the new samples in parallel.
                    newSamples.parallelStream().forEach(source -> {
                        ConcurrentHashMap<V, Double> contributions =
                                computeSingleSourceBetweennessContributionsParallel(graph, source);

                        // NOTE(review): earlier batches were scaled with a smaller running total,
                        // so this is an approximation rather than an exact estimator - confirm
                        // against the intended estimator before relying on absolute values.
                        double scalingFactor = (double) n / currentTotal;

                        contributions.entrySet().parallelStream().forEach(entry -> {
                            V vertex = entry.getKey();
                            double contribution = entry.getValue() * scalingFactor;
                            betweenness.merge(vertex, contribution, Double::sum);
                        });
                    });

                    // Check convergence against the previous batch's estimate.
                    boolean converged = hasConvergedParallel(betweenness, previousBetweenness, convergenceThreshold);
                    if (converged) {
                        System.out.println("Converged after " + currentTotal + " samples (parallel)");
                        return false; // Stop sampling
                    }

                    // Snapshot current values for the next iteration's convergence check.
                    previousBetweenness.clear();
                    betweenness.entrySet().parallelStream()
                            .forEach(entry -> previousBetweenness.put(entry.getKey(), entry.getValue()));

                    return true; // Continue sampling
                })
                .forEach(batchIndex -> {
                    /* Processing handled in takeWhile */
                });

        return betweenness;
    }

    /**
     * Parallel convergence check: every vertex's estimate must have moved by at most
     * the given relative threshold since the previous snapshot.
     */
    private boolean hasConvergedParallel(
            ConcurrentHashMap<V, Double> current, ConcurrentHashMap<V, Double> previous, double threshold) {
        return current.entrySet().parallelStream().allMatch(entry -> {
            V vertex = entry.getKey();
            double currentValue = entry.getValue();
            double previousValue = previous.getOrDefault(vertex, 0.0);

            if (previousValue > 0) {
                double relativeChange = Math.abs(currentValue - previousValue) / previousValue;
                return relativeChange <= threshold;
            } else {
                // No previous estimate: treat small absolute values as converged.
                return currentValue <= threshold;
            }
        });
    }

    /**
     * Utility method to get thread-safe metrics about the sampling process.
     */
    public ConcurrentHashMap<String, Double> getSamplingMetrics(int sampleSize, int totalVertices) {
        ConcurrentHashMap<String, Double> metrics = new ConcurrentHashMap<>();
        metrics.put("sample_ratio", (double) sampleSize / totalVertices);
        metrics.put("expected_speedup", (double) totalVertices / sampleSize);
        metrics.put(
                "parallel_efficiency",
                (double) Runtime.getRuntime().availableProcessors() / Math.max(1, sampleSize / 10));
        return metrics;
    }

    /**
     * Computes a quality score for a modulator: smaller is better; a modulator that does
     * not bring the treewidth down to the target is considered invalid.
     */
    private double computeModulatorQuality(Graph<V, DefaultEdge> graph, Set<V> modulator, int targetTreewidth) {
        int resultingTreewidth = treewidthComputer.computeEta(graph, modulator);

        if (resultingTreewidth > targetTreewidth) {
            return Double.MAX_VALUE; // Invalid solution
        }

        // Quality = size penalty + treewidth penalty
        return modulator.size() + (resultingTreewidth * 0.1);
    }

    /**
     * Converts a directed graph to a simple undirected graph (self-loops and
     * duplicate edges are dropped).
     */
    private Graph<V, DefaultEdge> convertToUndirected(Graph<V, DefaultEdge> directed) {
        Graph<V, DefaultEdge> undirected = new DefaultUndirectedGraph<>(DefaultEdge.class);

        directed.vertexSet().forEach(undirected::addVertex);
        directed.edgeSet().forEach(edge -> {
            V source = directed.getEdgeSource(edge);
            V target = directed.getEdgeTarget(edge);
            if (!source.equals(target) && !undirected.containsEdge(source, target)) {
                undirected.addEdge(source, target);
            }
        });
        return undirected;
    }

    /**
     * Fallback modulator computation: greedily take the maxSize highest-degree vertices.
     */
    private ModulatorResult<V> computeFallbackModulator(Graph<V, DefaultEdge> graph, int targetTreewidth, int maxSize) {
        Set<V> modulator = graph.vertexSet().stream()
                .sorted((v1, v2) -> Integer.compare(
                        graph.inDegreeOf(v2) + graph.outDegreeOf(v2),
                        graph.inDegreeOf(v1) + graph.outDegreeOf(v1)))
                .limit(maxSize)
                .collect(Collectors.toSet());

        return new ModulatorResult<>(
                modulator,
                treewidthComputer.computeEta(graph, modulator),
                computeModulatorQuality(graph, modulator, targetTreewidth));
    }

    /**
     * Unwraps a future, returning null on any failure.
     */
    private Set<V> getFutureValue(Future<Set<V>> future) {
        try {
            return future.get();
        } catch (InterruptedException e) {
            // FIX: the original swallowed InterruptedException via catch (Exception);
            // preserve the thread's interrupt status before degrading to null.
            Thread.currentThread().interrupt();
            return null;
        } catch (Exception e) {
            return null;
        }
    }

    public void shutdown() {
        treewidthComputer.shutdown();
        fvsComputer.shutdown();
        if (executorService != null && !executorService.isShutdown()) {
            executorService.shutdown();
        }
    }

    /**
     * Result container for modulator computation. Immutable; defensively copies the
     * modulator set on the way in and out.
     */
    public static class ModulatorResult<V> {
        private final Set<V> modulator;
        private final int resultingTreewidth;
        private final double qualityScore;

        public ModulatorResult(Set<V> modulator, int resultingTreewidth, double qualityScore) {
            this.modulator = new HashSet<>(modulator);
            this.resultingTreewidth = resultingTreewidth;
            this.qualityScore = qualityScore;
        }

        public Set<V> getModulator() {
            return new HashSet<>(modulator);
        }

        public int getResultingTreewidth() {
            return resultingTreewidth;
        }

        public double getQualityScore() {
            return qualityScore;
        }

        public int getSize() {
            return modulator.size();
        }

        @Override
        public String toString() {
            return String.format(
                    "ModulatorResult{size=%d, treewidth=%d, quality=%.2f}",
                    modulator.size(), resultingTreewidth, qualityScore);
        }
    }
}

================================================
FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/ParameterComputer.java
================================================
package
org.hjug.feedback.vertex.kernelized; import java.util.HashSet; import java.util.Set; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; /** * Main facade for computing eta and k parameters needed for DirectedFeedbackVertexSetSolver * Generated by Perplexity.ai's Research model */ public class ParameterComputer { private final TreewidthComputer treewidthComputer; private final FeedbackVertexSetComputer fvsComputer; public ParameterComputer(SuperTypeToken edgeTypeToken) { this.treewidthComputer = new TreewidthComputer<>(); this.fvsComputer = new FeedbackVertexSetComputer<>(edgeTypeToken); } public ParameterComputer(SuperTypeToken edgeTypeToken, int parallelismLevel) { this.treewidthComputer = new TreewidthComputer<>(parallelismLevel); this.fvsComputer = new FeedbackVertexSetComputer<>(edgeTypeToken, parallelismLevel); } /** * Computes both eta and k parameters */ public Parameters computeParameters(Graph graph) { return computeParameters(graph, new HashSet<>()); } /** * Computes eta and k with a given modulator */ public Parameters computeParameters(Graph graph, Set modulator) { int eta = treewidthComputer.computeEta(graph, modulator); int k = fvsComputer.computeK(graph); return new Parameters(k, modulator.size(), eta); } /** * Computes a good modulator and then the parameters */ public Parameters computeParametersWithOptimalModulator(Graph graph, int maxModulatorSize) { Set bestModulator = findGoodModulator(graph, maxModulatorSize); return computeParameters(graph, bestModulator); } /** * Finds a good treewidth modulator using various heuristics */ private Set findGoodModulator(Graph graph, int maxSize) { if (maxSize <= 0) return new HashSet<>(); // Try different modulator finding strategies Set degreeBasedModulator = findDegreeBasedModulator(graph, maxSize); Set fvsBasedModulator = findFeedbackVertexSetBasedModulator(graph, maxSize); // Choose the one that gives better treewidth int etaDegree = treewidthComputer.computeEta(graph, 
degreeBasedModulator); int etaFVS = treewidthComputer.computeEta(graph, fvsBasedModulator); return etaDegree <= etaFVS ? degreeBasedModulator : fvsBasedModulator; } private Set findDegreeBasedModulator(Graph graph, int maxSize) { return graph.vertexSet().parallelStream() .sorted((v1, v2) -> Integer.compare( graph.inDegreeOf(v2) + graph.outDegreeOf(v2), graph.inDegreeOf(v1) + graph.outDegreeOf(v1))) .limit(maxSize) .collect(java.util.stream.Collectors.toSet()); } private Set findFeedbackVertexSetBasedModulator(Graph graph, int maxSize) { Set fvs = fvsComputer.greedyFeedbackVertexSet(graph); if (fvs.size() <= maxSize) { return fvs; } else { return fvs.stream().limit(maxSize).collect(java.util.stream.Collectors.toSet()); } } public void shutdown() { treewidthComputer.shutdown(); fvsComputer.shutdown(); } /** * Result container for computed parameters */ public static class Parameters { private final int k; // feedback vertex set size private final int modulatorSize; // modulator size (ℓ) private final int eta; // treewidth after modulator removal public Parameters(int k, int modulatorSize, int eta) { this.k = k; this.modulatorSize = modulatorSize; this.eta = eta; } public int getK() { return k; } public int getModulatorSize() { return modulatorSize; } public int getEta() { return eta; } @Override public String toString() { return String.format("Parameters{k=%d, ℓ=%d, η=%d}", k, modulatorSize, eta); } } } ================================================ FILE: graph-algorithms/src/main/java/org/hjug/feedback/vertex/kernelized/TreewidthComputer.java ================================================ package org.hjug.feedback.vertex.kernelized; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.jgrapht.Graph; import org.jgrapht.Graphs; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.DefaultEdge; import org.jgrapht.graph.DefaultUndirectedGraph; /** * 
Multithreaded treewidth computer that implements multiple heuristic algorithms * for approximating treewidth of graphs after modulator removal. * Generated by Perplexity.ai's Research model from papers by Hans L. Bodlaender et al. * https://dl.acm.org/doi/10.1137/S0097539793251219 * https://dl.acm.org/doi/10.1145/2973749 * */ public class TreewidthComputer { private final ExecutorService executorService; public TreewidthComputer() { this.executorService = ForkJoinPool.commonPool(); } public TreewidthComputer(int parallelismLevel) { this.executorService = Executors.newWorkStealingPool(parallelismLevel); } /** * Computes eta (η): the treewidth of the undirected version of the graph * after removing the modulator vertices. */ public int computeEta(Graph graph, Set modulator) { // Convert to undirected graph and remove modulator Graph undirectedGraph = convertToUndirectedWithoutModulator(graph, modulator); // shortcuts if (undirectedGraph.vertexSet().isEmpty() || undirectedGraph.vertexSet().size() == 1) { return 0; } else if (!hasCycles(graph)) { // A graph without cycles will have an eta of 1 for our purposes // since a graph that does not have cycles is not of interest return 1; } // Run multiple treewidth approximation algorithms in parallel List> algorithms = Arrays.asList( () -> minDegreeEliminationTreewidth(undirectedGraph), () -> fillInHeuristicTreewidth(undirectedGraph), () -> maxCliqueTreewidth(undirectedGraph), () -> greedyTriangulationTreewidth(undirectedGraph)); try { List> results = executorService.invokeAll(algorithms, 30, TimeUnit.SECONDS); return results.parallelStream() .map(this::getFutureValue) .filter(Objects::nonNull) .filter(eta -> eta > 1) // if a graph has a cycle, eta will be more than 1 .min(Integer::compareTo) .orElse(undirectedGraph.vertexSet().size() - 1); // Worst case bound } catch (InterruptedException e) { Thread.currentThread().interrupt(); return computeFallbackTreewidth(undirectedGraph); } } /** * Checks if the graph has cycles */ 
private boolean hasCycles(Graph graph) { CycleDetector detector = new CycleDetector<>(graph); return detector.detectCycles(); } /** * Converts directed/undirected graph to undirected and removes modulator vertices */ private Graph convertToUndirectedWithoutModulator(Graph original, Set modulator) { Graph undirected = new DefaultUndirectedGraph<>(DefaultEdge.class); // Add vertices (except modulator) original.vertexSet().stream().filter(v -> !modulator.contains(v)).forEach(undirected::addVertex); // Add edges original.edgeSet().parallelStream().forEach(edge -> { V source = original.getEdgeSource(edge); V target = original.getEdgeTarget(edge); if (undirected.containsVertex(source) && undirected.containsVertex(target) && !source.equals(target) && !undirected.containsEdge(source, target)) { synchronized (undirected) { if (!undirected.containsEdge(source, target)) { undirected.addEdge(source, target); } } } }); return undirected; } /** * Minimum degree elimination ordering heuristic */ private int minDegreeEliminationTreewidth(Graph graph) { Set remainingVertices = new ConcurrentHashMap<>(graph.vertexSet().stream().collect(Collectors.toMap(v -> v, v -> v))).keySet(); Map> adjacencyMap = new ConcurrentHashMap<>(); // Initialize adjacency map graph.vertexSet().parallelStream().forEach(v -> { adjacencyMap.put(v, ConcurrentHashMap.newKeySet()); adjacencyMap.get(v).addAll(Graphs.neighborSetOf(graph, v)); }); int maxBagSize = 0; while (!remainingVertices.isEmpty()) { // Find vertex with minimum degree V minDegreeVertex = remainingVertices.parallelStream() .min(Comparator.comparingInt(v -> (int) adjacencyMap.get(v).stream() .filter(remainingVertices::contains) .count())) .orElse(null); if (minDegreeVertex == null) break; Set neighbors = adjacencyMap.get(minDegreeVertex).stream() .filter(remainingVertices::contains) .collect(Collectors.toSet()); maxBagSize = Math.max(maxBagSize, neighbors.size()); // Make neighbors a clique neighbors.parallelStream().forEach(u -> { 
neighbors.parallelStream().filter(v -> !v.equals(u)).forEach(v -> { adjacencyMap.get(u).add(v); adjacencyMap.get(v).add(u); }); }); remainingVertices.remove(minDegreeVertex); } return maxBagSize; } /** * Computes an upper bound on treewidth using the minimum fill-in heuristic with parallelization. * * The minimum fill-in heuristic repeatedly eliminates the vertex that requires * the minimum number of edges to be added to make its neighborhood a clique. * This implementation uses parallel streams and concurrent data structures for better performance. * * @return an upper bound on the treewidth of the graph */ public int fillInHeuristicTreewidth(Graph graph) { if (graph.vertexSet().isEmpty()) { return 0; } // Create a working copy of the graph using concurrent data structures ConcurrentHashMap> adjacencyMap = new ConcurrentHashMap<>(); // Initialize adjacency map in parallel graph.vertexSet().parallelStream().forEach(vertex -> { Set neighbors = ConcurrentHashMap.newKeySet(); // Add in-neighbors graph.incomingEdgesOf(vertex).parallelStream() .map(graph::getEdgeSource) .filter(neighbor -> !neighbor.equals(vertex)) .forEach(neighbors::add); // Add out-neighbors graph.outgoingEdgesOf(vertex).parallelStream() .map(graph::getEdgeTarget) .filter(neighbor -> !neighbor.equals(vertex)) .forEach(neighbors::add); adjacencyMap.put(vertex, neighbors); }); AtomicInteger maxCliqueSize = new AtomicInteger(0); ConcurrentHashMap remainingVertices = new ConcurrentHashMap<>(); // Initialize remaining vertices graph.vertexSet().parallelStream().forEach(vertex -> remainingVertices.put(vertex, true)); // Custom ForkJoinPool for better control over parallelization ForkJoinPool customThreadPool = new ForkJoinPool(Runtime.getRuntime().availableProcessors()); try { // Main elimination loop while (!remainingVertices.isEmpty()) { // Find vertex with minimum fill-in in parallel Optional> bestVertexEntry = customThreadPool .submit(() -> remainingVertices.keySet().parallelStream() 
.collect(Collectors.toConcurrentMap( vertex -> vertex, vertex -> calculateFillInParallel(vertex, adjacencyMap, remainingVertices))) .entrySet() .parallelStream() .min(Map.Entry.comparingByValue())) .get(); if (!bestVertexEntry.isPresent()) { // Fallback: choose any remaining vertex V fallbackVertex = remainingVertices.keys().nextElement(); eliminateVertexParallel(fallbackVertex, adjacencyMap, remainingVertices, maxCliqueSize); } else { V bestVertex = bestVertexEntry.get().getKey(); eliminateVertexParallel(bestVertex, adjacencyMap, remainingVertices, maxCliqueSize); } } } catch (InterruptedException | ExecutionException e) { Thread.currentThread().interrupt(); throw new RuntimeException("Parallel computation interrupted", e); } finally { customThreadPool.shutdown(); } return maxCliqueSize.get(); } /** * Alternative implementation using CompletableFuture for more complex parallel operations. * TODO: Explore later */ public CompletableFuture fillInHeuristicTreewidthAsync(Graph graph) { return CompletableFuture.supplyAsync(() -> { if (graph.vertexSet().isEmpty()) { return 0; } // Initialize concurrent data structures ConcurrentHashMap> adjacencyMap = new ConcurrentHashMap<>(); ConcurrentHashMap remainingVertices = new ConcurrentHashMap<>(); AtomicInteger maxCliqueSize = new AtomicInteger(0); // Parallel initialization List> initFutures = graph.vertexSet().stream() .map(vertex -> CompletableFuture.runAsync(() -> { Set neighbors = ConcurrentHashMap.newKeySet(); graph.incomingEdgesOf(vertex).parallelStream() .map(graph::getEdgeSource) .filter(neighbor -> !neighbor.equals(vertex)) .forEach(neighbors::add); graph.outgoingEdgesOf(vertex).parallelStream() .map(graph::getEdgeTarget) .filter(neighbor -> !neighbor.equals(vertex)) .forEach(neighbors::add); adjacencyMap.put(vertex, neighbors); remainingVertices.put(vertex, true); })) .collect(Collectors.toList()); // Wait for initialization to complete CompletableFuture.allOf(initFutures.toArray(new CompletableFuture[0])) .join(); 
// Main elimination loop while (!remainingVertices.isEmpty()) { CompletableFuture bestVertexFuture = CompletableFuture.supplyAsync(() -> remainingVertices.keySet().parallelStream() .min(Comparator.comparingInt( vertex -> calculateFillInParallel(vertex, adjacencyMap, remainingVertices))) .orElse(remainingVertices.keys().nextElement())); V bestVertex = bestVertexFuture.join(); eliminateVertexParallel(bestVertex, adjacencyMap, remainingVertices, maxCliqueSize); } return maxCliqueSize.get(); }); } /** * Eliminates a vertex and updates the graph structure in parallel. * * @param vertex the vertex to eliminate * @param adjacencyMap the current adjacency representation * @param remainingVertices vertices that haven't been eliminated yet * @param maxCliqueSize atomic reference to track maximum clique size */ private void eliminateVertexParallel( V vertex, ConcurrentHashMap> adjacencyMap, ConcurrentHashMap remainingVertices, AtomicInteger maxCliqueSize) { Set neighborhood = getNeighborhoodParallel(vertex, adjacencyMap, remainingVertices); // Update maximum clique size atomically maxCliqueSize.updateAndGet(current -> Math.max(current, neighborhood.size())); // Make the neighborhood a clique in parallel fillInNeighborhoodParallel(neighborhood, adjacencyMap); // Remove the eliminated vertex remainingVertices.remove(vertex); adjacencyMap.remove(vertex); // Remove vertex from all neighbor sets in parallel adjacencyMap.values().parallelStream().forEach(neighbors -> neighbors.remove(vertex)); } /** * Gets the neighborhood of a vertex using parallel processing. 
* * @param vertex the vertex whose neighborhood to find * @param adjacencyMap the current adjacency representation * @param remainingVertices vertices that haven't been eliminated yet * @return the set of neighboring vertices that are still remaining */ private Set getNeighborhoodParallel( V vertex, ConcurrentHashMap> adjacencyMap, ConcurrentHashMap remainingVertices) { Set allNeighbors = adjacencyMap.getOrDefault(vertex, ConcurrentHashMap.newKeySet()); // Filter to only remaining vertices in parallel return allNeighbors.parallelStream() .filter(remainingVertices::containsKey) .collect(Collectors.toConcurrentMap( neighbor -> neighbor, neighbor -> true, (existing, replacement) -> true, ConcurrentHashMap::new)) .keySet(); } /** * Adds edges to make the given set of vertices form a clique using parallel processing. * * @param vertices the vertices that should form a clique * @param adjacencyMap the adjacency map to modify */ private void fillInNeighborhoodParallel(Set vertices, ConcurrentHashMap> adjacencyMap) { List vertexList = new ArrayList<>(vertices); // Add all missing edges to make it a clique in parallel vertexList.parallelStream().forEach(v1 -> { int index1 = vertexList.indexOf(v1); vertexList.stream().skip(index1 + 1).parallel().forEach(v2 -> { // Add edges in both directions atomically adjacencyMap .computeIfAbsent(v1, k -> ConcurrentHashMap.newKeySet()) .add(v2); adjacencyMap .computeIfAbsent(v2, k -> ConcurrentHashMap.newKeySet()) .add(v1); }); }); } /** * Calculates the fill-in value for a vertex using parallel processing. 
* * @param vertex the vertex to calculate fill-in for * @param adjacencyMap the current adjacency representation * @param remainingVertices vertices that haven't been eliminated yet * @return the number of edges needed to make the neighborhood a clique */ private int calculateFillInParallel( V vertex, ConcurrentHashMap> adjacencyMap, ConcurrentHashMap remainingVertices) { Set neighborhood = getNeighborhoodParallel(vertex, adjacencyMap, remainingVertices); if (neighborhood.size() <= 1) { return 0; // Already a clique (or empty) } List neighborList = new ArrayList<>(neighborhood); // Count missing edges in parallel return neighborList.parallelStream() .mapToInt(v1 -> { int index1 = neighborList.indexOf(v1); return (int) neighborList.stream() .skip(index1 + 1) .parallel() .filter(v2 -> !hasEdgeParallel(v1, v2, adjacencyMap)) .count(); }) .sum(); } /** * Checks if an edge exists between two vertices. * * @param v1 first vertex * @param v2 second vertex * @param adjacencyMap the current adjacency representation * @return true if an edge exists in either direction */ private boolean hasEdgeParallel(V v1, V v2, ConcurrentHashMap> adjacencyMap) { Set neighborsV1 = adjacencyMap.get(v1); Set neighborsV2 = adjacencyMap.get(v2); return (neighborsV1 != null && neighborsV1.contains(v2)) || (neighborsV2 != null && neighborsV2.contains(v1)); } /** * Maximum clique based treewidth lower bound */ private int maxCliqueTreewidth(Graph graph) { if (graph.vertexSet().size() <= 50) { return findMaxCliqueBronKerbosch(graph) - 1; } else { return findMaxCliqueGreedy(graph) - 1; } } /** * Greedy triangulation heuristic */ private int greedyTriangulationTreewidth(Graph graph) { Map> adjacencyMap = new ConcurrentHashMap<>(); // Initialize adjacency map graph.vertexSet().parallelStream().forEach(v -> { adjacencyMap.put(v, ConcurrentHashMap.newKeySet()); adjacencyMap.get(v).addAll(Graphs.neighborSetOf(graph, v)); }); int maxBagSize = 0; Queue eliminationOrder = new 
ConcurrentLinkedQueue<>(graph.vertexSet());
        // Drain the elimination order; the largest neighborhood seen bounds the treewidth bag size.
        while (!eliminationOrder.isEmpty()) {
            V vertex = eliminationOrder.poll();
            if (vertex == null) break;
            Set<V> neighbors = adjacencyMap.get(vertex);
            maxBagSize = Math.max(maxBagSize, neighbors.size());

            // Triangulate neighborhood
            triangulateNeighborhood(neighbors, adjacencyMap);
        }

        return maxBagSize;
    }

    /** Connects every pair of neighbors, turning the neighborhood into a clique (fill-in edges). */
    private void triangulateNeighborhood(Set<V> neighbors, Map<V, Set<V>> adjacencyMap) {
        List<V> neighborList = new ArrayList<>(neighbors);
        neighborList.parallelStream().forEach(u -> {
            neighborList.parallelStream()
                    .filter(v -> !v.equals(u) && !adjacencyMap.get(u).contains(v))
                    .forEach(v -> {
                        adjacencyMap.get(u).add(v);
                        adjacencyMap.get(v).add(u);
                    });
        });
    }

    // original implementation
    /** Counts the fill-in edges that eliminating this neighborhood would require. */
    private int calculateFillIn(Set<V> neighbors, Map<V, Set<V>> adjacencyMap) {
        AtomicInteger fillIn = new AtomicInteger(0);
        neighbors.parallelStream().forEach(u -> {
            neighbors.parallelStream()
                    .filter(v -> !v.equals(u) && !adjacencyMap.get(u).contains(v))
                    .forEach(v -> fillIn.incrementAndGet());
        });
        return fillIn.get() / 2; // Each edge counted twice
    }

    /** Exact maximum-clique size via the Bron–Kerbosch recursion. */
    private int findMaxCliqueBronKerbosch(Graph<V, E> graph) {
        Set<V> R = new HashSet<>();
        Set<V> P = new HashSet<>(graph.vertexSet());
        Set<V> X = new HashSet<>();
        AtomicInteger maxCliqueSize = new AtomicInteger(0);
        bronKerbosch(graph, R, P, X, maxCliqueSize);
        return maxCliqueSize.get();
    }

    /** Bron–Kerbosch without pivoting: R = current clique, P = candidates, X = excluded. */
    private void bronKerbosch(Graph<V, E> graph, Set<V> R, Set<V> P, Set<V> X, AtomicInteger maxSize) {
        if (P.isEmpty() && X.isEmpty()) {
            maxSize.set(Math.max(maxSize.get(), R.size()));
            return;
        }

        for (V vertex : new HashSet<>(P)) {
            Set<V> neighbors = Graphs.neighborSetOf(graph, vertex);

            Set<V> newR = new HashSet<>(R);
            newR.add(vertex);
            Set<V> newP = new HashSet<>(P);
            newP.retainAll(neighbors);
            Set<V> newX = new HashSet<>(X);
            newX.retainAll(neighbors);

            bronKerbosch(graph, newR, newP, newX, maxSize);

            P.remove(vertex);
            X.add(vertex);
        }
    }

    /** Cheap clique-size upper bound: degree + 1, maximized over all vertices. */
    private int findMaxCliqueGreedy(Graph<V, E> graph) {
        return graph.vertexSet().parallelStream()
                .mapToInt(v -> Graphs.neighborSetOf(graph, v).size() + 1)
                .max()
                .orElse(1);
    }

    private int computeFallbackTreewidth(Graph<V, E> graph) {
        // Simple fallback: maximum degree
        return graph.vertexSet().parallelStream()
                .mapToInt(v -> graph.degreeOf(v))
                .max()
                .orElse(0);
    }

    /** Unwraps a Future, returning null on any failure (timeout, interruption, execution error). */
    private Integer getFutureValue(Future<Integer> future) {
        try {
            return future.get();
        } catch (Exception e) {
            return null;
        }
    }

    /** Idempotent shutdown of the solver's executor. */
    public void shutdown() {
        if (executorService != null && !executorService.isShutdown()) {
            executorService.shutdown();
        }
    }
}


================================================
FILE: graph-algorithms/src/test/java/org/hjug/dsm/CircularReferenceCheckerTests.java
================================================
package org.hjug.dsm;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.Map;
import org.jgrapht.Graph;
import org.jgrapht.graph.AsSubgraph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

class CircularReferenceCheckerTests {

    CircularReferenceChecker sutCircularReferenceChecker = new CircularReferenceChecker();

    @DisplayName("Detect 3 cycles from given graph.")
    @Test
    void detectCyclesTest() {
        Graph<String, DefaultWeightedEdge> classReferencesGraph =
                new DefaultDirectedGraph<>(DefaultWeightedEdge.class);
        classReferencesGraph.addVertex("A");
        classReferencesGraph.addVertex("B");
        classReferencesGraph.addVertex("C");
        classReferencesGraph.addEdge("A", "B");
        classReferencesGraph.addEdge("B", "C");

        // A -> B -> C has no back edge yet, so no cycles are expected.
        Map<String, AsSubgraph<String, DefaultWeightedEdge>> cyclesForEveryVertexMap =
                sutCircularReferenceChecker.getCycles(classReferencesGraph);
        assertEquals(0, cyclesForEveryVertexMap.size(), "Not expecting any circular references at this point");

        // Closing the loop C -> A creates exactly one cycle.
        classReferencesGraph.addEdge("C", "A");
        cyclesForEveryVertexMap = sutCircularReferenceChecker.getCycles(classReferencesGraph);
        assertEquals(1, cyclesForEveryVertexMap.size(), "Now we expect one circular reference");
        assertEquals(
                "([A, B, C], [(A,B), (B,C), (C,A)])",
                cyclesForEveryVertexMap.get("A").toString(),
                "Expected a different circular reference");
    }
}

================================================ FILE: graph-algorithms/src/test/java/org/hjug/dsm/DSMTest.java ================================================ package org.hjug.dsm; import static org.junit.jupiter.api.Assertions.*; import java.util.List; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; class DSMTest { DSM dsm = new DSM(new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class)); @BeforeEach void setUp() { dsm.addActivity("A"); dsm.addActivity("B"); dsm.addActivity("C"); dsm.addActivity("D"); dsm.addDependency("A", "B", 1); dsm.addDependency("B", "C", 2); dsm.addDependency("C", "D", 3); dsm.addDependency("B", "A", 6); // Adding a cycle dsm.addDependency("C", "A", 5); // Adding a cycle dsm.addDependency("D", "A", 4); // Adding a cycle /* D C B A D - 0 0 4 C 3 - 0 5 B 0 2 - 6 A 0 0 1 - */ dsm.addActivity("E"); dsm.addActivity("F"); dsm.addActivity("G"); dsm.addActivity("H"); dsm.addDependency("D", "C", 2); dsm.addDependency("A", "H", 7); dsm.addDependency("E", "C", 9); dsm.addDependency("E", "H", 2); dsm.addDependency("G", "E", 2); dsm.addDependency("H", "D", 9); dsm.addDependency("H", "G", 5); // dsm.printDSM(); } @Test void optimalBackwardEdgeToRemove() { // Identify which edge above the diagonal should be removed first DefaultWeightedEdge edge = dsm.getFirstLowestWeightEdgeAboveDiagonalToRemove(); assertEquals("(D : C)", edge.toString()); } @Test void optimalBackwardEdgeToRemoveWithWeightOfOne() { DSM dsm2 = new DSM<>(new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class)); dsm2.addActivity("A"); dsm2.addActivity("B"); dsm2.addActivity("C"); dsm2.addDependency("A", "B", 1); dsm2.addDependency("B", "C", 1); dsm2.addDependency("B", "A", 1); dsm2.addDependency("C", "A", 1); // Identify which edge above the diagonal should be removed first DefaultWeightedEdge edge = dsm2.getFirstLowestWeightEdgeAboveDiagonalToRemove(); 
assertEquals("(C : A)", edge.toString()); } @Test void minWeightBackwardEdges() { // Identify which edge above the diagonal in the set of cycles should be removed first List edges = dsm.getMinimumWeightEdgesAboveDiagonal(); assertEquals(2, edges.size()); assertEquals("(D : C)", edges.get(0).toString()); assertEquals("(E : H)", edges.get(1).toString()); } @Test void edgesAboveDiagonal() { // Identify edges above the diagonal List edges = dsm.getEdgesAboveDiagonal(); assertEquals(5, edges.size()); assertEquals("(D : C)", edges.get(0).toString()); assertEquals("(D : A)", edges.get(1).toString()); assertEquals("(C : A)", edges.get(2).toString()); assertEquals("(B : A)", edges.get(3).toString()); assertEquals("(E : H)", edges.get(4).toString()); } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/dsm/EdgeRemovalCalculatorTest.java ================================================ package org.hjug.dsm; import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.List; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import org.junit.jupiter.api.Test; public class EdgeRemovalCalculatorTest { DSM dsm; @Test void getImpactOfEdgesAboveDiagonalIfRemoved() { SimpleDirectedWeightedGraph graph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); dsm = new DSM<>(graph); dsm.addActivity("A"); dsm.addActivity("B"); dsm.addActivity("C"); dsm.addActivity("D"); // Cycle 1 dsm.addDependency("A", "B", 1); dsm.addDependency("B", "C", 2); dsm.addDependency("C", "D", 3); dsm.addDependency("B", "A", 6); // Adding a cycle dsm.addDependency("C", "A", 5); // Adding a cycle dsm.addDependency("D", "A", 4); // Adding a cycle // Cycle 2 dsm.addActivity("E"); dsm.addActivity("F"); dsm.addActivity("G"); dsm.addActivity("H"); dsm.addDependency("E", "F", 2); dsm.addDependency("F", "G", 7); dsm.addDependency("G", "H", 9); dsm.addDependency("H", "E", 9); // create cycle 
dsm.addDependency("A", "E", 9); dsm.addDependency("E", "A", 3); // create cycle between cycles EdgeRemovalCalculator edgeRemovalCalculator = new EdgeRemovalCalculator(graph, dsm); List infos = edgeRemovalCalculator.getImpactOfEdgesAboveDiagonalIfRemoved(50); assertEquals(5, infos.size()); assertEquals("(D : A)", infos.get(0).getEdge().toString()); assertEquals(3, infos.get(0).getNewCycleCount()); } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/dsm/OptimalBackEdgeRemoverTest.java ================================================ package org.hjug.dsm; import static org.junit.jupiter.api.Assertions.*; import java.util.ArrayList; import java.util.Set; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import org.junit.jupiter.api.Test; class OptimalBackEdgeRemoverTest { @Test void noOptimalEdge() { Graph classReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); classReferencesGraph.addVertex("A"); classReferencesGraph.addVertex("B"); classReferencesGraph.addVertex("C"); classReferencesGraph.addEdge("A", "B"); classReferencesGraph.addEdge("B", "C"); OptimalBackEdgeRemover remover = new OptimalBackEdgeRemover(classReferencesGraph); Set optimalEdges = remover.findOptimalBackEdgesToRemove(); assertTrue(optimalEdges.isEmpty()); } @Test void oneBackEdge() { Graph classReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); classReferencesGraph.addVertex("A"); classReferencesGraph.addVertex("B"); classReferencesGraph.addVertex("C"); classReferencesGraph.addEdge("A", "B"); classReferencesGraph.addEdge("B", "C"); classReferencesGraph.addEdge("C", "A"); OptimalBackEdgeRemover remover = new OptimalBackEdgeRemover(classReferencesGraph); Set optimalEdges = remover.findOptimalBackEdgesToRemove(); // all are considered back edges since this is a cycle assertEquals(3, optimalEdges.size()); } @Test void 
twoBackEdges() { Graph classReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); classReferencesGraph.addVertex("A"); classReferencesGraph.addVertex("B"); classReferencesGraph.addVertex("C"); classReferencesGraph.addVertex("D"); classReferencesGraph.addEdge("A", "B"); classReferencesGraph.addEdge("B", "C"); classReferencesGraph.addEdge("C", "D"); classReferencesGraph.addEdge("C", "A"); // back edge classReferencesGraph.addEdge("D", "A"); // back edge OptimalBackEdgeRemover remover = new OptimalBackEdgeRemover(classReferencesGraph); Set optimalEdges = remover.findOptimalBackEdgesToRemove(); assertEquals(2, optimalEdges.size()); } @Test void multi() { Graph classReferencesGraph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class); classReferencesGraph.addVertex("A"); classReferencesGraph.addVertex("B"); classReferencesGraph.addVertex("C"); classReferencesGraph.addVertex("D"); // Cycle 1 classReferencesGraph.addEdge("A", "B"); classReferencesGraph.addEdge("B", "C"); classReferencesGraph.addEdge("C", "D"); classReferencesGraph.addEdge("B", "A"); // Adding a cycle classReferencesGraph.addEdge("C", "A"); // Adding a cycle classReferencesGraph.addEdge("D", "A"); // Adding a cycle // Cycle 2 classReferencesGraph.addVertex("E"); classReferencesGraph.addVertex("F"); classReferencesGraph.addVertex("G"); classReferencesGraph.addVertex("H"); classReferencesGraph.addEdge("E", "F"); classReferencesGraph.addEdge("F", "G"); classReferencesGraph.addEdge("G", "H"); classReferencesGraph.addEdge("H", "E"); // create cycle classReferencesGraph.addEdge("A", "E"); classReferencesGraph.addEdge("E", "A"); // create cycle between cycles OptimalBackEdgeRemover remover = new OptimalBackEdgeRemover(classReferencesGraph); Set optimalEdges = remover.findOptimalBackEdgesToRemove(); assertEquals(1, optimalEdges.size()); assertEquals("(A : B)", new ArrayList<>(optimalEdges).get(0).toString()); } } ================================================ FILE: 
graph-algorithms/src/test/java/org/hjug/feedback/SuperTypeTokenTest.java
================================================
package org.hjug.feedback;

import static org.junit.jupiter.api.Assertions.*;

import java.util.List;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class SuperTypeTokenTest {

    SuperTypeToken<DefaultWeightedEdge> token;

    @BeforeEach
    void setUp() {
        token = new SuperTypeToken<>() {};
    }

    @Test
    void getType() {
        assertEquals(
                "class org.jgrapht.graph.DefaultWeightedEdge",
                token.getType().toString());
    }

    @Test
    void getGenericType() {
        SuperTypeToken<List<String>> genericToken = new SuperTypeToken<>() {};
        // NOTE(review): generic literal reconstructed — extraction stripped <...> text from both
        // the declaration and the expected string; verify against the original source
        assertEquals("java.util.List<java.lang.String>", genericToken.getType().toString());
        assertEquals(List.class, genericToken.getClassFromTypeToken());
    }

    @Test
    void getClassFromType() {
        assertEquals(DefaultWeightedEdge.class, token.getClassFromTypeToken());
    }

    @Test
    void typeWithGenericParameter() {
        assertEquals(DefaultWeightedEdge.class, new GenericTestClass<>(token).getTypeTokenClass());
    }
}

class GenericTestClass<T> {

    SuperTypeToken<T> typeToken;

    public GenericTestClass(SuperTypeToken<T> token) {
        this.typeToken = token;
    }

    public Class<T> getTypeTokenClass() {
        return typeToken.getClassFromTypeToken();
    }
}


================================================
FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/approximate/FeedbackArcSetBenchmarkTest.java
================================================
package org.hjug.feedback.arc.approximate;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

/**
 * Benchmark tests for performance evaluation
 */
class FeedbackArcSetBenchmarkTest {

    @Test
    @DisplayName("Benchmark: Dense graphs with varying sizes")
    void benchmarkDenseGraphs() {
        int[] sizes = {10, 25, 50, 100};

        System.out.println("=== Dense Graph Benchmark ===");
        System.out.printf("%-10s %-15s %-15s %-15s %-15s%n", "Size", "Vertices", "Edges", "FAS Size", "Time (ms)");

        for (int size : sizes) {
            Graph<String, DefaultEdge> graph = createDenseGraph(size);

            long startTime = System.currentTimeMillis();
            FeedbackArcSetSolver<String, DefaultEdge> solver = new FeedbackArcSetSolver<>(graph);
            FeedbackArcSetResult<String, DefaultEdge> result = solver.solve();
            long endTime = System.currentTimeMillis();

            System.out.printf(
                    "%-10d %-15d %-15d %-15d %-15d%n",
                    size,
                    graph.vertexSet().size(),
                    graph.edgeSet().size(),
                    result.getFeedbackArcCount(),
                    endTime - startTime);
        }
    }

    @Test
    @DisplayName("Benchmark: Sparse graphs with varying sizes")
    void benchmarkSparseGraphs() {
        int[] sizes = {50, 100, 200, 500, 1000, 1500};

        System.out.println("=== Sparse Graph Benchmark ===");
        System.out.printf("%-10s %-15s %-15s %-15s %-15s%n", "Size", "Vertices", "Edges", "FAS Size", "Time (ms)");

        for (int size : sizes) {
            Graph<String, DefaultEdge> graph = createSparseGraph(size);

            long startTime = System.currentTimeMillis();
            FeedbackArcSetSolver<String, DefaultEdge> solver = new FeedbackArcSetSolver<>(graph);
            FeedbackArcSetResult<String, DefaultEdge> result = solver.solve();
            long endTime = System.currentTimeMillis();

            System.out.printf(
                    "%-10d %-15d %-15d %-15d %-15d%n",
                    size,
                    graph.vertexSet().size(),
                    graph.edgeSet().size(),
                    result.getFeedbackArcCount(),
                    endTime - startTime);
        }
    }

    private Graph<String, DefaultEdge> createDenseGraph(int size) {
        Graph<String, DefaultEdge> graph = new DefaultDirectedGraph<>(DefaultEdge.class);

        // Add vertices
        for (int i = 0; i < size; i++) {
            graph.addVertex("V" + i);
        }

        List<String> vertices = new ArrayList<>(graph.vertexSet());
        ThreadLocalRandom random = ThreadLocalRandom.current();

        // Add edges with high probability
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                if (i != j && random.nextDouble() < 0.6) {
                    graph.addEdge(vertices.get(i), vertices.get(j));
                }
            }
        }

        return graph;
    }

    private Graph<String, DefaultEdge> createSparseGraph(int size) {
        Graph<String, DefaultEdge> graph = new DefaultDirectedGraph<>(DefaultEdge.class);

        // Add vertices
        for (int i = 0; i < size; i++) {
            graph.addVertex("V" + i);
        }

        List<String> vertices = new ArrayList<>(graph.vertexSet());
        ThreadLocalRandom random = ThreadLocalRandom.current();

        // Add approximately 2*size edges (sparse)
        int targetEdges = size * 2;
        int addedEdges = 0;

        while (addedEdges < targetEdges) {
            String source = vertices.get(random.nextInt(vertices.size()));
            String target = vertices.get(random.nextInt(vertices.size()));
            if (!source.equals(target) && !graph.containsEdge(source, target)) {
                graph.addEdge(source, target);
                addedEdges++;
            }
        }

        return graph;
    }
}


================================================
FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/approximate/FeedbackArcSetExample.java
================================================
package org.hjug.feedback.arc.approximate;

import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;

public class FeedbackArcSetExample {

    public static void main(String[] args) {
        // Create a directed graph with cycles
        Graph<String, DefaultEdge> graph = new DefaultDirectedGraph<>(DefaultEdge.class);

        // Add vertices
        graph.addVertex("A");
        graph.addVertex("B");
        graph.addVertex("C");
        graph.addVertex("D");

        // Add edges creating cycles
        graph.addEdge("A", "B");
        graph.addEdge("B", "C");
        graph.addEdge("C", "A"); // Creates cycle A->B->C->A
        graph.addEdge("C", "D");
        graph.addEdge("D", "A"); // Creates cycle A->B->C->D->A

        // Solve the FAS problem
        FeedbackArcSetSolver<String, DefaultEdge> solver = new FeedbackArcSetSolver<>(graph);
        FeedbackArcSetResult<String, DefaultEdge> result = solver.solve();

        System.out.println("Vertex sequence: " + result.getVertexSequence());
        System.out.println("Feedback arc count: " + result.getFeedbackArcCount());
        System.out.println("Feedback arcs: " + result.getFeedbackArcs());
    }
}


================================================
FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/approximate/FeedbackArcSetSolverTest.java
================================================
package org.hjug.feedback.arc.approximate;

import static
org.junit.jupiter.api.Assertions.*; import java.util.*; import java.util.concurrent.ThreadLocalRandom; import org.jgrapht.Graph; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; /** * Comprehensive unit tests for the FeedbackArcSetSolver */ class FeedbackArcSetSolverTest { private Graph graph; private FeedbackArcSetSolver solver; @BeforeEach void setUp() { graph = new DefaultDirectedGraph<>(DefaultEdge.class); } @Nested @DisplayName("Basic Algorithm Tests") class BasicAlgorithmTests { @Test @DisplayName("Should handle empty graph") void testEmptyGraph() { solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); assertTrue(result.getVertexSequence().isEmpty()); assertTrue(result.getFeedbackArcs().isEmpty()); assertEquals(0, result.getFeedbackArcCount()); } @Test @DisplayName("Should handle single vertex") void testSingleVertex() { graph.addVertex("A"); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); assertEquals(1, result.getVertexSequence().size()); assertTrue(result.getVertexSequence().contains("A")); assertEquals(0, result.getFeedbackArcCount()); } @Test @DisplayName("Should handle acyclic graph") void testAcyclicGraph() { // Create a simple DAG: A -> B -> C graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); assertEquals(0, result.getFeedbackArcCount()); assertEquals(3, result.getVertexSequence().size()); } @Test @DisplayName("Should handle simple cycle") void testSimpleCycle() { // Create a simple 
cycle: A -> B -> C -> A graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); // Should break the cycle with exactly one feedback arc assertEquals(1, result.getFeedbackArcCount()); assertGraphIsAcyclicAfterRemoval(result); } } @Nested @DisplayName("Complex Graph Tests") class ComplexGraphTests { @Test @DisplayName("Should handle multiple cycles") void testMultipleCycles() { // Create graph with multiple overlapping cycles String[] vertices = {"A", "B", "C", "D", "E"}; for (String v : vertices) { graph.addVertex(v); } // Create cycles: A->B->C->A and C->D->E->C graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); graph.addEdge("C", "D"); graph.addEdge("D", "E"); graph.addEdge("E", "C"); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); assertTrue(result.getFeedbackArcCount() >= 2); assertGraphIsAcyclicAfterRemoval(result); } @Test @DisplayName("Should handle tournament graph") void testTournamentGraph() { // Create a tournament (complete directed graph) String[] vertices = {"A", "B", "C", "D"}; for (String v : vertices) { graph.addVertex(v); } // Add edges to create a tournament graph.addEdge("A", "B"); graph.addEdge("A", "C"); graph.addEdge("A", "D"); graph.addEdge("B", "C"); graph.addEdge("B", "D"); graph.addEdge("C", "D"); graph.addEdge("D", "A"); // Creates cycles graph.addEdge("C", "B"); // Creates cycles solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); assertGraphIsAcyclicAfterRemoval(result); // For tournaments, the bound should be ≤ m/2 + n/4 int m = graph.edgeSet().size(); int n = graph.vertexSet().size(); assertTrue(result.getFeedbackArcCount() <= m / 2 + n / 4); } } @Nested @DisplayName("Performance Tests") class PerformanceTests { @ParameterizedTest @ValueSource(ints = {10, 50, 
100}) @DisplayName("Should handle large random graphs efficiently") void testLargeRandomGraphs(int size) { createRandomGraph(size, size * 2); long startTime = System.currentTimeMillis(); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); long endTime = System.currentTimeMillis(); assertGraphIsAcyclicAfterRemoval(result); // Performance should be reasonable (less than 5 seconds for size 100) assertTrue(endTime - startTime < 5000, "Algorithm took too long: " + (endTime - startTime) + "ms"); } @Test @DisplayName("Should verify parallel processing improves performance") void testParallelPerformanceImprovement() { createRandomGraph(50, 100); // Test with current parallel implementation long startTimeParallel = System.currentTimeMillis(); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult parallelResult = solver.solve(); long endTimeParallel = System.currentTimeMillis(); assertGraphIsAcyclicAfterRemoval(parallelResult); // Verify result quality meets the theoretical bound int m = graph.edgeSet().size(); int n = graph.vertexSet().size(); assertTrue(parallelResult.getFeedbackArcCount() <= m / 2 + n / 4); } } @Nested @DisplayName("Edge Cases") class EdgeCaseTests { @Test @DisplayName("Should handle self-loops") void testSelfLoops() { graph.addVertex("A"); graph.addVertex("B"); // JGraphT DefaultDirectedGraph doesn't allow self-loops by default // But we can test the behavior graph.addEdge("A", "B"); graph.addEdge("B", "A"); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); assertEquals(1, result.getFeedbackArcCount()); assertGraphIsAcyclicAfterRemoval(result); } @Test @DisplayName("Should handle disconnected components") void testDisconnectedComponents() { // Component 1: A -> B -> A graph.addVertex("A"); graph.addVertex("B"); graph.addEdge("A", "B"); graph.addEdge("B", "A"); // Component 2: C -> D (acyclic) graph.addVertex("C"); graph.addVertex("D"); graph.addEdge("C", "D"); // 
Component 3: E (isolated) graph.addVertex("E"); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); assertEquals(1, result.getFeedbackArcCount()); assertGraphIsAcyclicAfterRemoval(result); assertEquals(5, result.getVertexSequence().size()); } } @Nested @DisplayName("Correctness Verification") class CorrectnessTests { @Test @DisplayName("Should produce valid vertex ordering") void testVertexOrderingValidity() { createRandomGraph(20, 40); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); // Verify all vertices are included in the sequence assertEquals(graph.vertexSet().size(), result.getVertexSequence().size()); assertTrue(result.getVertexSequence().containsAll(graph.vertexSet())); // Verify no duplicates Set uniqueVertices = new HashSet<>(result.getVertexSequence()); assertEquals(graph.vertexSet().size(), uniqueVertices.size()); } @Test @DisplayName("Should satisfy performance bound") void testPerformanceBound() { createRandomGraph(30, 60); solver = new FeedbackArcSetSolver<>(graph); FeedbackArcSetResult result = solver.solve(); int m = graph.edgeSet().size(); int n = graph.vertexSet().size(); int bound = m / 2 + n / 4; assertTrue( result.getFeedbackArcCount() <= bound, String.format("FAS size %d exceeds bound %d", result.getFeedbackArcCount(), bound)); } } // Helper methods private void createRandomGraph(int vertexCount, int edgeCount) { ThreadLocalRandom random = ThreadLocalRandom.current(); // Add vertices for (int i = 0; i < vertexCount; i++) { graph.addVertex("V" + i); } List vertices = new ArrayList<>(graph.vertexSet()); // Add random edges int addedEdges = 0; while (addedEdges < edgeCount) { String source = vertices.get(random.nextInt(vertices.size())); String target = vertices.get(random.nextInt(vertices.size())); if (!source.equals(target) && !graph.containsEdge(source, target)) { graph.addEdge(source, target); addedEdges++; } } } private void 
assertGraphIsAcyclicAfterRemoval(FeedbackArcSetResult result) { // Create a copy of the graph without feedback arcs Graph testGraph = new DefaultDirectedGraph<>(DefaultEdge.class); // Add all vertices for (String vertex : graph.vertexSet()) { testGraph.addVertex(vertex); } // Add all edges except feedback arcs for (DefaultEdge edge : graph.edgeSet()) { if (!result.getFeedbackArcs().contains(edge)) { String source = graph.getEdgeSource(edge); String target = graph.getEdgeTarget(edge); testGraph.addEdge(source, target); } } // Verify the resulting graph is acyclic CycleDetector cycleDetector = new CycleDetector<>(testGraph); assertFalse(cycleDetector.detectCycles(), "Graph should be acyclic after removing feedback arcs"); } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/exact/MinimumFeedbackArcSetBenchmarkTest.java ================================================ package org.hjug.feedback.arc.exact; import java.util.*; import java.util.concurrent.ThreadLocalRandom; import java.util.stream.IntStream; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; /** * Performance benchmark tests for the algorithm [2] */ class MinimumFeedbackArcSetBenchmarkTest { @Test @DisplayName("Benchmark: Various graph sizes and densities") void benchmarkGraphSizes() { int[] sizes = {20, 50, 100}; double[] densities = {0.1, 0.3, 0.5}; System.out.println("=== Minimum Feedback Arc Set Benchmark ==="); System.out.printf( "%-10s %-15s %-15s %-15s %-15s %-15s%n", "Size", "Density", "Vertices", "Edges", "FAS Size", "Time (ms)"); for (int size : sizes) { for (double density : densities) { Graph graph = createRandomGraph(size, density); long startTime = System.currentTimeMillis(); MinimumFeedbackArcSetSolver solver = new MinimumFeedbackArcSetSolver<>(graph, null, 
new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); long endTime = System.currentTimeMillis(); System.out.printf( "%-10d %-15.1f %-15d %-15d %-15d %-15d%n", size, density, graph.vertexSet().size(), graph.edgeSet().size(), result.size(), endTime - startTime); } } } private Graph createRandomGraph(int size, double density) { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // Add vertices using parallel streams [18] IntStream.range(0, size).forEach(i -> graph.addVertex("V" + i)); List vertices = new ArrayList<>(graph.vertexSet()); ThreadLocalRandom random = ThreadLocalRandom.current(); int maxEdges = size * (size - 1); int targetEdges = (int) (maxEdges * density); int addedEdges = 0; while (addedEdges < targetEdges) { String source = vertices.get(random.nextInt(vertices.size())); String target = vertices.get(random.nextInt(vertices.size())); if (!source.equals(target) && !graph.containsEdge(source, target)) { graph.addEdge(source, target); addedEdges++; } } return graph; } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/exact/MinimumFeedbackArcSetExample.java ================================================ package org.hjug.feedback.arc.exact; import java.util.Map; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; public class MinimumFeedbackArcSetExample { public static void main(String[] args) { // Create a directed graph with cycles Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // Add vertices graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addVertex("D"); // Add edges creating cycles DefaultEdge e1 = graph.addEdge("A", "B"); DefaultEdge e2 = graph.addEdge("B", "C"); DefaultEdge e3 = graph.addEdge("C", "A"); // Creates cycle A->B->C->A DefaultEdge e4 = graph.addEdge("C", "D"); DefaultEdge e5 = graph.addEdge("D", "A"); // Creates 
cycle A->B->C->D->A // Define edge weights (optional) Map weights = Map.of(e1, 1.0, e2, 2.0, e3, 1.5, e4, 1.0, e5, 1.0); // Solve the minimum feedback arc set problem MinimumFeedbackArcSetSolver solver = new MinimumFeedbackArcSetSolver<>(graph, weights, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); System.out.println("Minimum feedback arc set: " + result.getFeedbackArcSet()); System.out.println("Objective value: " + result.getObjectiveValue()); System.out.println("Solution size: " + result.size()); } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/exact/MinimumFeedbackArcSetSolverTest.java ================================================ package org.hjug.feedback.arc.exact; import static org.junit.jupiter.api.Assertions.*; import java.util.*; import java.util.concurrent.ThreadLocalRandom; import java.util.stream.IntStream; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; import org.junit.jupiter.api.*; import org.junit.jupiter.api.parallel.Execution; import org.junit.jupiter.api.parallel.ExecutionMode; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; /** * Comprehensive unit tests for the MinimumFeedbackArcSetSolver [15] */ @Execution(ExecutionMode.CONCURRENT) class MinimumFeedbackArcSetSolverTest { private Graph graph; private MinimumFeedbackArcSetSolver solver; @BeforeEach void setUp() { graph = new DefaultDirectedGraph<>(DefaultEdge.class); } @Nested @DisplayName("Basic Algorithm Tests") class BasicAlgorithmTests { @Test @DisplayName("Should handle empty graph") void testEmptyGraph() { solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertTrue(result.getFeedbackArcSet().isEmpty()); 
assertEquals(0.0, result.getObjectiveValue()); } @Test @DisplayName("Should handle single vertex") void testSingleVertex() { graph.addVertex("A"); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertEquals(0, result.size()); } @Test @DisplayName("Should handle acyclic graph") void testAcyclicGraph() { // Create a simple DAG: A -> B -> C [15] graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertEquals(0, result.size()); } @Test @DisplayName("Should handle simple cycle") void testSimpleCycle() { // Create a simple cycle: A -> B -> C -> A [2] graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); // Should break the cycle with exactly one arc assertEquals(1, result.size()); assertGraphIsAcyclicAfterRemoval(result); } @Test @DisplayName("Should handle self-loop") @Disabled("Does not pass, but I (JRB) am not concerned about this case") void testSelfLoop() { graph.addVertex("A"); DefaultEdge selfLoop = graph.addEdge("A", "A"); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertEquals(1, result.size()); assertTrue(result.getFeedbackArcSet().contains(selfLoop)); } } @Nested @DisplayName("Complex Graph Tests") class ComplexGraphTests { @Test @DisplayName("Should handle multiple cycles") void testMultipleCycles() { // Create graph with multiple overlapping cycles [2] String[] vertices = {"A", "B", "C", "D", "E"}; for (String v : vertices) { graph.addVertex(v); } // Create cycles: A->B->C->A and 
C->D->E->C graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); graph.addEdge("C", "D"); graph.addEdge("D", "E"); graph.addEdge("E", "C"); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertTrue(result.size() >= 2); assertGraphIsAcyclicAfterRemoval(result); } @Test @DisplayName("Should handle disconnected components") void testDisconnectedComponents() { // Component 1: A -> B -> A graph.addVertex("A"); graph.addVertex("B"); graph.addEdge("A", "B"); graph.addEdge("B", "A"); // Component 2: C -> D (acyclic) graph.addVertex("C"); graph.addVertex("D"); graph.addEdge("C", "D"); // Component 3: E (isolated) graph.addVertex("E"); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertEquals(1, result.size()); assertGraphIsAcyclicAfterRemoval(result); } @Test @DisplayName("Should handle weighted edges") void testWeightedEdges() { // Create a cycle with different edge weights graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); DefaultEdge e1 = graph.addEdge("A", "B"); DefaultEdge e2 = graph.addEdge("B", "C"); DefaultEdge e3 = graph.addEdge("C", "A"); Map weights = Map.of(e1, 1.0, e2, 10.0, e3, 1.0); solver = new MinimumFeedbackArcSetSolver<>(graph, weights, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertEquals(1, result.size()); // Should prefer removing lower weight edges assertFalse(result.getFeedbackArcSet().contains(e2)); } } @Nested @DisplayName("Performance Tests") class PerformanceTests { @ParameterizedTest @ValueSource(ints = {10, 25, 50}) @DisplayName("Should handle random graphs efficiently") void testRandomGraphPerformance(int size) { createRandomGraph(size, size * 2); long startTime = System.currentTimeMillis(); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = 
solver.solve(); long endTime = System.currentTimeMillis(); // Performance should be reasonable [2] assertTrue(endTime - startTime < 10000, "Algorithm took too long: " + (endTime - startTime) + "ms"); if (hasCycles()) { assertGraphIsAcyclicAfterRemoval(result); } } @Test @DisplayName("Should utilize parallel processing effectively") void testParallelProcessing() { createRandomGraph(30, 60); long startTime = System.currentTimeMillis(); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); long endTime = System.currentTimeMillis(); assertTrue(endTime - startTime < 15000); if (hasCycles()) { assertGraphIsAcyclicAfterRemoval(result); } } } @Nested @DisplayName("Correctness Tests") class CorrectnessTests { @Test @DisplayName("Should maintain optimality properties") void testOptimalityProperties() { createRandomGraph(15, 30); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); // Solution should be minimal and make graph acyclic [2] if (hasCycles()) { assertGraphIsAcyclicAfterRemoval(result); assertTrue(result.size() > 0); } } @Test @DisplayName("Should handle edge cases correctly") void testEdgeCases() { // Triangle with all edges having same weight graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); solver = new MinimumFeedbackArcSetSolver<>(graph, null, new SuperTypeToken<>() {}); FeedbackArcSetResult result = solver.solve(); assertEquals(1, result.size()); assertGraphIsAcyclicAfterRemoval(result); } } // Helper methods private void createRandomGraph(int vertexCount, int edgeCount) { ThreadLocalRandom random = ThreadLocalRandom.current(); // Add vertices [18] IntStream.range(0, vertexCount).forEach(i -> graph.addVertex("V" + i)); List vertices = new ArrayList<>(graph.vertexSet()); // Add random edges int addedEdges = 0; while 
(addedEdges < edgeCount && addedEdges < vertexCount * (vertexCount - 1)) { String source = vertices.get(random.nextInt(vertices.size())); String target = vertices.get(random.nextInt(vertices.size())); if (!source.equals(target) && !graph.containsEdge(source, target)) { graph.addEdge(source, target); addedEdges++; } } } private boolean hasCycles() { CycleDetector cycleDetector = new CycleDetector<>(graph); return cycleDetector.detectCycles(); } private void assertGraphIsAcyclicAfterRemoval(FeedbackArcSetResult result) { // Create a copy of the graph without feedback arcs [12] Graph testGraph = new DefaultDirectedGraph<>(DefaultEdge.class); Set resultEdgesAsStrings = new HashSet<>(); result.getFeedbackArcSet().forEach(edge -> resultEdgesAsStrings.add(edge.toString())); // Add all vertices graph.vertexSet().forEach(testGraph::addVertex); // Add edges not in feedback arc set graph.edgeSet().stream() .filter(edge -> !resultEdgesAsStrings.contains(edge.toString())) .forEach(edge -> { String source = graph.getEdgeSource(edge); String target = graph.getEdgeTarget(edge); testGraph.addEdge(source, target); }); // Verify the resulting graph is acyclic [12][16] CycleDetector cycleDetector = new CycleDetector<>(testGraph); assertFalse(cycleDetector.detectCycles(), "Graph should be acyclic after removing feedback arcs"); } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/pageRank/PageRankFASExample.java ================================================ package org.hjug.feedback.arc.pageRank; import java.util.Set; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; /** * Example usage of the PageRankFAS algorithm * Demonstrates how to use the algorithm with different types of graphs */ public class PageRankFASExample { public static void main(String[] args) { System.out.println("PageRankFAS Algorithm Examples"); 
System.out.println("==============================="); // Example 1: Simple cycle System.out.println("\n1. Simple Cycle Example:"); demonstrateSimpleCycle(); // Example 2: Multiple cycles System.out.println("\n2. Multiple Cycles Example:"); demonstrateMultipleCycles(); // Example 3: Complex graph with nested cycles System.out.println("\n3. Complex Graph Example:"); demonstrateComplexGraph(); // Example 4: Performance comparison System.out.println("\n4. Performance Comparison:"); demonstratePerformanceComparison(); // Example 5: Custom PageRank iterations System.out.println("\n5. Custom PageRank Iterations:"); demonstrateCustomIterations(); } /** * Demonstrate PageRankFAS on a simple 3-node cycle */ private static void demonstrateSimpleCycle() { // Create a simple cycle: A -> B -> C -> A Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); DefaultEdge e1 = graph.addEdge("A", "B"); DefaultEdge e2 = graph.addEdge("B", "C"); DefaultEdge e3 = graph.addEdge("C", "A"); System.out.println("Original graph: A -> B -> C -> A"); System.out.println("Edges: " + graph.edgeSet().size()); System.out.println("Vertices: " + graph.vertexSet().size()); // Apply PageRankFAS PageRankFAS pageRankFAS = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); Set feedbackArcSet = pageRankFAS.computeFeedbackArcSet(); System.out.println("Feedback Arc Set size: " + feedbackArcSet.size()); System.out.println("FAS edges: " + feedbackArcSet); // Verify the result verifyAcyclicity(graph, feedbackArcSet); } /** * Demonstrate PageRankFAS on a graph with multiple cycles */ private static void demonstrateMultipleCycles() { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // First cycle: A -> B -> C -> A graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); // Second cycle: D -> E -> F -> D graph.addVertex("D"); graph.addVertex("E"); 
graph.addVertex("F"); graph.addEdge("D", "E"); graph.addEdge("E", "F"); graph.addEdge("F", "D"); // Connect the cycles graph.addEdge("C", "D"); // Add a larger cycle: A -> B -> E -> F -> A graph.addEdge("B", "E"); graph.addEdge("F", "A"); System.out.println("Graph with multiple interconnected cycles"); System.out.println("Edges: " + graph.edgeSet().size()); System.out.println("Vertices: " + graph.vertexSet().size()); // Apply PageRankFAS PageRankFAS pageRankFAS = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); long startTime = System.currentTimeMillis(); Set feedbackArcSet = pageRankFAS.computeFeedbackArcSet(); long endTime = System.currentTimeMillis(); System.out.println("Feedback Arc Set size: " + feedbackArcSet.size()); System.out.println("Computation time: " + (endTime - startTime) + "ms"); verifyAcyclicity(graph, feedbackArcSet); } /** * Demonstrate PageRankFAS on a complex graph */ private static void demonstrateComplexGraph() { Graph graph = createComplexTestGraph(); System.out.println("Complex graph with nested and overlapping cycles"); System.out.println("Edges: " + graph.edgeSet().size()); System.out.println("Vertices: " + graph.vertexSet().size()); // Apply PageRankFAS with timing PageRankFAS pageRankFAS = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); long startTime = System.currentTimeMillis(); Set feedbackArcSet = pageRankFAS.computeFeedbackArcSet(); long endTime = System.currentTimeMillis(); System.out.println("Feedback Arc Set size: " + feedbackArcSet.size()); System.out.println("Computation time: " + (endTime - startTime) + "ms"); System.out.println("FAS ratio: " + String.format( "%.2f%%", 100.0 * feedbackArcSet.size() / graph.edgeSet().size())); verifyAcyclicity(graph, feedbackArcSet); } /** * Compare performance with different graph sizes */ private static void demonstratePerformanceComparison() { int[] graphSizes = {50, 100, 200}; System.out.println("Performance comparison on different graph sizes:"); 
System.out.println("Size\tEdges\tFAS Size\tTime (ms)\tFAS Ratio"); System.out.println("----\t-----\t--------\t---------\t---------"); for (int size : graphSizes) { Graph graph = createRandomGraph(size, size * 2); PageRankFAS pageRankFAS = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); long startTime = System.currentTimeMillis(); Set feedbackArcSet = pageRankFAS.computeFeedbackArcSet(); long endTime = System.currentTimeMillis(); double fasRatio = 100.0 * feedbackArcSet.size() / graph.edgeSet().size(); System.out.printf( "%d\t%d\t%d\t\t%d\t\t%.2f%%\n", size, graph.edgeSet().size(), feedbackArcSet.size(), (endTime - startTime), fasRatio); } } /** * Demonstrate the effect of different PageRank iteration counts */ private static void demonstrateCustomIterations() { Graph graph = createComplexTestGraph(); int[] iterations = {1, 3, 5, 10, 20}; System.out.println("Effect of PageRank iterations on FAS quality:"); System.out.println("Iterations\tFAS Size\tTime (ms)"); System.out.println("----------\t--------\t---------"); for (int iter : iterations) { Graph testGraph = copyGraph(graph); PageRankFAS pageRankFAS = new PageRankFAS<>(testGraph, iter, new SuperTypeToken<>() {}); long startTime = System.currentTimeMillis(); Set feedbackArcSet = pageRankFAS.computeFeedbackArcSet(); long endTime = System.currentTimeMillis(); System.out.printf("%d\t\t%d\t\t%d\n", iter, feedbackArcSet.size(), (endTime - startTime)); } } /** * Create a complex test graph with various cycle structures */ private static Graph createComplexTestGraph() { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // Create vertices for (int i = 0; i < 15; i++) { graph.addVertex("V" + i); } // Create various cycle patterns // Triangle cycles graph.addEdge("V0", "V1"); graph.addEdge("V1", "V2"); graph.addEdge("V2", "V0"); graph.addEdge("V3", "V4"); graph.addEdge("V4", "V5"); graph.addEdge("V5", "V3"); // Square cycle graph.addEdge("V6", "V7"); graph.addEdge("V7", "V8"); graph.addEdge("V8", "V9"); 
graph.addEdge("V9", "V6"); // Overlapping cycles graph.addEdge("V2", "V6"); // Connect triangle to square graph.addEdge("V8", "V0"); // Create larger cycle // Additional complexity graph.addEdge("V10", "V11"); graph.addEdge("V11", "V12"); graph.addEdge("V12", "V13"); graph.addEdge("V13", "V14"); graph.addEdge("V14", "V10"); // Pentagon cycle // Connect to main component graph.addEdge("V5", "V10"); graph.addEdge("V12", "V3"); return graph; } /** * Create a random graph for testing */ private static Graph createRandomGraph(int numVertices, int numEdges) { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // Add vertices for (int i = 0; i < numVertices; i++) { graph.addVertex("V" + i); } // Add random edges java.util.Random random = new java.util.Random(42); // Fixed seed for reproducibility java.util.List vertices = new java.util.ArrayList<>(graph.vertexSet()); int edgesAdded = 0; int attempts = 0; while (edgesAdded < numEdges && attempts < numEdges * 3) { String source = vertices.get(random.nextInt(vertices.size())); String target = vertices.get(random.nextInt(vertices.size())); if (!source.equals(target) && !graph.containsEdge(source, target)) { graph.addEdge(source, target); edgesAdded++; } attempts++; } return graph; } /** * Copy a graph */ private static Graph copyGraph(Graph original) { Graph copy = new DefaultDirectedGraph<>(DefaultEdge.class); // Add vertices original.vertexSet().forEach(copy::addVertex); // Add edges original.edgeSet().forEach(edge -> { String source = original.getEdgeSource(edge); String target = original.getEdgeTarget(edge); copy.addEdge(source, target); }); return copy; } /** * Verify that removing the FAS makes the graph acyclic */ private static void verifyAcyclicity(Graph originalGraph, Set feedbackArcSet) { Graph testGraph = copyGraph(originalGraph); // Remove FAS edges feedbackArcSet.forEach(testGraph::removeEdge); // Check if acyclic PageRankFAS verifier = new PageRankFAS<>(testGraph, new SuperTypeToken<>() {}); Set 
remainingFAS = verifier.computeFeedbackArcSet(); if (remainingFAS.isEmpty()) { System.out.println("✓ Verification successful: Graph is acyclic after FAS removal"); } else { System.out.println("✗ Verification failed: " + remainingFAS.size() + " cycles remain after FAS removal"); } } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/arc/pageRank/PageRankFASTest.java ================================================ package org.hjug.feedback.arc.pageRank; import static org.junit.jupiter.api.Assertions.*; import java.util.*; import org.hjug.feedback.SuperTypeToken; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; /** * Comprehensive unit tests for the PageRankFAS algorithm with custom LineDigraph */ class PageRankFASTest { private PageRankFAS pageRankFAS; @Nested @DisplayName("LineDigraph Implementation Tests") class LineDigraphTests { @Test @DisplayName("Test LineDigraph basic operations") void testLineDigraphBasicOperations() { LineDigraph lineDigraph = new LineDigraph<>(); // Test empty digraph assertTrue(lineDigraph.isEmpty()); assertEquals(0, lineDigraph.vertexCount()); assertEquals(0, lineDigraph.edgeCount()); // Create test line vertices DefaultEdge edge1 = new DefaultEdge(); DefaultEdge edge2 = new DefaultEdge(); LineVertex lv1 = new LineVertex<>("A", "B", edge1); LineVertex lv2 = new LineVertex<>("B", "C", edge2); // Test adding vertices assertTrue(lineDigraph.addVertex(lv1)); assertFalse(lineDigraph.addVertex(lv1)); // Should not add duplicate assertTrue(lineDigraph.addVertex(lv2)); assertEquals(2, lineDigraph.vertexCount()); assertTrue(lineDigraph.containsVertex(lv1)); assertTrue(lineDigraph.containsVertex(lv2)); // Test adding edges assertTrue(lineDigraph.addEdge(lv1, lv2)); assertFalse(lineDigraph.addEdge(lv1, lv2)); // Should 
not add duplicate assertEquals(1, lineDigraph.edgeCount()); assertTrue(lineDigraph.containsEdge(lv1, lv2)); assertFalse(lineDigraph.containsEdge(lv2, lv1)); } @Test @DisplayName("Test LineDigraph degree calculations") void testLineDigraphDegrees() { LineDigraph lineDigraph = new LineDigraph<>(); DefaultEdge e1 = new DefaultEdge(); DefaultEdge e2 = new DefaultEdge(); DefaultEdge e3 = new DefaultEdge(); LineVertex lv1 = new LineVertex<>("A", "B", e1); LineVertex lv2 = new LineVertex<>("B", "C", e2); LineVertex lv3 = new LineVertex<>("C", "A", e3); lineDigraph.addVertex(lv1); lineDigraph.addVertex(lv2); lineDigraph.addVertex(lv3); lineDigraph.addEdge(lv1, lv2); lineDigraph.addEdge(lv2, lv3); lineDigraph.addEdge(lv3, lv1); // Test degrees assertEquals(1, lineDigraph.getOutDegree(lv1)); assertEquals(1, lineDigraph.getInDegree(lv1)); assertEquals(2, lineDigraph.getTotalDegree(lv1)); // Test neighbors assertEquals(Set.of(lv2), lineDigraph.getOutgoingNeighbors(lv1)); assertEquals(Set.of(lv3), lineDigraph.getIncomingNeighbors(lv1)); assertEquals(Set.of(lv2, lv3), lineDigraph.getAllNeighbors(lv1)); } @Test @DisplayName("Test LineDigraph sources and sinks") void testLineDigraphSourcesAndSinks() { LineDigraph lineDigraph = new LineDigraph<>(); DefaultEdge e1 = new DefaultEdge(); DefaultEdge e2 = new DefaultEdge(); DefaultEdge e3 = new DefaultEdge(); LineVertex source = new LineVertex<>("A", "B", e1); LineVertex middle = new LineVertex<>("B", "C", e2); LineVertex sink = new LineVertex<>("C", "D", e3); lineDigraph.addVertex(source); lineDigraph.addVertex(middle); lineDigraph.addVertex(sink); lineDigraph.addEdge(source, middle); lineDigraph.addEdge(middle, sink); // Test sources and sinks assertEquals(Set.of(source), lineDigraph.getSources()); assertEquals(Set.of(sink), lineDigraph.getSinks()); } @Test @DisplayName("Test LineDigraph path finding") void testLineDigraphPathFinding() { LineDigraph lineDigraph = new LineDigraph<>(); DefaultEdge e1 = new DefaultEdge(); DefaultEdge e2 
= new DefaultEdge(); DefaultEdge e3 = new DefaultEdge(); LineVertex lv1 = new LineVertex<>("A", "B", e1); LineVertex lv2 = new LineVertex<>("B", "C", e2); LineVertex lv3 = new LineVertex<>("C", "D", e3); lineDigraph.addVertex(lv1); lineDigraph.addVertex(lv2); lineDigraph.addVertex(lv3); lineDigraph.addEdge(lv1, lv2); lineDigraph.addEdge(lv2, lv3); // Test path existence assertTrue(lineDigraph.hasPath(lv1, lv2)); assertTrue(lineDigraph.hasPath(lv1, lv3)); assertTrue(lineDigraph.hasPath(lv2, lv3)); assertFalse(lineDigraph.hasPath(lv3, lv1)); // Test reachable vertices Set> reachable = lineDigraph.getReachableVertices(lv1); assertEquals(Set.of(lv1, lv2, lv3), reachable); } @Test @DisplayName("Test LineDigraph topological sort") void testLineDigraphTopologicalSort() { LineDigraph lineDigraph = new LineDigraph<>(); DefaultEdge e1 = new DefaultEdge(); DefaultEdge e2 = new DefaultEdge(); DefaultEdge e3 = new DefaultEdge(); LineVertex lv1 = new LineVertex<>("A", "B", e1); LineVertex lv2 = new LineVertex<>("B", "C", e2); LineVertex lv3 = new LineVertex<>("C", "D", e3); lineDigraph.addVertex(lv1); lineDigraph.addVertex(lv2); lineDigraph.addVertex(lv3); lineDigraph.addEdge(lv1, lv2); lineDigraph.addEdge(lv2, lv3); // Test topological sort on acyclic graph List> sorted = lineDigraph.topologicalSort(); assertEquals(3, sorted.size()); assertEquals(lv1, sorted.get(0)); assertEquals(lv2, sorted.get(1)); assertEquals(lv3, sorted.get(2)); // Add cycle and test lineDigraph.addEdge(lv3, lv1); List> cyclicSort = lineDigraph.topologicalSort(); assertTrue(cyclicSort.isEmpty()); // Should return empty for cyclic graphs } @Test @DisplayName("Test LineDigraph consistency validation") void testLineDigraphConsistency() { LineDigraph lineDigraph = new LineDigraph<>(); DefaultEdge e1 = new DefaultEdge(); DefaultEdge e2 = new DefaultEdge(); LineVertex lv1 = new LineVertex<>("A", "B", e1); LineVertex lv2 = new LineVertex<>("B", "C", e2); lineDigraph.addVertex(lv1); lineDigraph.addVertex(lv2); 
lineDigraph.addEdge(lv1, lv2); // Should be consistent assertTrue(lineDigraph.validateConsistency()); // Test copy operation LineDigraph copy = lineDigraph.copy(); assertEquals(lineDigraph.vertexCount(), copy.vertexCount()); assertEquals(lineDigraph.edgeCount(), copy.edgeCount()); assertTrue(copy.validateConsistency()); } } @Nested @DisplayName("Updated PageRankFAS Algorithm Tests") class UpdatedAlgorithmTests { @Test @DisplayName("Test updated algorithm on simple cycle") void testUpdatedAlgorithmSimpleCycle() { Graph graph = createSimpleCycle(); pageRankFAS = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); Set fas = pageRankFAS.computeFeedbackArcSet(); assertEquals(1, fas.size(), "FAS should contain exactly one edge for simple cycle"); // Verify that removing the FAS makes the graph acyclic fas.forEach(graph::removeEdge); PageRankFAS verifier = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); assertTrue(verifier.computeFeedbackArcSet().isEmpty(), "Graph should be acyclic after removing FAS"); } @Test @DisplayName("Test updated algorithm execution statistics") void testExecutionStatistics() { Graph graph = createComplexGraph(); pageRankFAS = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); Map stats = pageRankFAS.getExecutionStatistics(graph); assertNotNull(stats); assertTrue(stats.containsKey("originalVertices")); assertTrue(stats.containsKey("originalEdges")); assertTrue(stats.containsKey("pageRankIterations")); assertTrue(stats.containsKey("sccCount")); assertTrue(stats.containsKey("trivialSCCs")); assertTrue(stats.containsKey("nonTrivialSCCs")); assertTrue(stats.containsKey("largestSCCSize")); assertEquals(graph.vertexSet().size(), stats.get("originalVertices")); assertEquals(graph.edgeSet().size(), stats.get("originalEdges")); } @Test @DisplayName("Test updated algorithm with multiple SCCs") void testMultipleSCCs() { Graph graph = createMultipleSCCGraph(); pageRankFAS = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); Set fas = 
pageRankFAS.computeFeedbackArcSet(); // Verify that the result breaks all cycles fas.forEach(graph::removeEdge); PageRankFAS verifier = new PageRankFAS<>(graph, new SuperTypeToken<>() {}); assertTrue(verifier.computeFeedbackArcSet().isEmpty(), "Graph should be acyclic after removing FAS"); // Check execution statistics Map stats = pageRankFAS.getExecutionStatistics(createMultipleSCCGraph()); assertTrue((Integer) stats.get("nonTrivialSCCs") >= 2, "Should have multiple non-trivial SCCs"); } @Test @DisplayName("Test performance comparison with different PageRank iterations") void testPerformanceWithDifferentIterations() { Graph graph = createComplexGraph(); int[] iterations = {1, 3, 5, 10}; Map fasSize = new HashMap<>(); Map executionTime = new HashMap<>(); for (int iter : iterations) { Graph testGraph = copyGraph(graph); PageRankFAS algorithm = new PageRankFAS<>(testGraph, iter, new SuperTypeToken<>() {}); long startTime = System.currentTimeMillis(); Set fas = algorithm.computeFeedbackArcSet(); long endTime = System.currentTimeMillis(); fasSize.put(iter, fas.size()); executionTime.put(iter, endTime - startTime); // Verify correctness fas.forEach(testGraph::removeEdge); PageRankFAS verifier = new PageRankFAS<>(testGraph, new SuperTypeToken<>() {}); assertTrue( verifier.computeFeedbackArcSet().isEmpty(), "Graph should be acyclic after removing FAS (iter=" + iter + ")"); } // Log results for analysis System.out.println("Performance analysis:"); for (int iter : iterations) { System.out.printf( "Iterations: %d, FAS size: %d, Time: %dms%n", iter, fasSize.get(iter), executionTime.get(iter)); } } } // Helper methods for creating test graphs private Graph createSimpleCycle() { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); return graph; } private Graph createComplexGraph() { Graph graph = new 
DefaultDirectedGraph<>(DefaultEdge.class); // Create vertices for (int i = 0; i < 8; i++) { graph.addVertex("V" + i); } // Create multiple cycles graph.addEdge("V0", "V1"); graph.addEdge("V1", "V2"); graph.addEdge("V2", "V0"); // Triangle cycle graph.addEdge("V3", "V4"); graph.addEdge("V4", "V5"); graph.addEdge("V5", "V6"); graph.addEdge("V6", "V3"); // Square cycle // Overlapping cycle graph.addEdge("V2", "V3"); graph.addEdge("V5", "V7"); graph.addEdge("V7", "V1"); // Creates larger cycle return graph; } private Graph createMultipleSCCGraph() { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // SCC 1: A <-> B graph.addVertex("A"); graph.addVertex("B"); graph.addEdge("A", "B"); graph.addEdge("B", "A"); // SCC 2: C <-> D <-> E graph.addVertex("C"); graph.addVertex("D"); graph.addVertex("E"); graph.addEdge("C", "D"); graph.addEdge("D", "E"); graph.addEdge("E", "C"); // SCC 3: F -> G -> H -> F graph.addVertex("F"); graph.addVertex("G"); graph.addVertex("H"); graph.addEdge("F", "G"); graph.addEdge("G", "H"); graph.addEdge("H", "F"); // Connections between SCCs (acyclic) graph.addEdge("B", "C"); graph.addEdge("E", "F"); return graph; } private Graph copyGraph(Graph original) { Graph copy = new DefaultDirectedGraph<>(DefaultEdge.class); // Add vertices original.vertexSet().forEach(copy::addVertex); // Add edges original.edgeSet().forEach(edge -> { String source = original.getEdgeSource(edge); String target = original.getEdgeTarget(edge); copy.addEdge(source, target); }); return copy; } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/vertex/approximate/FeedbackVertexSetBenchmarkTest.java ================================================ package org.hjug.feedback.vertex.approximate; import java.util.*; import java.util.concurrent.ThreadLocalRandom; import java.util.stream.IntStream; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; import 
org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; /** * Performance benchmark tests[8] */ class FeedbackVertexSetBenchmarkTest { @Test @DisplayName("Benchmark: Various graph sizes and densities") void benchmarkGraphSizes() { int[] sizes = {20, 50, 100, 200}; double[] densities = {0.1, 0.3, 0.5}; System.out.println("=== Feedback Vertex Set Benchmark ==="); System.out.printf("%-10s %-15s %-15s %-15s %-15s%n", "Size", "Density", "Vertices", "Edges", "Time (ms)"); for (int size : sizes) { for (double density : densities) { Graph graph = createRandomGraph(size, density); long startTime = System.currentTimeMillis(); FeedbackVertexSetSolver solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); long endTime = System.currentTimeMillis(); System.out.printf( "%-10d %-15.1f %-15d %-15d %-15d%n", size, density, graph.vertexSet().size(), graph.edgeSet().size(), endTime - startTime); } } } private Graph createRandomGraph(int size, double density) { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // Add vertices IntStream.range(0, size).forEach(i -> graph.addVertex("V" + i)); List vertices = new ArrayList<>(graph.vertexSet()); ThreadLocalRandom random = ThreadLocalRandom.current(); int maxEdges = size * (size - 1); int targetEdges = (int) (maxEdges * density); int addedEdges = 0; while (addedEdges < targetEdges) { String source = vertices.get(random.nextInt(vertices.size())); String target = vertices.get(random.nextInt(vertices.size())); if (!source.equals(target) && !graph.containsEdge(source, target)) { graph.addEdge(source, target); addedEdges++; } } return graph; } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/vertex/approximate/FeedbackVertexSetExample.java ================================================ package org.hjug.feedback.vertex.approximate; import java.util.Map; import java.util.Set; import org.jgrapht.Graph; import 
org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; public class FeedbackVertexSetExample { public static void main(String[] args) { // Create a directed graph with cycles Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // Add vertices graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addVertex("D"); // Add edges creating cycles graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); // Creates cycle A->B->C->A graph.addEdge("C", "D"); graph.addEdge("D", "A"); // Creates cycle A->B->C->D->A // Define vertex weights (optional) Map weights = Map.of("A", 1.0, "B", 2.0, "C", 1.5, "D", 1.0); // Define special vertices (optional - all vertices by default) Set specialVertices = Set.of("A", "B", "C", "D"); // Solve the FVS problem FeedbackVertexSetSolver solver = new FeedbackVertexSetSolver<>(graph, specialVertices, weights, 0.1); FeedbackVertexSetResult result = solver.solve(); System.out.println("Feedback vertex set: " + result.getFeedbackVertices()); System.out.println("Solution size: " + result.size()); } } ================================================ FILE: graph-algorithms/src/test/java/org/hjug/feedback/vertex/approximate/FeedbackVertexSetSolverTest.java ================================================ package org.hjug.feedback.vertex.approximate; import static org.junit.jupiter.api.Assertions.*; import java.util.*; import java.util.concurrent.ThreadLocalRandom; import java.util.stream.IntStream; import org.jgrapht.Graph; import org.jgrapht.alg.cycle.CycleDetector; import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; import org.junit.jupiter.api.*; import org.junit.jupiter.api.parallel.Execution; import org.junit.jupiter.api.parallel.ExecutionMode; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; /** * Comprehensive unit tests for the FeedbackVertexSetSolver[6] */ 
@Execution(ExecutionMode.CONCURRENT) class FeedbackVertexSetSolverTest { private Graph graph; private FeedbackVertexSetSolver solver; @BeforeEach void setUp() { graph = new DefaultDirectedGraph<>(DefaultEdge.class); } @Nested @DisplayName("Basic Algorithm Tests") class BasicAlgorithmTests { @Test @DisplayName("Should handle empty graph") void testEmptyGraph() { solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); assertTrue(result.getFeedbackVertices().isEmpty()); assertEquals(0, result.size()); } @Test @DisplayName("Should handle single vertex") void testSingleVertex() { graph.addVertex("A"); solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); assertEquals(0, result.size()); } @Test @DisplayName("Should handle acyclic graph") void testAcyclicGraph() { // Create a simple DAG: A -> B -> C[7] graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); assertEquals(0, result.size()); } @Test @DisplayName("Should handle simple cycle") void testSimpleCycle() { // Create a simple cycle: A -> B -> C -> A[7] graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); // Should break the cycle with at least one vertex assertTrue(result.size() >= 1); assertFalse(isGraphIsAcyclicAfterRemoval(result)); } @Test @DisplayName("Should handle self-loop") void testSelfLoop() { graph.addVertex("A"); graph.addEdge("A", "A"); Set specialVertices = Set.of("A"); solver = new FeedbackVertexSetSolver<>(graph, specialVertices, null, 0.1); FeedbackVertexSetResult result = solver.solve(); assertEquals(1, 
result.size()); assertTrue(result.getFeedbackVertices().contains("A")); } } @Nested @DisplayName("Complex Graph Tests") class ComplexGraphTests { @Test @DisplayName("Should handle multiple cycles") void testMultipleCycles() { // Create graph with multiple overlapping cycles[5] String[] vertices = {"A", "B", "C", "D", "E"}; for (String v : vertices) { graph.addVertex(v); } // Create cycles: A->B->C->A and C->D->E->C graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); graph.addEdge("C", "D"); graph.addEdge("D", "E"); graph.addEdge("E", "C"); solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); assertTrue(result.size() >= 1); assertFalse(isGraphIsAcyclicAfterRemoval(result)); } @Test @DisplayName("Should handle disconnected components") void testDisconnectedComponents() { // Component 1: A -> B -> A graph.addVertex("A"); graph.addVertex("B"); graph.addEdge("A", "B"); graph.addEdge("B", "A"); // Component 2: C -> D (acyclic) graph.addVertex("C"); graph.addVertex("D"); graph.addEdge("C", "D"); // Component 3: E (isolated) graph.addVertex("E"); solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); assertTrue(result.size() >= 1); assertFalse(isGraphIsAcyclicAfterRemoval(result)); } } @Nested @DisplayName("Performance Tests") class PerformanceTests { @ParameterizedTest @ValueSource(ints = {10, 25, 50}) @DisplayName("Should handle random graphs efficiently") @Disabled("Not consistent") void testRandomGraphPerformance(int size) { createRandomGraph(size, size * 2); long startTime = System.currentTimeMillis(); solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1); FeedbackVertexSetResult result = solver.solve(); long endTime = System.currentTimeMillis(); // Performance should be reasonable[8] assertTrue(endTime - startTime < 20000, "Algorithm took too long: " + (endTime - startTime) + "ms"); if (hasCycles(graph)) { 
// NOTE(review): this chunk begins mid-test — the assertion and closing braces
// below belong to a test method whose opening lines are before this view.
// NOTE(review): raw types throughout (Map, Set, Graph, List, CycleDetector,
// FeedbackVertexSetResult); generic parameters appear to have been stripped
// during extraction — verify against the original source before compiling.
        assertFalse(isGraphIsAcyclicAfterRemoval(result));
        }
        }

        // Disabled: weighted-vertex support is not planned for production use.
        @Test
        @DisplayName("Should handle weighted vertices")
        @Disabled("Not planning to use weighted vertices")
        void testWeightedVertices() {
            // Create a cycle with different vertex weights
            graph.addVertex("A");
            graph.addVertex("B");
            graph.addVertex("C");
            graph.addEdge("A", "B");
            graph.addEdge("B", "C");
            graph.addEdge("C", "A");
            Map weights = Map.of("A", 1.0, "B", 10.0, "C", 1.0);
            solver = new FeedbackVertexSetSolver<>(graph, null, weights, 0.1);
            FeedbackVertexSetResult result = solver.solve();
            assertTrue(result.size() >= 1);
            // Should prefer removing lower weight vertices
            System.out.println("Feedback vertices: " + result.getFeedbackVertices());
            assertFalse(result.getFeedbackVertices().contains("B"));
        }
    }

    @Nested
    @DisplayName("Correctness Tests")
    class CorrectnessTests {

        @Test
        @DisplayName("Should maintain approximation guarantees")
        @Disabled("Not consistent")
        void testApproximationBounds() {
            createRandomGraph(20, 40);
            solver = new FeedbackVertexSetSolver<>(graph, null, null, 0.1);
            FeedbackVertexSetResult result = solver.solve();
            // The solution should be bounded by the theoretical guarantees[1]
            int n = graph.vertexSet().size();
            assertTrue(result.size() <= n, "Solution size should be at most n");
            if (hasCycles(graph)) {
                assertFalse(isGraphIsAcyclicAfterRemoval(result));
            }
        }

        @Test
        @DisplayName("Should handle special vertex constraints")
        void testSpecialVertexConstraints() {
            // Create cycle where only some vertices are "special"
            graph.addVertex("A");
            graph.addVertex("B");
            graph.addVertex("C");
            graph.addVertex("D");
            graph.addEdge("A", "B");
            graph.addEdge("B", "C");
            graph.addEdge("C", "D");
            graph.addEdge("D", "A");
            Set specialVertices = Set.of("A", "C"); // Only A and C are special
            solver = new FeedbackVertexSetSolver<>(graph, specialVertices, null, 0.1);
            FeedbackVertexSetResult result = solver.solve();
            // Should only consider cycles involving special vertices
            assertTrue(result.size() >= 1);
        }
    }

    // Helper methods

    // Populates the shared 'graph' fixture with vertexCount vertices and up to
    // edgeCount random, distinct, non-loop edges.
    private void createRandomGraph(int vertexCount, int edgeCount) {
        ThreadLocalRandom random = ThreadLocalRandom.current();
        // Add vertices[10]
        IntStream.range(0, vertexCount).forEach(i -> graph.addVertex("V" + i));
        List vertices = new ArrayList<>(graph.vertexSet());
        // Add random edges
        int addedEdges = 0;
        while (addedEdges < edgeCount && addedEdges < vertexCount * (vertexCount - 1)) {
            String source = vertices.get(random.nextInt(vertices.size()));
            String target = vertices.get(random.nextInt(vertices.size()));
            if (!source.equals(target) && !graph.containsEdge(source, target)) {
                graph.addEdge(source, target);
                addedEdges++;
            }
        }
    }

    // True when the given graph contains at least one directed cycle.
    private boolean hasCycles(Graph graph) {
        CycleDetector cycleDetector = new CycleDetector<>(graph);
        return cycleDetector.detectCycles();
    }

    // NOTE(review): despite the name, this returns true when cycles REMAIN
    // after removing the feedback vertices (callers assertFalse on it) —
    // consider renaming in a follow-up change.
    private boolean isGraphIsAcyclicAfterRemoval(FeedbackVertexSetResult result) {
        Graph testGraph = createGraphWithoutFeedbackVertices(result);
        // Verify the resulting graph is acyclic[6]
        CycleDetector cycleDetector = new CycleDetector<>(testGraph);
        System.out.println(cycleDetector.findCycles());
        return cycleDetector.detectCycles();
        // assertFalse(hasCycles, "Graph should be acyclic after removing feedback vertices");
    }

    // Copies 'graph' minus the solver-selected feedback vertices (and any edge
    // incident to a removed vertex).
    private Graph createGraphWithoutFeedbackVertices(FeedbackVertexSetResult result) {
        // Create a copy of the graph without feedback vertices[6]
        Graph testGraph = new DefaultDirectedGraph<>(DefaultEdge.class);
        // Add vertices except feedback vertices
        graph.vertexSet().stream()
                .filter(v -> !result.getFeedbackVertices().contains(v))
                .forEach(testGraph::addVertex);
        // Add edges between remaining vertices
        for (DefaultEdge edge : graph.edgeSet()) {
            String source = graph.getEdgeSource(edge);
            String target = graph.getEdgeTarget(edge);
            if (testGraph.containsVertex(source) && testGraph.containsVertex(target)) {
                testGraph.addEdge(source, target);
            }
        }
        return testGraph;
    }
}
================================================ FILE:
graph-algorithms/src/test/java/org/hjug/feedback/vertex/kernelized/DirectedFeedbackVertexSetBenchmarkTest.java
================================================
package org.hjug.feedback.vertex.kernelized;

import java.util.*;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.IntStream;
import org.hjug.feedback.SuperTypeToken;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

/**
 * Performance benchmark tests for the kernelization algorithm[1]
 */
// NOTE(review): raw types below (Graph, List, solver/result declarations);
// generic parameters appear stripped during extraction — verify against the
// original source.
class DirectedFeedbackVertexSetBenchmarkTest {

    // Sweeps graph size x treewidth parameter (eta) x edge density and prints
    // wall-clock solve time per combination. Purely informational: no
    // assertions are made and the solver result is not inspected.
    @Test
    @DisplayName("Benchmark: Various graph sizes and treewidth parameters")
    void benchmarkGraphSizes() {
        int[] sizes = {20, 50, 100};
        int[] etaValues = {1, 2, 3};
        double[] densities = {0.1, 0.3, 0.5};
        System.out.println("=== Directed Feedback Vertex Set Benchmark ===");
        System.out.printf(
                "%-10s %-10s %-15s %-15s %-15s %-15s%n", "Size", "Eta", "Density", "Vertices", "Edges", "Time (ms)");
        for (int size : sizes) {
            for (int eta : etaValues) {
                for (double density : densities) {
                    Graph graph = createRandomGraph(size, density);
                    long startTime = System.currentTimeMillis();
                    DirectedFeedbackVertexSetSolver solver =
                            new DirectedFeedbackVertexSetSolver<>(graph, null, null, eta, new SuperTypeToken<>() {});
                    // Budget k = size / 4.
                    DirectedFeedbackVertexSetResult result = solver.solve(size / 4);
                    long endTime = System.currentTimeMillis();
                    System.out.printf(
                            "%-10d %-10d %-15.1f %-15d %-15d %-15d%n",
                            size,
                            eta,
                            density,
                            graph.vertexSet().size(),
                            graph.edgeSet().size(),
                            endTime - startTime);
                }
            }
        }
    }

    // Random simple digraph with 'size' vertices and roughly
    // density * size * (size - 1) distinct non-loop edges.
    private Graph createRandomGraph(int size, double density) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        // Add vertices
        IntStream.range(0, size).forEach(i -> graph.addVertex("V" + i));
        List vertices = new ArrayList<>(graph.vertexSet());
        ThreadLocalRandom random = ThreadLocalRandom.current();
        int maxEdges = size * (size - 1);
        int targetEdges = (int) (maxEdges * density);
        int addedEdges = 0;
        while (addedEdges < targetEdges) {
            String source = vertices.get(random.nextInt(vertices.size()));
            String target = vertices.get(random.nextInt(vertices.size()));
            if (!source.equals(target) && !graph.containsEdge(source, target)) {
                graph.addEdge(source, target);
                addedEdges++;
            }
        }
        return graph;
    }
}

================================================
FILE: graph-algorithms/src/test/java/org/hjug/feedback/vertex/kernelized/DirectedFeedbackVertexSetExample.java
================================================
package org.hjug.feedback.vertex.kernelized;

import java.util.Map;
import java.util.Set;
import org.hjug.feedback.SuperTypeToken;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;

// Small runnable demo of the DFVS solver on a 4-vertex graph with two
// overlapping cycles.
public class DirectedFeedbackVertexSetExample {

    public static void main(String[] args) {
        // Create a directed graph with cycles
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);

        // Add vertices
        graph.addVertex("A");
        graph.addVertex("B");
        graph.addVertex("C");
        graph.addVertex("D");

        // Add edges creating cycles
        graph.addEdge("A", "B");
        graph.addEdge("B", "C");
        graph.addEdge("C", "A"); // Creates cycle A->B->C->A
        graph.addEdge("C", "D");
        graph.addEdge("D", "A"); // Creates cycle A->B->C->D->A

        // Define treewidth modulator (optional)
        Set modulator = Set.of("A", "C");

        // Define vertex weights (optional)
        Map weights = Map.of("A", 1.0, "B", 2.0, "C", 1.5, "D", 1.0);

        // Solve the DFVS problem with treewidth parameter η=2
        DirectedFeedbackVertexSetSolver solver =
                new DirectedFeedbackVertexSetSolver<>(graph, modulator, weights, 2, new SuperTypeToken<>() {});
        DirectedFeedbackVertexSetResult result = solver.solve(3);

        System.out.println("Feedback vertex set: " + result.getFeedbackVertices());
        System.out.println("Solution size: " + result.size());
    }
}

================================================ FILE:
graph-algorithms/src/test/java/org/hjug/feedback/vertex/kernelized/DirectedFeedbackVertexSetSolverTest.java
================================================
package org.hjug.feedback.vertex.kernelized;

import static org.junit.jupiter.api.Assertions.*;

import java.util.*;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.IntStream;
import org.hjug.feedback.SuperTypeToken;
import org.jgrapht.Graph;
import org.jgrapht.alg.cycle.CycleDetector;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

/**
 * Comprehensive unit tests for the DirectedFeedbackVertexSetSolver[1]
 */
// NOTE(review): raw types throughout (Graph, Map, Set, solver/result
// declarations); generic parameters appear stripped during extraction —
// verify against the original source.
@Execution(ExecutionMode.CONCURRENT)
class DirectedFeedbackVertexSetSolverTest {

    // Fresh graph fixture per test (see setUp); each test builds its own solver.
    private Graph graph;
    private DirectedFeedbackVertexSetSolver solver;

    @BeforeEach
    void setUp() {
        graph = new DefaultDirectedGraph<>(DefaultEdge.class);
    }

    @Nested
    @DisplayName("Basic Algorithm Tests")
    class BasicAlgorithmTests {

        @Test
        @DisplayName("Should handle empty graph")
        void testEmptyGraph() {
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(1);
            assertTrue(result.getFeedbackVertices().isEmpty());
            assertEquals(0, result.size());
        }

        @Test
        @DisplayName("Should handle single vertex")
        void testSingleVertex() {
            graph.addVertex("A");
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(1);
            assertEquals(0, result.size());
        }

        @Test
        @DisplayName("Should handle acyclic graph")
        void testAcyclicGraph() {
            // Create a simple DAG: A -> B -> C[17]
            graph.addVertex("A");
            graph.addVertex("B");
            graph.addVertex("C");
            graph.addEdge("A", "B");
            graph.addEdge("B", "C");
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(2);
            assertEquals(0, result.size());
        }

        @Test
        @DisplayName("Should handle simple cycle")
        void testSimpleCycle() {
            // Create a simple cycle: A -> B -> C -> A
            graph.addVertex("A");
            graph.addVertex("B");
            graph.addVertex("C");
            graph.addEdge("A", "B");
            graph.addEdge("B", "C");
            graph.addEdge("C", "A");
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(2);
            // Should break the cycle with at least one vertex
            assertTrue(result.size() >= 1);
            assertGraphIsAcyclicAfterRemoval(result);
        }

        @Test
        @DisplayName("Should handle self-loop")
        void testSelfLoop() {
            graph.addVertex("A");
            graph.addEdge("A", "A");
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(1);
            assertEquals(1, result.size());
            assertTrue(result.getFeedbackVertices().contains("A"));
        }
    }

    @Nested
    @DisplayName("Complex Graph Tests")
    class ComplexGraphTests {

        @Test
        @DisplayName("Should handle multiple cycles")
        void testMultipleCycles() {
            // Create graph with multiple overlapping cycles
            String[] vertices = {"A", "B", "C", "D", "E"};
            for (String v : vertices) {
                graph.addVertex(v);
            }
            // Create cycles: A->B->C->A and C->D->E->C
            graph.addEdge("A", "B");
            graph.addEdge("B", "C");
            graph.addEdge("C", "A");
            graph.addEdge("C", "D");
            graph.addEdge("D", "E");
            graph.addEdge("E", "C");
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(3);
            assertTrue(result.size() >= 1);
            assertGraphIsAcyclicAfterRemoval(result);
        }

        @Test
        @DisplayName("Should handle treewidth modulator")
        void testTreewidthModulator() {
            // Create a graph with a known modulator
            graph.addVertex("A");
            graph.addVertex("B");
            graph.addVertex("C");
            graph.addVertex("D");
            graph.addEdge("A", "B");
            graph.addEdge("B", "C");
            graph.addEdge("C", "A");
            graph.addEdge("A", "D");
            Set modulator = Set.of("A"); // A is the modulator
            solver = new DirectedFeedbackVertexSetSolver<>(graph, modulator, null, 1, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(2); // there are 2 SCCs
            // removing A breaks the graph into 2 distinct trees: B->C, D
            // no results means there are no feedback vertices to remove
            assertTrue(result.size() == 0);
        }

        @Test
        @DisplayName("Should handle weighted vertices")
        void testWeightedVertices() {
            // Create a cycle with different vertex weights
            graph.addVertex("A");
            graph.addVertex("B");
            graph.addVertex("C");
            graph.addEdge("A", "B");
            graph.addEdge("B", "C");
            graph.addEdge("C", "A");
            Map weights = Map.of("A", 1.0, "B", 10.0, "C", 1.0);
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, weights, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(2);
            assertTrue(result.size() >= 1);
            // Should prefer removing lower weight vertices
            if (result.size() == 1) {
                assertFalse(result.getFeedbackVertices().contains("B"));
            }
        }
    }

    @Nested
    @DisplayName("Performance Tests")
    @Disabled("Not consistent")
    class PerformanceTests {

        @ParameterizedTest
        @ValueSource(ints = {10, 25, 50})
        @DisplayName("Should handle random graphs efficiently")
        void testRandomGraphPerformance(int size) {
            createRandomGraph(size, size * 2);
            long startTime = System.currentTimeMillis();
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(size / 3);
            long endTime = System.currentTimeMillis();
            // Performance should be reasonable[1]
            assertTrue(endTime - startTime < 20000, "Algorithm took too long: " + (endTime - startTime) + "ms");
            if (hasCycles()) {
                assertGraphIsAcyclicAfterRemoval(result);
            }
        }

        @Test
        @DisplayName("Should utilize parallel processing effectively")
        void testParallelProcessing() {
            createRandomGraph(30, 60);
            long startTime = System.currentTimeMillis();
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(10);
            long endTime = System.currentTimeMillis();
            assertTrue(endTime - startTime < 15000);
            if (hasCycles()) {
                assertGraphIsAcyclicAfterRemoval(result);
            }
        }
    }

    @Nested
    @DisplayName("Kernelization Tests")
    class KernelizationTests {

        @Test
        @DisplayName("Should maintain kernelization properties")
        @Disabled("Not consistent")
        void testKernelizationProperties() {
            createRandomGraph(20, 40);
            solver = new DirectedFeedbackVertexSetSolver<>(graph, null, null, 2, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(5);
            // Solution should be bounded by the kernelization guarantees[1]
            int n = graph.vertexSet().size();
            assertTrue(result.size() <= n, "Solution size should be at most n");
            if (hasCycles()) {
                assertGraphIsAcyclicAfterRemoval(result);
            }
        }

        @Test
        @DisplayName("Should handle zone decomposition correctly")
        void testZoneDecomposition() {
            // Create a graph that will trigger zone decomposition
            graph.addVertex("M1"); // Modulator vertex
            graph.addVertex("Z1"); // Zone vertex 1
            graph.addVertex("Z2"); // Zone vertex 2
            graph.addVertex("Z3"); // Zone vertex 3
            graph.addEdge("M1", "Z1");
            graph.addEdge("Z1", "Z2");
            graph.addEdge("Z2", "Z3");
            graph.addEdge("Z3", "Z1"); // Creates cycle in zone
            Set modulator = Set.of("M1");
            solver = new DirectedFeedbackVertexSetSolver<>(graph, modulator, null, 1, new SuperTypeToken<>() {});
            DirectedFeedbackVertexSetResult result = solver.solve(2);
            assertTrue(result.size() >= 1);
            assertGraphIsAcyclicAfterRemoval(result);
        }
    }

    // Helper methods

    // Populates the shared 'graph' fixture with vertexCount vertices and up to
    // edgeCount random, distinct, non-loop edges.
    private void createRandomGraph(int vertexCount, int edgeCount) {
        ThreadLocalRandom random = ThreadLocalRandom.current();
        // Add vertices [18]
        IntStream.range(0, vertexCount).forEach(i -> graph.addVertex("V" + i));
        List vertices = new ArrayList<>(graph.vertexSet());
        // Add random edges
        int addedEdges = 0;
        while (addedEdges < edgeCount && addedEdges < vertexCount * (vertexCount - 1)) {
            String source = vertices.get(random.nextInt(vertices.size()));
            String target = vertices.get(random.nextInt(vertices.size()));
            if (!source.equals(target) && !graph.containsEdge(source, target)) {
                graph.addEdge(source, target);
                addedEdges++;
            }
        }
    }

    // True when the shared 'graph' fixture currently contains a directed cycle.
    private boolean hasCycles() {
        CycleDetector cycleDetector = new CycleDetector<>(graph);
        return cycleDetector.detectCycles();
    }

    // Asserts that deleting the solver-selected feedback vertices (and their
    // incident edges) from 'graph' leaves an acyclic graph.
    private void assertGraphIsAcyclicAfterRemoval(DirectedFeedbackVertexSetResult result) {
        // Create a copy of the graph without feedback vertices[17]
        Graph testGraph = new DefaultDirectedGraph<>(DefaultEdge.class);
        // Add vertices except feedback vertices
        graph.vertexSet().stream()
                .filter(v -> !result.getFeedbackVertices().contains(v))
                .forEach(testGraph::addVertex);
        // Add edges between remaining vertices
        graph.edgeSet().forEach(edge -> {
            String source = graph.getEdgeSource(edge);
            String target = graph.getEdgeTarget(edge);
            if (testGraph.containsVertex(source) && testGraph.containsVertex(target)) {
                testGraph.addEdge(source, target);
            }
        });
        // Verify the resulting graph is acyclic[17]
        CycleDetector cycleDetector = new CycleDetector<>(testGraph);
        assertFalse(cycleDetector.detectCycles(), "Graph should be acyclic after removing feedback vertices");
    }
}

================================================
FILE: graph-algorithms/src/test/java/org/hjug/feedback/vertex/kernelized/ModulatorComputerTest.java
================================================
package org.hjug.feedback.vertex.kernelized;

import static org.junit.jupiter.api.Assertions.*;

import java.util.*;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.IntStream;
import org.hjug.feedback.SuperTypeToken;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.*;
import
org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

// Tests for ModulatorComputer and EnhancedParameterComputer.
// NOTE(review): raw types throughout (Graph, Set, List>, token, ...); generic
// parameters appear stripped during extraction — verify against the original.
@Execution(ExecutionMode.CONCURRENT)
class ModulatorComputerTest {

    private ModulatorComputer modulatorComputer;
    private EnhancedParameterComputer parameterComputer;
    private SuperTypeToken token;

    @BeforeEach
    void setUp() {
        token = new SuperTypeToken<>() {};
        modulatorComputer = new ModulatorComputer<>(token);
        parameterComputer = new EnhancedParameterComputer<>(token);
    }

    // Both computers expose shutdown(); release them after each test.
    @AfterEach
    void tearDown() {
        modulatorComputer.shutdown();
        parameterComputer.shutdown();
    }

    @Nested
    @DisplayName("Modulator Computation Tests")
    class ModulatorComputationTests {

        @Test
        @DisplayName("Should compute empty modulator for tree graph")
        void testTreeGraphModulator() {
            Graph tree = createTreeGraph(10);
            ModulatorComputer.ModulatorResult result = modulatorComputer.computeModulator(tree, 1, 5);
            assertTrue(result.getResultingTreewidth() <= 1);
            assertTrue(result.getSize() <= 2); // Trees have treewidth 1
        }

        @Test
        @DisplayName("Should compute valid modulator for cycle graph")
        void testCycleGraphModulator() {
            Graph cycle = createCycleGraph(6);
            ModulatorComputer.ModulatorResult result = modulatorComputer.computeModulator(cycle, 1, 3);
            /*A tree has treewidth = 1.
            A cycle has treewidth = 2.
            A clique of size n has treewidth = n-1
            The more “grid-like” or “dense” the graph, the higher its treewidth.*/
            assertTrue(result.getResultingTreewidth() <= 2); // this is a cycle
            assertTrue(result.getSize() >= 1); // Need to break cycle
            assertFalse(result.getModulator().isEmpty());
        }

        @Test
        @DisplayName("Should compute modulator for complete graph")
        void testCompleteGraphModulator() {
            Graph complete = createCompleteGraph(5);
            ModulatorComputer.ModulatorResult result = modulatorComputer.computeModulator(complete, 2, 4);
            assertTrue(result.getResultingTreewidth() <= 2);
            assertTrue(result.getSize() >= 2); // Complete graphs have high treewidth
        }

        @Test
        @DisplayName("Should respect modulator size limit")
        void testModulatorSizeLimit() {
            Graph complete = createCompleteGraph(8);
            int maxSize = 3;
            ModulatorComputer.ModulatorResult result = modulatorComputer.computeModulator(complete, 1, maxSize);
            assertTrue(result.getSize() <= maxSize);
        }

        @ParameterizedTest
        @ValueSource(ints = {10, 20, 30})
        @DisplayName("Should handle random graphs efficiently")
        void testRandomGraphModulator(int size) {
            Graph graph = createRandomGraph(size, 0.2);
            long startTime = System.currentTimeMillis();
            ModulatorComputer.ModulatorResult result = modulatorComputer.computeModulator(graph, 3, size / 4);
            long duration = System.currentTimeMillis() - startTime;
            assertTrue(result.getResultingTreewidth() >= 0);
            assertTrue(result.getSize() <= size / 4);
            assertTrue(duration < 10000); // Should complete within 10 seconds
        }

        @Test
        @DisplayName("Should find better modulators with larger budgets")
        void testModulatorQualityImprovement() {
            Graph graph = createGridGraph(4, 4);
            ModulatorComputer.ModulatorResult smallResult = modulatorComputer.computeModulator(graph, 2, 2);
            ModulatorComputer.ModulatorResult largeResult = modulatorComputer.computeModulator(graph, 2, 6);
            // Larger budget should achieve better or equal treewidth
            assertTrue(largeResult.getResultingTreewidth() <= smallResult.getResultingTreewidth());
        }
    }

    @Nested
    @DisplayName("Enhanced Parameter Computer Tests")
    class EnhancedParameterComputerTests {

        @Test
        @DisplayName("Should compute enhanced parameters for simple graph")
        void testSimpleGraphParameters() {
            Graph graph = createCycleGraph(5);
            EnhancedParameterComputer.EnhancedParameters params = parameterComputer.computeOptimalParameters(graph, 3);
            assertTrue(params.getK() >= 1); // Cycle needs feedback vertex set
            assertTrue(params.getModulatorSize() <= 3);
            assertTrue(params.getEta() >= 0);
            assertTrue(params.getTotalParameter() > 0);
        }

        @Test
        @DisplayName("Should compute multiple parameter options")
        void testMultipleParameterOptions() {
            Graph graph = createRandomGraph(15, 0.3);
            List> options = parameterComputer.computeMultipleParameterOptions(graph, 5, 3);
            assertFalse(options.isEmpty());
            assertTrue(options.size() <= 3);
            // Options should be sorted by quality
            for (int i = 1; i < options.size(); i++) {
                assertTrue(
                        options.get(i - 1).getQualityScore() <= options.get(i).getQualityScore());
            }
        }

        @Test
        @DisplayName("Should validate modulators correctly")
        void testModulatorValidation() {
            Graph graph = createPathGraph(8);
            Set emptyModulator = new HashSet<>();
            Set singleVertexModulator = Set.of("V3");
            assertTrue(parameterComputer.validateModulator(graph, emptyModulator, 1));
            assertTrue(parameterComputer.validateModulator(graph, singleVertexModulator, 1));
        }

        @Test
        @DisplayName("Should compute kernel size bounds correctly")
        void testKernelSizeBounds() {
            Graph graph = createCycleGraph(4);
            EnhancedParameterComputer.EnhancedParameters params =
                    parameterComputer.computeOptimalParameters(graph, 2, 1);
            double kernelBound = params.getKernelSizeBound();
            assertTrue(kernelBound >= 1.0);
            assertTrue(kernelBound < Double.MAX_VALUE);
        }

        @Test
        @DisplayName("Should handle edge cases gracefully")
        void testEdgeCases() {
            // Empty graph
            Graph emptyGraph = new DefaultDirectedGraph<>(DefaultEdge.class);
            EnhancedParameterComputer.EnhancedParameters emptyParams =
                    parameterComputer.computeOptimalParameters(emptyGraph, 1);
            assertEquals(0, emptyParams.getK());
            assertTrue(emptyParams.getModulator().isEmpty());
            // Single vertex
            Graph singleVertex = new DefaultDirectedGraph<>(DefaultEdge.class);
            singleVertex.addVertex("V0");
            EnhancedParameterComputer.EnhancedParameters singleParams =
                    parameterComputer.computeOptimalParameters(singleVertex, 1);
            assertEquals(0, singleParams.getK());
            assertEquals(0, singleParams.getEta());
        }
    }

    @Nested
    @DisplayName("Integration and Performance Tests")
    class IntegrationPerformanceTests {

        @Test
        @DisplayName("Should compute parameters for complex graphs")
        void testComplexGraphParameters() {
            // Create a more complex graph structure
            Graph graph = createComplexGraph();
            EnhancedParameterComputer.EnhancedParameters params =
                    parameterComputer.computeOptimalParameters(graph, 5, 2);
            assertTrue(params.getK() >= 0);
            assertTrue(params.getModulatorSize() <= 5);
            assertTrue(params.getEta() <= 2);
            // Verify kernel size bound is reasonable
            double kernelBound = params.getKernelSizeBound();
            assertTrue(kernelBound >= 1.0);
        }

        @Test
        @DisplayName("Should handle concurrent parameter computation")
        void testConcurrentParameterComputation() throws InterruptedException {
            List> graphs = IntStream.range(0, 5)
                    .mapToObj(i -> createRandomGraph(15, 0.25))
                    .collect(java.util.stream.Collectors.toList());
            List>> futures = graphs.stream()
                    .map(graph -> java.util.concurrent.CompletableFuture.supplyAsync(
                            () -> parameterComputer.computeOptimalParameters(graph, 4)))
                    .collect(java.util.stream.Collectors.toList());
            List> results = futures.stream()
                    .map(java.util.concurrent.CompletableFuture::join)
                    .collect(java.util.stream.Collectors.toList());
            assertEquals(5, results.size());
            results.forEach(params -> {
                assertTrue(params.getK() >= 0);
                assertTrue(params.getModulatorSize() <= 4);
                assertTrue(params.getEta() >= 0);
            });
        }

        @RepeatedTest(3)
        @DisplayName("Should produce consistent results")
        void testConsistentResults() {
            Graph graph = createGridGraph(3, 3);
            EnhancedParameterComputer.EnhancedParameters params1 =
                    parameterComputer.computeOptimalParameters(graph, 3, 2);
            EnhancedParameterComputer.EnhancedParameters params2 =
                    parameterComputer.computeOptimalParameters(graph, 3, 2);
            // Results should be deterministic for the same inputs
            assertEquals(params1.getK(), params2.getK());
            assertEquals(params1.getEta(), params2.getEta());
            // Modulator might vary but should have same size and achieve same treewidth
            assertEquals(params1.getModulatorSize(), params2.getModulatorSize());
        }
    }

    // Helper methods for creating test graphs

    // Directed binary tree: edge from V(i/2) down to Vi.
    private Graph createTreeGraph(int size) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        for (int i = 0; i < size; i++) {
            graph.addVertex("V" + i);
        }
        for (int i = 1; i < size; i++) {
            graph.addEdge("V" + (i / 2), "V" + i); // Binary tree structure
        }
        return graph;
    }

    // Single directed cycle V0 -> V1 -> ... -> V(size-1) -> V0.
    private Graph createCycleGraph(int size) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        for (int i = 0; i < size; i++) {
            graph.addVertex("V" + i);
        }
        for (int i = 0; i < size; i++) {
            graph.addEdge("V" + i, "V" + ((i + 1) % size));
        }
        return graph;
    }

    // Complete digraph: an edge in both directions between every vertex pair.
    private Graph createCompleteGraph(int size) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        for (int i = 0; i < size; i++) {
            graph.addVertex("V" + i);
        }
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                if (i != j) {
                    graph.addEdge("V" + i, "V" + j);
                }
            }
        }
        return graph;
    }

    // Directed path V0 -> V1 -> ... -> V(size-1).
    // NOTE(review): prints the graph to stdout — presumably leftover debugging.
    private Graph createPathGraph(int size) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        for (int i = 0; i < size; i++) {
            graph.addVertex("V" + i);
        }
        for (int i = 0; i < size - 1; i++) {
            graph.addEdge("V" + i, "V" + (i + 1));
        }
        System.out.println(graph);
        return graph;
    }

    // rows x cols grid (vertices Vi_j) with right and down edges only.
    private Graph createGridGraph(int rows, int cols) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        // Add vertices
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < cols; j++) {
                graph.addVertex("V" + i + "_" + j);
            }
        }
        // Add edges
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < cols; j++) {
                String current = "V" + i + "_" + j;
                // Right edge
                if (j < cols - 1) {
                    graph.addEdge(current, "V" + i + "_" + (j + 1));
                }
                // Down edge
                if (i < rows - 1) {
                    graph.addEdge(current, "V" + (i + 1) + "_" + j);
                }
            }
        }
        return graph;
    }

    // Random digraph: each ordered vertex pair gets an edge with the given probability.
    private Graph createRandomGraph(int vertexCount, double edgeProbability) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        ThreadLocalRandom random = ThreadLocalRandom.current();
        // Add vertices
        for (int i = 0; i < vertexCount; i++) {
            graph.addVertex("V" + i);
        }
        // Add random edges
        for (int i = 0; i < vertexCount; i++) {
            for (int j = 0; j < vertexCount; j++) {
                if (i != j && random.nextDouble() < edgeProbability) {
                    graph.addEdge("V" + i, "V" + j);
                }
            }
        }
        return graph;
    }

    // 12 vertices: bidirectional hub at V0, a 3-cycle (V5..V7), a 4-cycle
    // (V8..V11), plus cross-component links.
    private Graph createComplexGraph() {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        // Add vertices
        for (int i = 0; i < 12; i++) {
            graph.addVertex("V" + i);
        }
        // Create a complex structure with multiple cycles and high-degree vertices
        // Central hub
        for (int i = 1; i <= 4; i++) {
            graph.addEdge("V0", "V" + i);
            graph.addEdge("V" + i, "V0");
        }
        // Two cycles
        for (int i = 5; i <= 7; i++) {
            graph.addEdge("V" + i, "V" + ((i - 5 + 1) % 3 + 5));
        }
        for (int i = 8; i <= 11; i++) {
            graph.addEdge("V" + i, "V" + ((i - 8 + 1) % 4 + 8));
        }
        // Connections between components
        graph.addEdge("V1", "V5");
        graph.addEdge("V2", "V8");
        graph.addEdge("V7", "V10");
        return graph;
    }
}

================================================
FILE: graph-algorithms/src/test/java/org/hjug/feedback/vertex/kernelized/ParameterComputerExample.java
================================================
package org.hjug.feedback.vertex.kernelized;

import java.util.Set;
import org.hjug.feedback.SuperTypeToken;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;

// Runnable demo of ParameterComputer on a 6-vertex graph whose two cycles
// share vertex V2.
public class ParameterComputerExample {

    public static void main(String[] args) {
        // Create a sample directed graph with cycles
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        // Add vertices
        for (int i = 0; i < 6; i++) {
            graph.addVertex("V" + i);
        }
        // Add edges to create cycles
        graph.addEdge("V0", "V1");
        graph.addEdge("V1", "V2");
        graph.addEdge("V2", "V0"); // First cycle
        graph.addEdge("V2", "V3");
        graph.addEdge("V3", "V4");
        graph.addEdge("V4", "V5");
        graph.addEdge("V5", "V2"); // Second cycle
        // Create parameter computer
        ParameterComputer computer = new ParameterComputer<>(new SuperTypeToken<>() {});
        try {
            // Compute parameters without modulator
            ParameterComputer.Parameters params1 = computer.computeParameters(graph);
            System.out.println("Parameters without modulator: " + params1);
            // Compute parameters with a modulator
            Set modulator = Set.of("V2"); // V2 connects both cycles
            ParameterComputer.Parameters params2 = computer.computeParameters(graph, modulator);
            System.out.println("Parameters with modulator {V2}: " + params2);
            // Find optimal modulator automatically
            ParameterComputer.Parameters params3 = computer.computeParametersWithOptimalModulator(graph, 2);
            System.out.println("Parameters with optimal modulator: " + params3);
        } finally {
            // Always release the computer's resources.
            computer.shutdown();
        }
    }
}

================================================
FILE: graph-algorithms/src/test/java/org/hjug/feedback/vertex/kernelized/ParameterComputerTest.java
================================================
package org.hjug.feedback.vertex.kernelized;

import static org.junit.jupiter.api.Assertions.*;

import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.IntStream;
import org.hjug.feedback.SuperTypeToken;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
// Tests for ParameterComputer, TreewidthComputer and FeedbackVertexSetComputer.
// NOTE(review): raw types throughout; generic parameters appear stripped
// during extraction — verify against the original source.
@Execution(ExecutionMode.CONCURRENT)
class ParameterComputerTest {

    private ParameterComputer parameterComputer;
    private TreewidthComputer treewidthComputer;
    private FeedbackVertexSetComputer fvsComputer;
    private SuperTypeToken token;

    @BeforeEach
    void setUp() {
        token = new SuperTypeToken<>() {};
        parameterComputer = new ParameterComputer<>(token);
        treewidthComputer = new TreewidthComputer<>();
        fvsComputer = new FeedbackVertexSetComputer<>(token);
    }

    // All three computers expose shutdown(); release them after each test.
    @AfterEach
    void tearDown() {
        parameterComputer.shutdown();
        treewidthComputer.shutdown();
        fvsComputer.shutdown();
    }

    @Nested
    @DisplayName("Treewidth Computation Tests")
    class TreewidthComputationTests {

        @Test
        @DisplayName("Should compute eta=0 for empty graph")
        void testEmptyGraph() {
            Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
            int eta = treewidthComputer.computeEta(graph, new HashSet<>());
            assertEquals(0, eta);
        }

        @Test
        @DisplayName("Should compute eta=0 for single vertex")
        void testSingleVertex() {
            Graph graph = createSingleVertexGraph();
            int eta = treewidthComputer.computeEta(graph, new HashSet<>());
            assertEquals(0, eta);
        }

        @Test
        @DisplayName("Should compute eta=1 for path graph")
        void testPathGraph() {
            Graph graph = createPathGraph(5);
            int eta = treewidthComputer.computeEta(graph, new HashSet<>());
            assertEquals(1, eta);
        }

        @Test
        @DisplayName("Should compute eta=2 for cycle graph")
        void testCycleGraph() {
            Graph graph = createCycleGraph(5);
            int eta = treewidthComputer.computeEta(graph, new HashSet<>());
            assertTrue(eta >= 2);
        }

        @Test
        @DisplayName("Should handle modulator removal correctly")
        void testModulatorRemoval() {
            Graph graph = createCompleteGraph(5);
            Set modulator = Set.of("V0", "V1");
            int etaWithModulator = treewidthComputer.computeEta(graph, modulator);
            int etaWithoutModulator = treewidthComputer.computeEta(graph, new HashSet<>());
            assertTrue(etaWithModulator <= etaWithoutModulator);
        }

        @ParameterizedTest
        @ValueSource(ints = {10, 25, 50})
        @DisplayName("Should handle random graphs efficiently")
        void testRandomGraphTreewidth(int size) {
            Graph graph = createRandomGraph(size, 0.3);
            long startTime = System.currentTimeMillis();
            int eta = treewidthComputer.computeEta(graph, new HashSet<>());
            long duration = System.currentTimeMillis() - startTime;
            assertTrue(eta >= 0);
            assertTrue(eta < size);
            assertTrue(duration < 5000); // Should complete within 5 seconds
        }
    }

    @Nested
    @DisplayName("Feedback Vertex Set Computation Tests")
    class FeedbackVertexSetComputationTests {

        @Test
        @DisplayName("Should compute k=0 for acyclic graph")
        void testAcyclicGraph() {
            Graph graph = createPathGraph(5);
            int k = fvsComputer.computeK(graph);
            assertEquals(0, k);
        }

        @Test
        @DisplayName("Should compute k=1 for simple cycle")
        void testSimpleCycle() {
            Graph graph = createCycleGraph(4);
            int k = fvsComputer.computeK(graph);
            assertEquals(1, k);
        }

        @Test
        @DisplayName("Should handle self-loops correctly")
        void testSelfLoops() {
            Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
            graph.addVertex("A");
            graph.addEdge("A", "A");
            int k = fvsComputer.computeK(graph);
            assertEquals(1, k);
        }

        @Test
        @DisplayName("Should handle multiple cycles")
        void testMultipleCycles() {
            // NOTE(review): createMultipleCyclesGraph is defined beyond this view.
            Graph graph = createMultipleCyclesGraph();
            int k = fvsComputer.computeK(graph);
            assertEquals(1, k); // Removing node C breaks both cycles
        }

        @Test
        @DisplayName("Should handle disconnected components")
        void testDisconnectedComponents() {
            // NOTE(review): createDisconnectedCyclesGraph is defined beyond this view.
            Graph graph = createDisconnectedCyclesGraph();
            int k = fvsComputer.computeK(graph);
            assertTrue(k >= 2); // Each cycle needs at least one vertex removed
        }

        @ParameterizedTest
        @ValueSource(ints = {20, 50, 100})
        @DisplayName("Should handle large random graphs")
        void testLargeRandomGraphs(int size) {
            Graph graph = createRandomGraph(size, 0.15);
            long startTime = System.currentTimeMillis();
            int k = fvsComputer.computeK(graph);
            long duration = System.currentTimeMillis() - startTime;
            assertTrue(k >= 0);
            assertTrue(k <= size);
            assertTrue(duration < 30000); // Should complete within 30 seconds
        }
    }

    @Nested
    @DisplayName("Parameter Computer Integration Tests")
    class ParameterComputerIntegrationTests {

        @Test
        @DisplayName("Should compute valid parameters for simple graphs")
        void testSimpleGraphParameters() {
            Graph graph = createCycleGraph(4);
            ParameterComputer.Parameters params = parameterComputer.computeParameters(graph);
            assertTrue(params.getK() >= 1);
            assertTrue(params.getEta() >= 0);
            assertTrue(params.getModulatorSize() >= 0);
        }

        @Test
        @DisplayName("Should compute parameters with modulator")
        void testParametersWithModulator() {
            Graph graph = createCompleteGraph(6);
            Set modulator = Set.of("V0", "V1");
            ParameterComputer.Parameters params = parameterComputer.computeParameters(graph, modulator);
            assertEquals(2, params.getModulatorSize());
            assertTrue(params.getK() >= 0);
            assertTrue(params.getEta() >= 0);
        }

        @Test
        @DisplayName("Should find optimal modulator")
        void testOptimalModulatorFinding() {
            Graph graph = createStarGraph(8);
            ParameterComputer.Parameters params = parameterComputer.computeParametersWithOptimalModulator(graph, 2);
            assertTrue(params.getModulatorSize() <= 2);
            assertTrue(params.getEta() >= 0);
        }

        @RepeatedTest(5)
        @DisplayName("Should produce consistent results")
        void testConsistentResults() {
            Graph graph = createRandomGraph(30, 0.2);
            ParameterComputer.Parameters params1 = parameterComputer.computeParameters(graph);
            ParameterComputer.Parameters params2 = parameterComputer.computeParameters(graph);
            // Results should be deterministic for the same graph
            assertEquals(params1.getK(), params2.getK());
            assertEquals(params1.getEta(), params2.getEta());
        }
    }

    @Nested
    @DisplayName("Multithreading and Performance Tests")
    class MultithreadingPerformanceTests {

        @Test
        @DisplayName("Should handle concurrent parameter computation")
        void testConcurrentParameterComputation() throws InterruptedException {
            List> graphs = IntStream.range(0, 10)
                    .mapToObj(i -> createRandomGraph(20, 0.25))
                    .collect(java.util.stream.Collectors.toList());
            List> futures = graphs.stream()
                    .map(graph -> CompletableFuture.supplyAsync(() -> parameterComputer.computeParameters(graph)))
                    .collect(java.util.stream.Collectors.toList());
            List results =
                    futures.stream().map(CompletableFuture::join).collect(java.util.stream.Collectors.toList());
            assertEquals(10, results.size());
            results.forEach(params -> {
                assertTrue(params.getK() >= 0);
                assertTrue(params.getEta() >= 0);
            });
        }

        @Test
        @DisplayName("Should scale with parallelism level")
        void testScalingWithParallelism() {
            Graph graph = createRandomGraph(100, 0.1);
            // Test with different parallelism levels
            for (int parallelism : Arrays.asList(1, 2, 4)) {
                ParameterComputer computer = new ParameterComputer<>(token, parallelism);
                long startTime = System.currentTimeMillis();
                ParameterComputer.Parameters params = computer.computeParameters(graph);
                long duration = System.currentTimeMillis() - startTime;
                assertTrue(params.getK() >= 0);
                assertTrue(duration < 35000); // Reasonable time limit
                computer.shutdown();
            }
        }
    }

    // Helper methods for creating test graphs

    // Graph containing only vertex V0.
    private Graph createSingleVertexGraph() {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        graph.addVertex("V0");
        return graph;
    }

    // Directed path V0 -> V1 -> ... -> V(length-1).
    private Graph createPathGraph(int length) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        for (int i = 0; i < length; i++) {
            graph.addVertex("V" + i);
        }
        for (int i = 0; i < length - 1; i++) {
            graph.addEdge("V" + i, "V" + (i + 1));
        }
        return graph;
    }

    // Path closed into a single directed cycle.
    private Graph createCycleGraph(int size) {
        Graph graph = createPathGraph(size);
        graph.addEdge("V" + (size - 1), "V0");
        return graph;
    }

    // Complete digraph: edges in both directions between every vertex pair.
    private Graph createCompleteGraph(int size) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        for (int i = 0; i < size; i++) {
            graph.addVertex("V" + i);
        }
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                if (i != j) {
                    graph.addEdge("V" + i, "V" + j);
                }
            }
        }
        return graph;
    }

    // NOTE(review): this method continues beyond the visible chunk; text below
    // is reproduced exactly as found, cut mid-statement.
    private Graph createStarGraph(int size) {
        Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class);
        graph.addVertex("center");
        for (int i = 0; i <
size; i++) { graph.addVertex("V" + i); graph.addEdge("center", "V" + i); graph.addEdge("V" + i, "center"); } return graph; } private Graph createMultipleCyclesGraph() { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // First cycle: A -> B -> C -> A graph.addVertex("A"); graph.addVertex("B"); graph.addVertex("C"); graph.addEdge("A", "B"); graph.addEdge("B", "C"); graph.addEdge("C", "A"); // Second cycle: C -> D -> E -> C (overlapping) graph.addVertex("D"); graph.addVertex("E"); graph.addEdge("C", "D"); graph.addEdge("D", "E"); graph.addEdge("E", "C"); return graph; } private Graph createDisconnectedCyclesGraph() { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); // First cycle graph.addVertex("A1"); graph.addVertex("A2"); graph.addVertex("A3"); graph.addEdge("A1", "A2"); graph.addEdge("A2", "A3"); graph.addEdge("A3", "A1"); // Second cycle (disconnected) graph.addVertex("B1"); graph.addVertex("B2"); graph.addVertex("B3"); graph.addEdge("B1", "B2"); graph.addEdge("B2", "B3"); graph.addEdge("B3", "B1"); return graph; } private Graph createRandomGraph(int vertexCount, double edgeProbability) { Graph graph = new DefaultDirectedGraph<>(DefaultEdge.class); ThreadLocalRandom random = ThreadLocalRandom.current(); // Add vertices for (int i = 0; i < vertexCount; i++) { graph.addVertex("V" + i); } // Add random edges for (int i = 0; i < vertexCount; i++) { for (int j = 0; j < vertexCount; j++) { if (i != j && random.nextDouble() < edgeProbability) { graph.addEdge("V" + i, "V" + j); } } } return graph; } } ================================================ FILE: graph-data-generator/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.graphdatagenerator graph-data-generator RefactorFirst Graph Data Generator org.hjug.refactorfirst.costbenefitcalculator cost-benefit-calculator ================================================ FILE: 
graph-data-generator/src/main/java/org/hjug/gdg/GraphDataGenerator.java ================================================ package org.hjug.gdg; import java.util.List; import org.hjug.cbc.RankedDisharmony; public class GraphDataGenerator { public String getGodClassScriptStart() { return " google.charts.load('current', {'packages':['corechart']});\n" + " google.charts.setOnLoadCallback(drawSeriesChart);\n" + "\n" + " function drawSeriesChart() {\n" + "\n" + " var data = google.visualization.arrayToDataTable(["; } public String getGodClassScriptEnd() { return "]);\n" + "\n" + " var options = {\n" + " title: 'Priority Ranking for Refactoring God Classes - ' +\n" + " 'Start with Priority 1',\n" + " height: 900, " + " width: 1200, " + " explorer: {}, " + " hAxis: {title: 'Effort'},\n" + " vAxis: {title: 'Change Proneness'},\n" + " colorAxis: {colors: ['green', 'red']},\n" + " bubble: {textStyle: {fontSize: 11}} };\n" + "\n" + " var chart = new google.visualization.BubbleChart(document.getElementById('series_chart_div'));\n" + " chart.draw(data, options);\n" + " }\n"; } public String getCBOScriptStart() { return " google.charts.load('current', {'packages':['corechart']});\n" + " google.charts.setOnLoadCallback(drawSeriesChart);\n" + "\n" + " function drawSeriesChart() {\n" + "\n" + " var data2 = google.visualization.arrayToDataTable(["; } public String getCBOScriptEnd() { return "]);\n" + "\n" + " var options = {\n" + " title: 'Priority Ranking for Refactoring Highly Coupled Classes - ' +\n" + " 'Start with Priority 1',\n" + " height: 900, " + " width: 1200, " + " explorer: {}, " + " hAxis: {title: 'Coupling Count'},\n" + " vAxis: {title: 'Change Proneness'},\n" + " colorAxis: {colors: ['green', 'red']},\n" + " bubble: {textStyle: {fontSize: 11}} };\n" + "\n" + " var chart2 = new google.visualization.BubbleChart(document.getElementById('series_chart_div_2'));\n" + " chart2.draw(data2, options);\n" + " }\n"; } public String generateGodClassBubbleChartData(List 
rankedDisharmonies, int maxPriority) { StringBuilder chartData = new StringBuilder(); chartData.append("[ 'ID', 'Effort', 'Change Proneness', 'Priority', 'Priority (Visual)'], "); for (int i = 0; i < rankedDisharmonies.size(); i++) { RankedDisharmony rankedDisharmony = rankedDisharmonies.get(i); chartData.append("["); chartData.append("'"); chartData.append(rankedDisharmony.getFileName()); chartData.append("',"); chartData.append(rankedDisharmony.getEffortRank()); chartData.append(","); chartData.append(rankedDisharmony.getChangePronenessRank()); chartData.append(","); chartData.append(rankedDisharmony.getPriority()); chartData.append(","); chartData.append(maxPriority - rankedDisharmony.getPriority()); chartData.append("]"); if (i + 1 < rankedDisharmonies.size()) { chartData.append(","); } } return chartData.toString(); } public String generateCBOBubbleChartData(List rankedDisharmonies, int maxPriority) { StringBuilder chartData = new StringBuilder(); chartData.append("[ 'ID', 'Coupling Count', 'Change Proneness', 'Priority', 'Priority (Visual)'], "); for (int i = 0; i < rankedDisharmonies.size(); i++) { RankedDisharmony rankedDisharmony = rankedDisharmonies.get(i); chartData.append("["); chartData.append("'"); chartData.append(rankedDisharmony.getFileName()); chartData.append("',"); chartData.append(rankedDisharmony.getEffortRank()); chartData.append(","); chartData.append(rankedDisharmony.getChangePronenessRank()); chartData.append(","); chartData.append(rankedDisharmony.getPriority()); chartData.append(","); chartData.append(maxPriority - rankedDisharmony.getPriority()); chartData.append("]"); if (i + 1 < rankedDisharmonies.size()) { chartData.append(","); } } return chartData.toString(); } } ================================================ FILE: graph-data-generator/src/test/java/org/hjug/gdg/GraphDataGeneratorTest.java ================================================ package org.hjug.gdg; import static org.junit.jupiter.api.Assertions.assertEquals; import 
java.util.ArrayList; import java.util.List; import org.hjug.cbc.RankedDisharmony; import org.hjug.git.ScmLogInfo; import org.hjug.metrics.GodClass; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; class GraphDataGeneratorTest { private GraphDataGenerator graphDataGenerator; @BeforeEach public void setUp() { graphDataGenerator = new GraphDataGenerator(); } @Test void getScriptStart() { String scriptStart = " google.charts.load('current', {'packages':['corechart']});\n" + " google.charts.setOnLoadCallback(drawSeriesChart);\n" + "\n" + " function drawSeriesChart() {\n" + "\n" + " var data = google.visualization.arrayToDataTable(["; assertEquals(scriptStart, graphDataGenerator.getGodClassScriptStart()); } @Test void getScriptEnd() { String scriptEnd = "]);\n" + "\n" + " var options = {\n" + " title: 'Priority Ranking for Refactoring God Classes - ' +\n" + " 'Start with Priority 1',\n" + " height: 900, " + " width: 1200, " + " explorer: {}, " + " hAxis: {title: 'Effort'},\n" + " vAxis: {title: 'Change Proneness'},\n" + " colorAxis: {colors: ['green', 'red']},\n" + " bubble: {textStyle: {fontSize: 11}} };\n" + "\n" + " var chart = new google.visualization.BubbleChart(document.getElementById('series_chart_div'));\n" + " chart.draw(data, options);\n" + " }\n"; assertEquals(scriptEnd, graphDataGenerator.getGodClassScriptEnd()); } @Test void generateBubbleChartDataOneDataPoint() { GodClass godClass = new GodClass( "AttributeHandler", "AttributeHandler.java", "org.apache.myfaces.tobago.facelets", "(WMC=77, ATFD=105, TCC=15.555999755859375)"); godClass.setOverallRank(0); ScmLogInfo scmLogInfo = new ScmLogInfo("org/apache/myfaces/tobago/facelets/AttributeHandler.java", null, 1595275997, 0, 1); scmLogInfo.setChangePronenessRank(0); RankedDisharmony rankedDisharmony = new RankedDisharmony(godClass, scmLogInfo); rankedDisharmony.setPriority(1); List rankedDisharmonies = new ArrayList<>(); 
rankedDisharmonies.add(rankedDisharmony); String chartData = "[ 'ID', 'Effort', 'Change Proneness', 'Priority', 'Priority (Visual)'], " + "['AttributeHandler.java',0,0,1,0]"; Assertions.assertEquals(chartData, graphDataGenerator.generateGodClassBubbleChartData(rankedDisharmonies, 1)); } // Only testing correct string formatting, not data correctness @Test void generateBubbleChartDataTwoDataPoints() { GodClass godClass = new GodClass( "AttributeHandler", "AttributeHandler.java", "org.apache.myfaces.tobago.facelets", "(WMC=77, ATFD=105, TCC=15.555999755859375)"); godClass.setOverallRank(0); ScmLogInfo scmLogInfo = new ScmLogInfo("org/apache/myfaces/tobago/facelets/AttributeHandler.java", null, 1595275997, 0, 1); scmLogInfo.setChangePronenessRank(0); RankedDisharmony rankedDisharmony = new RankedDisharmony(godClass, scmLogInfo); rankedDisharmony.setPriority(1); RankedDisharmony rankedDisharmony2 = new RankedDisharmony(godClass, scmLogInfo); rankedDisharmony2.setPriority(2); List rankedDisharmonies = new ArrayList<>(); rankedDisharmonies.add(rankedDisharmony); rankedDisharmonies.add(rankedDisharmony2); String chartData = "[ 'ID', 'Effort', 'Change Proneness', 'Priority', 'Priority (Visual)'], " + "['AttributeHandler.java',0,0,1,0]," + "['AttributeHandler.java',0,0,2,-1]"; Assertions.assertEquals(chartData, graphDataGenerator.generateGodClassBubbleChartData(rankedDisharmonies, 1)); } } ================================================ FILE: jreleaser.yml ================================================ # Generated with JReleaser 1.22.0 at 2026-01-24T15:46:34.8940566-06:00 project: name: RefactorFirst description: Identifies cycles and God classes in a codebase and suggests which classes should be refactored first. longDescription: Identifies cycles and God classes in a codebase and suggests which classes should be refactored first. 
authors: - Jim Bethancourt license: Apache-2.0 links: homepage: https://github.com/refactorfirst/RefactorFirst languages: java: groupId: org.hjug.refactorfirst # version: 11 inceptionYear: 2020 release: github: owner: refactorfirst signing: active: ALWAYS armored: true deploy: maven: mavenCentral: release-deploy: active: RELEASE url: https://central.sonatype.com/api/v1/publisher applyMavenCentralRules: true stagingRepositories: - target/staging-deploy pomchecker: failOnError: false failOnWarning: false strict: false ================================================ FILE: lombok.config ================================================ lombok.addLombokGeneratedAnnotation = true ================================================ FILE: pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT pom https://github.com/refactorfirst/RefactorFirst RefactorFirst Plugin that identifies Cycles and God classes in a codebase and suggests which classes should be refactored first. Generates a graph and a table providing (hopefully) easy to understand guidance. Can be used via command line, as a build plugin, or as a report plugin. 
Apache License 2.0 http://www.apache.org/licenses/ repo Jim Bethancourt jimbethancourt@gmail.com Houston Java Users Group http://www.hjug.org developer CST scm:git:https://github.com/refactorfirst/RefactorFirst scm:git:https://github.com/refactorfirst/RefactorFirst https://github.com/refactorfirst/RefactorFirst HEAD GitHub https://github.com/refactorfirst/RefactorFirst/issues UTF-8 11 11 1.18.36 jimbethancourt_RefactorFirst ${project.artifactId} jimbethancourt-github https://sonarcloud.io 3.9.9 test-resources codebase-graph-builder graph-algorithms change-proneness-ranker effort-ranker cost-benefit-calculator graph-data-generator refactor-first-maven-plugin coverage report cli org.hjug.refactorfirst.changepronenessranker change-proneness-ranker ${project.version} org.hjug.refactorfirst.effortranker effort-ranker ${project.version} org.hjug.refactorfirst.dsm graph-algorithms ${project.version} org.hjug.refactorfirst.costbenefitcalculator cost-benefit-calculator ${project.version} org.hjug.refactorfirst.graphdatagenerator graph-data-generator ${project.version} org.hjug.refactorfirst.plugin refactor-first-maven-plugin ${project.version} org.hjug.refactorfirst.report report ${project.version} org.hjug.refactorfirst.testresources test-resources ${project.version} test org.hjug.refactorfirst.codebasegraphbuilder codebase-graph-builder ${project.version} org.eclipse.jgit org.eclipse.jgit 6.10.0.202406032230-r compile org.jgrapht jgrapht-core 1.5.2 org.jgrapht jgrapht-opt 1.5.2 in.wilsonl.minifyhtml minify-html 0.15.0 net.sourceforge.pmd pmd-java 7.0.0-rc4 compile com.beust jcommander net.sourceforge.saxon saxon com.fasterxml.jackson.core jackson-databind 2.18.3 com.google.guava guava 33.4.0-jre org.apache.maven maven-core ${maven.core.version} com.google.guava guava org.slf4j slf4j-api 2.0.17 org.slf4j slf4j-simple 2.0.17 org.mockito mockito-core 3.4.4 test org.junit.jupiter junit-jupiter-api 5.13.3 test org.junit.jupiter junit-jupiter-params 5.13.3 test 
org.junit.jupiter junit-jupiter-engine 5.13.3 test org.projectlombok lombok ${lombok.version} true org.apache.maven.plugins maven-deploy-plugin 3.1.4 org.apache.maven.plugins maven-compiler-plugin 3.8.1 -XDcompilePolicy=simple org.projectlombok lombok ${lombok.version} 11 org.jacoco jacoco-maven-plugin 0.8.14 default-prepare-agent prepare-agent report verify report csvreport verify report org.pitest pitest-maven 1.16.1 org.pitest pitest-junit5-plugin 1.2.1 org.apache.maven.plugins maven-site-plugin 3.7.1 org.apache.maven.plugins maven-project-info-reports-plugin 3.0.0 com.github.spotbugs spotbugs-maven-plugin 4.9.2.0 com.github.spotbugs spotbugs 4.9.3 Max Low true com.h3xstream.findsecbugs findsecbugs-plugin 1.13.0 com.diffplug.spotless spotless-maven-plugin 2.44.5 *.java true 4 apply initialize local org.owasp dependency-check-maven 12.1.0 8.0 check snapshot-release org.apache.maven.plugins maven-release-plugin 2.5.3 https://oss.sonatype.org/content/repositories/snapshots/ ossrh https://oss.sonatype.org/content/repositories/snapshots publish org.apache.maven.plugins maven-source-plugin 3.4.0 attach-sources jar org.apache.maven.plugins maven-javadoc-plugin 3.12.0 attach-javadocs jar ossrh https://oss.sonatype.org/service/local/staging/deploy/maven2/ ossrh https://oss.sonatype.org/content/repositories/snapshots ================================================ FILE: refactor-first-gradle-plugin/build.gradle ================================================ plugins { id 'java-gradle-plugin' id 'maven-publish' id 'com.gradle.plugin-publish' version '0.12.0' } repositories { mavenCentral() maven { url 'target/dependencies' } mavenLocal() } dependencies { compileOnly gradleApi() // api "org.hjug.refactorfirst.graphdatagenerator:graph-data-generator:${version}" } pluginBundle { website = 'https://github.com/jimbethancourt/RefactorFirst' vcsUrl = 'https://github.com/jimbethancourt/RefactorFirst.git' tags = ['refactor', 'report'] } gradlePlugin { plugins { 
refactorFirstPlugin { id = 'org.hjug.refactor-first' displayName = 'RefactorFirst' description = 'Plugin that identifies God classes in a codebase and suggests which classes should be refactored first.' implementationClass = 'org.hjug.gradlereport.RefactorFirstPlugin' } } } ================================================ FILE: refactor-first-gradle-plugin/gradle/wrapper/gradle-wrapper.properties ================================================ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-6.7-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists ================================================ FILE: refactor-first-gradle-plugin/gradlew ================================================ #!/usr/bin/env sh # # Copyright 2015 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ############################################################################## ## ## Gradle start up script for UN*X ## ############################################################################## # Attempt to set APP_HOME # Resolve links: $0 may be a link PRG="$0" # Need this for relative symlinks. 
while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG=`dirname "$PRG"`"/$link" fi done SAVED="`pwd`" cd "`dirname \"$PRG\"`/" >/dev/null APP_HOME="`pwd -P`" cd "$SAVED" >/dev/null APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD="maximum" warn () { echo "$*" } die () { echo echo "$*" echo exit 1 } # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false case "`uname`" in CYGWIN* ) cygwin=true ;; Darwin* ) darwin=true ;; MINGW* ) msys=true ;; NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else JAVACMD="java" which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi # Increase the maximum file descriptors if we can. if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then MAX_FD_LIMIT=`ulimit -H -n` if [ $? -eq 0 ] ; then if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then MAX_FD="$MAX_FD_LIMIT" fi ulimit -n $MAX_FD if [ $? 
-ne 0 ] ; then warn "Could not set maximum file descriptor limit: $MAX_FD" fi else warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" fi fi # For Darwin, add options to specify how the application appears in the dock if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi # For Cygwin or MSYS, switch paths to Windows format before running java if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` JAVACMD=`cygpath --unix "$JAVACMD"` # We build the pattern for arguments to be converted via cygpath ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` SEP="" for dir in $ROOTDIRSRAW ; do ROOTDIRS="$ROOTDIRS$SEP$dir" SEP="|" done OURCYGPATTERN="(^($ROOTDIRS))" # Add a user-defined pattern to the cygpath arguments if [ "$GRADLE_CYGPATTERN" != "" ] ; then OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" fi # Now convert the arguments - kludge to limit ourselves to /bin/sh i=0 for arg in "$@" ; do CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` else eval `echo args$i`="\"$arg\"" fi i=`expr $i + 1` done case $i in 0) set -- ;; 1) set -- "$args0" ;; 2) set -- "$args0" "$args1" ;; 3) set -- "$args0" "$args1" "$args2" ;; 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi # Escape application args save () { for i do printf 
%s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done echo " " } APP_ARGS=`save "$@"` # Collect all arguments for the java command, following the shell quoting and substitution rules eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" exec "$JAVACMD" "$@" ================================================ FILE: refactor-first-gradle-plugin/gradlew.bat ================================================ @rem @rem Copyright 2015 the original author or authors. @rem @rem Licensed under the Apache License, Version 2.0 (the "License"); @rem you may not use this file except in compliance with the License. @rem You may obtain a copy of the License at @rem @rem https://www.apache.org/licenses/LICENSE-2.0 @rem @rem Unless required by applicable law or agreed to in writing, software @rem distributed under the License is distributed on an "AS IS" BASIS, @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @rem @rem ########################################################################## @rem Set local scope for the variables with windows NT shell if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 if "%DIRNAME%" == "" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @rem Resolve any "." and ".." in APP_HOME to make it shorter. for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 if "%ERRORLEVEL%" == "0" goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. echo. echo Please set the JAVA_HOME variable in your environment to match the echo location of your Java installation. goto fail :findJavaFromJavaHome set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% echo. echo Please set the JAVA_HOME variable in your environment to match the echo location of your Java installation. goto fail :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar @rem Execute Gradle "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell if "%ERRORLEVEL%"=="0" goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! 
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 exit /b 1 :mainEnd if "%OS%"=="Windows_NT" endlocal :omega ================================================ FILE: refactor-first-gradle-plugin/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.2.0-SNAPSHOT org.hjug.refactorfirst.plugin refactor-first-gradle-plugin pom ./gradlew build true true org.hjug.refactorfirst.graphdatagenerator graph-data-generator org.apache.maven.plugins maven-dependency-plugin 3.1.2 copy-dependencies generate-resources copy-dependencies false true true true true ${project.build.directory}/dependencies org.codehaus.mojo exec-maven-plugin 1.6.0 gradle prepare-package ${gradle.executable} ${gradle.tasks} -Pgroup=${project.groupId} -Pversion=${project.version} -S exec windows windows gradlew.bat ================================================ FILE: refactor-first-gradle-plugin/settings.gradle ================================================ rootProject.name = 'refactor-first-gradle-plugin' ================================================ FILE: refactor-first-gradle-plugin/src/main/java/org/hjug/gradlereport/RefactorFirstPlugin.java ================================================ package org.hjug.gradlereport; import org.gradle.api.Plugin; import org.gradle.api.Project; public class RefactorFirstPlugin implements Plugin { public void apply(Project project) { // impl } } ================================================ FILE: refactor-first-maven-plugin/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.plugin refactor-first-maven-plugin maven-plugin RefactorFirst Maven Plugin org.hjug.refactorfirst.graphdatagenerator graph-data-generator org.hjug.refactorfirst.report report com.google.guava guava org.iq80.snappy snappy 0.5 org.apache.maven maven-core org.apache.maven.reporting maven-reporting-impl 4.0.0 org.tukaani xz org.apache.maven.reporting 
maven-reporting-api 4.0.0 org.apache.maven maven-plugin-api 3.9.9 org.apache.maven.plugin-tools maven-plugin-annotations 3.15.1 provided com.fasterxml.jackson.core jackson-databind maven-install-plugin 3.1.3 org.apache.maven.plugins maven-plugin-plugin 3.15.1 refactor-first default-descriptor process-classes generated-helpmojo helpmojo
================================================
FILE: refactor-first-maven-plugin/src/main/java/org/hjug/mavenreport/RefactorFirstHtmlReport.java
================================================
package org.hjug.mavenreport;

import java.io.File;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.hjug.refactorfirst.report.HtmlReport;

/**
 * Maven mojo that generates the RefactorFirst HTML report by delegating to
 * {@link HtmlReport}. All parameters are forwarded unchanged.
 */
@Slf4j
@Mojo(
        name = "htmlReport",
        defaultPhase = LifecyclePhase.SITE,
        requiresDependencyResolution = ResolutionScope.RUNTIME,
        requiresProject = false,
        threadSafe = true,
        inheritByDefault = false)
public class RefactorFirstHtmlReport extends AbstractMojo {

    /** Whether to include detailed columns in the report output. */
    @Parameter(property = "showDetails")
    private boolean showDetails = false;

    /** Number of back edges to analyze during cycle analysis. */
    @Parameter(property = "backEdgeAnalysisCount")
    protected int backEdgeAnalysisCount = 50;

    /** Whether class-cycle analysis runs at all. */
    @Parameter(property = "analyzeCycles")
    private boolean analyzeCycles = true;

    /** Whether the generated HTML is minified. */
    @Parameter(property = "minifyHtml")
    private boolean minifyHtml = false;

    /** Whether test classes are excluded from analysis. */
    @Parameter(property = "excludeTests")
    private boolean excludeTests = true;

    /**
     * The test source directory containing test class sources.
     */
    @Parameter(property = "testSourceDirectory")
    private String testSourceDirectory;

    @Parameter(defaultValue = "${project.name}")
    private String projectName;

    @Parameter(defaultValue = "${project.version}")
    private String projectVersion;

    @Parameter(readonly = true, defaultValue = "${project}")
    private MavenProject project;

    // NOTE(review): only used for logging below; presumably kept as a user-visible
    // parameter — confirm before removing.
    @Parameter(property = "project.build.directory")
    protected File outputDirectory;

    @Override
    public void execute() {
        log.info(outputDirectory.getPath());
        HtmlReport htmlReport = new HtmlReport();
        htmlReport.execute(
                backEdgeAnalysisCount,
                analyzeCycles,
                showDetails,
                minifyHtml,
                excludeTests,
                testSourceDirectory,
                projectName,
                projectVersion,
                project.getBasedir(),
                // Strip the interpolated basedir prefix to get a relative output path.
                project.getModel()
                        .getReporting()
                        .getOutputDirectory()
                        .replace("${project.basedir}" + File.separator, ""));
    }
}
================================================
FILE: refactor-first-maven-plugin/src/main/java/org/hjug/mavenreport/RefactorFirstMavenCsvReport.java
================================================
package org.hjug.mavenreport;

import java.io.File;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.hjug.refactorfirst.report.CsvReport;

/**
 * Maven mojo that generates the RefactorFirst CSV report by delegating to
 * {@link CsvReport}.
 */
@Mojo(
        name = "csvreport",
        defaultPhase = LifecyclePhase.SITE,
        requiresDependencyResolution = ResolutionScope.RUNTIME,
        requiresProject = false,
        threadSafe = true,
        inheritByDefault = false)
public class RefactorFirstMavenCsvReport extends AbstractMojo {

    /** Whether to include detailed columns in the report output. */
    @Parameter(property = "showDetails")
    private boolean showDetails = false;

    @Parameter(defaultValue = "${project.name}")
    private String projectName;

    @Parameter(defaultValue = "${project.version}")
    private String projectVersion;

    @Parameter(readonly = true, defaultValue = "${project}")
    private MavenProject project;

    // NOTE(review): declared but not read in execute(); retained for configuration
    // compatibility — confirm whether it can be dropped.
    @Parameter(property = "project.build.directory")
    protected File outputDirectory;

    @Override
    public void execute() {
        CsvReport csvReport = new CsvReport();
        csvReport.execute(
                showDetails,
                projectName,
                projectVersion,
                // Strip the interpolated basedir prefix to get a relative output path.
                project.getModel()
                        .getReporting()
                        .getOutputDirectory()
                        .replace("${project.basedir}" + File.separator, ""),
                project.getBasedir());
    }
}
================================================
FILE: refactor-first-maven-plugin/src/main/java/org/hjug/mavenreport/RefactorFirstMavenJsonReport.java
================================================
package org.hjug.mavenreport;

import java.io.File;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.hjug.refactorfirst.report.json.JsonReportExecutor;

/**
 * Maven mojo that generates the RefactorFirst JSON report by delegating to
 * {@link JsonReportExecutor}.
 *
 * <p>Cleanup: removed the unused private constants {@code FILE_NAME} and
 * {@code MAPPER} (and the now-dead jackson-databind import) — nothing in this
 * file referenced them.
 */
@Mojo(
        name = "jsonreport",
        defaultPhase = LifecyclePhase.SITE,
        requiresDependencyResolution = ResolutionScope.RUNTIME,
        requiresProject = false,
        threadSafe = true,
        inheritByDefault = false)
public class RefactorFirstMavenJsonReport extends AbstractMojo {

    @Parameter(readonly = true, defaultValue = "${project}")
    private MavenProject project;

    @Override
    public void execute() {
        JsonReportExecutor jsonReportExecutor = new JsonReportExecutor();
        jsonReportExecutor.execute(
                project.getBasedir(),
                // Strip the interpolated basedir prefix to get a relative output path.
                project.getModel()
                        .getReporting()
                        .getOutputDirectory()
                        .replace("${project.basedir}" + File.separator, ""));
    }
}
================================================
FILE: refactor-first-maven-plugin/src/main/java/org/hjug/mavenreport/RefactorFirstMavenReport.java
================================================
package org.hjug.mavenreport;
import java.util.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.doxia.markup.HtmlMarkup;
import org.apache.maven.doxia.sink.Sink;
import org.apache.maven.doxia.sink.SinkEventAttributes;
import org.apache.maven.doxia.sink.impl.SinkEventAttributeSet;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.reporting.AbstractMavenReport;
import org.hjug.refactorfirst.report.HtmlReport;

/**
 * Maven site report goal {@code report}: embeds the RefactorFirst HTML report into the
 * generated Maven site via the Doxia {@link Sink} API.
 */
@Slf4j
@Mojo(
        name = "report",
        defaultPhase = LifecyclePhase.SITE,
        requiresDependencyResolution = ResolutionScope.RUNTIME,
        requiresProject = false,
        threadSafe = true,
        inheritByDefault = false)
public class RefactorFirstMavenReport extends AbstractMavenReport {

    /** When true, render the detailed (per-metric) disharmony tables. */
    @Parameter(property = "showDetails")
    private boolean showDetails = false;

    /** Upper bound on the number of back edges examined during cycle analysis. */
    @Parameter(property = "backEdgeAnalysisCount")
    protected int backEdgeAnalysisCount = 50;

    /** When true, perform class-cycle analysis in addition to disharmony ranking. */
    @Parameter(property = "analyzeCycles")
    private boolean analyzeCycles = true;

    /** When true, exclude test sources from the analysis. */
    @Parameter(property = "excludeTests")
    private boolean excludeTests = true;

    /**
     * The test source directory containing test class sources.
     */
    @Parameter(property = "testSourceDirectory")
    private String testSourceDirectory;

    @Parameter(defaultValue = "${project.name}")
    private String projectName;

    @Parameter(defaultValue = "${project.version}")
    private String projectVersion;

    public String getOutputName() {
        // This report will generate simple-report.html when invoked in a project with `mvn site`
        return "refactor-first-report";
    }

    public String getName(Locale locale) {
        // Name of the report when listed in the project-reports.html page of a project
        return "Refactor First Report";
    }

    public String getDescription(Locale locale) {
        // Description of the report when listed in the project-reports.html page of a project
        return "Ranks the disharmonies in a codebase. The classes that should be refactored first "
                + " have the highest priority values.";
    }

    /** Writes the head element, then streams the generated report body as raw text into the sink. */
    @Override
    public void executeReport(Locale locale) {
        HtmlReport htmlReport = new HtmlReport();
        Sink mainSink = getSink();
        printHead(mainSink);
        String report = htmlReport
                .generateReport(
                        showDetails,
                        backEdgeAnalysisCount,
                        analyzeCycles,
                        excludeTests,
                        testSourceDirectory,
                        projectName,
                        projectVersion,
                        project.getBasedir())
                .toString();
        mainSink.rawText(report);
    }

    /** Emits the page title and the external JS dependencies used by the charts and graphs. */
    private void printHead(Sink mainSink) {
        mainSink.head();
        mainSink.title();
        mainSink.text("Refactor First Report for " + projectName + " " + projectVersion);
        mainSink.title_();
        // GH Buttons import
        renderJsDeclaration(mainSink, "https://buttons.github.io/buttons.js");
        // google chart import
        renderJsDeclaration(mainSink, "https://www.gstatic.com/charts/loader.js");
        // for DOT graph zooming
        renderJsDeclaration(mainSink, "https://cdn.jsdelivr.net/npm/svg-pan-zoom@3.6.1/dist/svg-pan-zoom.min.js");
        // sigma graph imports - sigma, graphology, graphlib, and graphlib-dot
        renderJsDeclaration(mainSink, "https://cdnjs.cloudflare.com/ajax/libs/sigma.js/2.4.0/sigma.min.js");
        renderJsDeclaration(mainSink, "https://cdnjs.cloudflare.com/ajax/libs/graphology/0.25.4/graphology.umd.min.js");
        // may only need graphlib-dot
        renderJsDeclaration(mainSink, "https://cdnjs.cloudflare.com/ajax/libs/graphlib/2.1.8/graphlib.min.js");
        renderJsDeclaration(mainSink, "https://cdn.jsdelivr.net/npm/graphlib-dot@0.6.4/dist/graphlib-dot.min.js");
        renderJsDeclaration(mainSink, "https://cdn.jsdelivr.net/npm/3d-force-graph");
        mainSink.head_();
    }

    /**
     * Emits a script element pointing at {@code scriptUrl}.
     * @See https://maven.apache.org/doxia/developers/sink.html#How_to_inject_javascript_code_into_HTML
     */
    private void renderJsDeclaration(Sink mainSink, String scriptUrl) {
        SinkEventAttributeSet githubButtonJS = new SinkEventAttributeSet();
        githubButtonJS.addAttribute(SinkEventAttributes.TYPE, "text/javascript");
        githubButtonJS.addAttribute(SinkEventAttributes.SRC, scriptUrl);
        mainSink.unknown("script", new Object[] {HtmlMarkup.TAG_TYPE_START}, githubButtonJS);
        mainSink.unknown("script", new Object[] {HtmlMarkup.TAG_TYPE_END}, null);
    }

    // NOTE(review): appears unused within this class — confirm whether it is dead code.
    private void renderStyle(Sink mainSink) {
        SinkEventAttributeSet githubButtonJS = new SinkEventAttributeSet();
        githubButtonJS.addAttribute(SinkEventAttributes.SRC, HtmlReport.POPUP_STYLE);
        mainSink.unknown("script", new Object[] {HtmlMarkup.TAG_TYPE_START}, githubButtonJS);
        mainSink.unknown("script", new Object[] {HtmlMarkup.TAG_TYPE_END}, null);
    }
}
================================================ FILE: refactor-first-maven-plugin/src/main/java/org/hjug/mavenreport/RefactorFirstSimpleHtmlReport.java ================================================
package org.hjug.mavenreport;

import java.io.File;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.hjug.refactorfirst.report.SimpleHtmlReport;

/**
 * Maven goal {@code simpleHtmlReport}: generates the JavaScript-free, table-only
 * RefactorFirst report by delegating to {@link SimpleHtmlReport#execute}.
 */
@Slf4j
@Mojo(
        name = "simpleHtmlReport",
        defaultPhase = LifecyclePhase.SITE,
        requiresDependencyResolution = ResolutionScope.RUNTIME,
        requiresProject = false,
        threadSafe = true,
        inheritByDefault = false)
public class RefactorFirstSimpleHtmlReport extends AbstractMojo {

    /** When true, render the detailed (per-metric) disharmony tables. */
    @Parameter(property = "showDetails")
    private boolean showDetails = false;

    /** Upper bound on the number of back edges examined during cycle analysis. */
    @Parameter(property = "backEdgeAnalysisCount")
    private int backEdgeAnalysisCount = 50;

    /** When true, perform class-cycle analysis in addition to disharmony ranking. */
    @Parameter(property = "analyzeCycles")
    private boolean analyzeCycles = true;

    /** When true, minify the generated HTML output. */
    @Parameter(property = "minifyHtml")
    private boolean minifyHtml = false;

    /** When true, exclude test sources from the analysis. */
    @Parameter(property = "excludeTests")
    private boolean excludeTests = true;

    /**
     * The test source directory containing test class sources.
     */
    @Parameter(property = "testSourceDirectory")
    private String testSourceDirectory;

    @Parameter(defaultValue = "${project.name}")
    private String projectName;

    @Parameter(defaultValue = "${project.version}")
    private String projectVersion;

    @Parameter(readonly = true, defaultValue = "${project}")
    private MavenProject project;

    @Parameter(property = "project.build.directory")
    protected File outputDirectory;

    @Override
    public void execute() {
        log.info(outputDirectory.getPath());
        SimpleHtmlReport htmlReport = new SimpleHtmlReport();
        htmlReport.execute(
                backEdgeAnalysisCount,
                analyzeCycles,
                showDetails,
                minifyHtml,
                excludeTests,
                testSourceDirectory,
                projectName,
                projectVersion,
                project.getBasedir(),
                project.getModel()
                        .getReporting()
                        .getOutputDirectory()
                        .replace("${project.basedir}" + File.separator, ""));
    }
}
================================================ FILE: report/.gitignore ================================================
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store
================================================ FILE: report/pom.xml ================================================
4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.report report RefactorFirst Report org.hjug.refactorfirst.graphdatagenerator graph-data-generator com.fasterxml.jackson.core jackson-databind in.wilsonl.minifyhtml minify-html
================================================ FILE: report/src/main/java/org/hjug/refactorfirst/report/CsvReport.java ================================================
package
org.hjug.refactorfirst.report;

import static org.hjug.refactorfirst.report.ReportWriter.writeReportToDisk;

import java.io.File;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import lombok.extern.slf4j.Slf4j;
import org.hjug.cbc.CostBenefitCalculator;
import org.hjug.cbc.RankedDisharmony;
import org.hjug.git.GitLogReader;

/**
 * Generates the RefactorFirst rankings as a timestamped CSV file on disk.
 *
 * <p>NOTE(review): several declarations below use raw types ({@code Optional},
 * {@code List}) — generic type parameters appear to have been stripped in this
 * extract; verify against the upstream source.
 */
@Slf4j
public class CsvReport {

    /**
     * Runs the god-class analysis and writes the result as CSV into {@code outputDirectory}.
     *
     * @param showDetails    when true, emit the detailed column set
     * @param projectName    project display name (replaced with the CWD name for POM-less stub projects)
     * @param projectVersion project version, prepended to every data row
     * @param outputDirectory directory the CSV file is written to
     * @param baseDir        project base directory; when null the current working directory is used
     */
    public void execute(
            boolean showDetails, String projectName, String projectVersion, String outputDirectory, File baseDir) {
        StringBuilder fileNameSB = new StringBuilder();
        // NOTE(review): "hh" is the 12-hour clock-hour pattern — "HH" was likely intended; confirm.
        String publishedDate = createFileDateTimeFormatter().format(Instant.now());
        fileNameSB
                .append(getOutputNamePrefix())
                .append("_P")
                .append(projectName)
                .append("_PV")
                .append(projectVersion)
                .append("_PD")
                .append(publishedDate)
                .append(".csv");
        String filename = fileNameSB.toString();
        if (Objects.equals(projectName, "Maven Stub Project (No POM)")) {
            // No POM available: fall back to the current directory name as the project name.
            projectName = new File(Paths.get("").toAbsolutePath().toString()).getName();
        }
        log.info("Generating {} for {} - {} date: {}", filename, projectName, projectVersion, publishedDate);
        StringBuilder contentBuilder = new StringBuilder();
        // git management
        GitLogReader gitLogReader = new GitLogReader();
        String projectBaseDir;
        Optional optionalGitDir;
        if (baseDir != null) {
            projectBaseDir = baseDir.getPath();
            optionalGitDir = Optional.ofNullable(gitLogReader.getGitDir(baseDir));
        } else {
            projectBaseDir = Paths.get("").toAbsolutePath().toString();
            optionalGitDir = Optional.ofNullable(gitLogReader.getGitDir(new File(projectBaseDir)));
        }
        File gitDir;
        if (optionalGitDir.isPresent()) {
            gitDir = optionalGitDir.get();
        } else {
            // No Git repository: write an explanatory report and stop.
            log.info(
                    "Done! No Git repository found! Please initialize a Git repository and perform an initial commit.");
            contentBuilder
                    .append("No Git repository found in project ")
                    .append(projectName)
                    .append(" ")
                    .append(projectVersion)
                    .append(". ");
            contentBuilder.append("Please initialize a Git repository and perform an initial commit.");
            writeReportToDisk(outputDirectory, filename, contentBuilder.toString());
            return;
        }
        String parentOfGitDir = gitDir.getParentFile().getPath();
        log.info("Project Base Dir: {} ", projectBaseDir);
        log.info("Parent of Git Dir: {}", parentOfGitDir);
        if (!projectBaseDir.equals(parentOfGitDir)) {
            log.warn("Project Base Directory does not match Git Parent Directory");
            // NOTE(review): contentBuilder is populated here but never written to disk
            // before returning (unlike the no-repository branch above) — confirm intended.
            contentBuilder.append("Project Base Directory does not match Git Parent Directory. "
                    + "Please refer to the report at the root of the site directory.");
            return;
        }
        // actual calculation
        List rankedDisharmonies;
        // TODO: revisit
        try (CostBenefitCalculator costBenefitCalculator =
                new CostBenefitCalculator(projectBaseDir, new HashMap<>())) {
            costBenefitCalculator.runPmdAnalysis();
            rankedDisharmonies = costBenefitCalculator.calculateGodClassCostBenefitValues();
        } catch (Exception e) {
            log.error("Error running analysis.");
            throw new RuntimeException(e);
        }
        rankedDisharmonies.sort(Comparator.comparing(RankedDisharmony::getPriority));
        // perfect score: no god classes
        if (rankedDisharmonies.isEmpty()) {
            contentBuilder
                    .append("Congratulations! ")
                    .append(projectName)
                    .append(" ")
                    .append(projectVersion)
                    .append(" has no God classes!");
            log.info("Done! No God classes found!");
            writeReportToDisk(outputDirectory, filename, contentBuilder.toString());
            return;
        }
        // create Content
        // header
        final String[] tableHeadings = getHeaderList(showDetails);
        addsRow(contentBuilder, tableHeadings);
        contentBuilder.append("\n");
        // rows: each row is prefixed with the project version and terminated with an "eol" marker column
        for (RankedDisharmony rankedDisharmony : rankedDisharmonies) {
            final String[] rankedDisharmonyData = getDataList(rankedDisharmony, showDetails);
            contentBuilder.append(projectVersion).append(",");
            addsRow(contentBuilder, rankedDisharmonyData);
            contentBuilder.append("eol" + "\n");
        }
        log.info(contentBuilder.toString());
        writeReportToDisk(outputDirectory, filename, contentBuilder.toString());
    }

    /** Formatter used for the timestamp embedded in the generated file name. */
    private DateTimeFormatter createFileDateTimeFormatter() {
        return DateTimeFormatter.ofPattern("yyyyMMddhhmm")
                .withLocale(Locale.getDefault())
                .withZone(ZoneId.systemDefault());
    }

    /** Formatter used for commit timestamps written into CSV cells (ISO local date-time). */
    private DateTimeFormatter createCsvDateTimeFormatter() {
        return DateTimeFormatter.ISO_LOCAL_DATE_TIME
                .withLocale(Locale.getDefault())
                .withZone(ZoneId.systemDefault());
    }

    /** Returns the CSV cell values for one disharmony; column order must match {@link #getHeaderList}. */
    private String[] getDataList(RankedDisharmony rankedDisharmony, boolean showDetails) {
        String[] simpleRankedDisharmonyData = {
            rankedDisharmony.getFileName(),
            rankedDisharmony.getPriority().toString(),
            rankedDisharmony.getChangePronenessRank().toString(),
            rankedDisharmony.getEffortRank().toString(),
            rankedDisharmony.getWmc().toString(),
            createCsvDateTimeFormatter().format(rankedDisharmony.getMostRecentCommitTime()),
            rankedDisharmony.getCommitCount().toString()
        };
        String[] detailedRankedDisharmonyData = {
            rankedDisharmony.getFileName(),
            rankedDisharmony.getPriority().toString(),
            rankedDisharmony.getChangePronenessRank().toString(),
            rankedDisharmony.getEffortRank().toString(),
            rankedDisharmony.getWmc().toString(),
            rankedDisharmony.getWmcRank().toString(),
            rankedDisharmony.getAtfd().toString(),
            rankedDisharmony.getAtfdRank().toString(),
            rankedDisharmony.getTcc().toString(),
            rankedDisharmony.getTccRank().toString(),
            createCsvDateTimeFormatter().format(rankedDisharmony.getFirstCommitTime()),
            createCsvDateTimeFormatter().format(rankedDisharmony.getMostRecentCommitTime()),
            rankedDisharmony.getCommitCount().toString(),
            rankedDisharmony.getPath()
        };
        return showDetails ? detailedRankedDisharmonyData : simpleRankedDisharmonyData;
    }

    /** Returns the heading row; "Ver" covers the project-version value prepended per row in execute(). */
    private String[] getHeaderList(boolean showDetails) {
        final String[] simpleTableHeadings = {
            "Ver",
            "Class",
            "Priority",
            "Change Proneness Rank",
            "Effort Rank",
            "Method Count",
            "Most Recent Commit Date",
            "Commit Count"
        };
        final String[] detailedTableHeadings = {
            "Ver",
            "Class",
            "Priority",
            "Change Proneness Rank",
            "Effort Rank",
            "WMC",
            "WMC Rank",
            "ATFD",
            "ATFD Rank",
            "TCC",
            "TCC Rank",
            "Date of First Commit",
            "Most Recent Commit Date",
            "Commit Count",
            "Full Path"
        };
        return showDetails ? detailedTableHeadings : simpleTableHeadings;
    }

    /** Appends each cell followed by a comma (including a trailing comma after the last cell). */
    private void addsRow(StringBuilder contentBuilder, String[] rankedDisharmonyData) {
        for (String rowData : rankedDisharmonyData) {
            contentBuilder.append(rowData).append(",");
        }
    }

    public String getOutputNamePrefix() {
        // This report will generate simple-report.html when invoked in a project with `mvn site`
        return "RefFirst";
    }

    public String getName(Locale locale) {
        // Name of the report when listed in the project-reports.html page of a project
        return "Refactor First Report data";
    }

    public String getDescription(Locale locale) {
        // Description of the report when listed in the project-reports.html page of a project
        return "DRACO Ranks the disharmonies in a codebase. 
The classes that should be refactored first " + " have the highest priority values."; } } ================================================ FILE: report/src/main/java/org/hjug/refactorfirst/report/HtmlReport.java ================================================ package org.hjug.refactorfirst.report; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.hjug.cbc.RankedCycle; import org.hjug.cbc.RankedDisharmony; import org.hjug.gdg.GraphDataGenerator; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultWeightedEdge; @Slf4j public class HtmlReport extends SimpleHtmlReport { int d3Threshold = 700; // use Files.readString(Path.of(file)) // Created by generative AI and modified slightly public static final String SUGIYAMA_SIGMA_GRAPH = ""; public static final String FORCE_3D_GRAPH = ""; // Created by generative AI and modified public static final String POPUP_STYLE = ""; // Created by generative AI and modified public static final String POPUP_FUNCTIONS = ""; private static final String GOD_CLASS_CHART_LEGEND = "

God Class Chart Legend:

" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + "
X-Axis: Effort to refactor to a non-God class
Y-Axis: Relative churn
Color: Priority of what to fix first
Circle size: Priority (Visual) of what to fix first
" + "
"; private static final String COUPLING_BETWEEN_OBJECT_CHART_LEGEND = "

Coupling Between Objects Chart Legend:

" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + "
X-Axis: Number of objects the class is coupled to
Y-Axis: Relative churn
Color: Priority of what to fix first
Circle size: Priority (Visual) of what to fix first
" + "
"; @Override public String printHead() { // !Remember to update RefactorFirstMavenReport if this is modified return // GH Buttons import "\n" // google chart import + "\n" // d3 dot graph imports // + "\n" // + "\n" // + "\n" // + "\n" + "" // sigma graph imports - sigma, graphology, graphlib, and graphlib-dot + "\n" + "\n" // may only need graphlib-dot + "\n" + "\n" + "\n"; } String printScripts() { return SUGIYAMA_SIGMA_GRAPH + FORCE_3D_GRAPH + POPUP_FUNCTIONS + POPUP_STYLE; } @Override public String printOpenBodyTag() { return " \n" + printOverlay(); } private String printOverlay() { return "
"; } @Override public String printTitle(String projectName, String projectVersion) { return "Refactor First Report for " + projectName + " " + projectVersion + " \n"; } @Override String renderGithubButtons() { return "
\n" + "Show RefactorFirst some ❤️\n" + "
\n" + "Star\n" + "Fork\n" + "Watch\n" + "Issue\n" + "Sponsor\n" + "
"; } @Override String writeGodClassGchartJs(List rankedDisharmonies, int maxPriority) { GraphDataGenerator graphDataGenerator = new GraphDataGenerator(); String scriptStart = graphDataGenerator.getGodClassScriptStart(); String bubbleChartData = graphDataGenerator.generateGodClassBubbleChartData(rankedDisharmonies, maxPriority); String scriptEnd = graphDataGenerator.getGodClassScriptEnd(); return scriptStart + bubbleChartData + scriptEnd; } @Override String writeGCBOGchartJs(List rankedDisharmonies, int maxPriority) { GraphDataGenerator graphDataGenerator = new GraphDataGenerator(); String scriptStart = graphDataGenerator.getCBOScriptStart(); String bubbleChartData = graphDataGenerator.generateCBOBubbleChartData(rankedDisharmonies, maxPriority); String scriptEnd = graphDataGenerator.getCBOScriptEnd(); return scriptStart + bubbleChartData + scriptEnd; } public String getName(Locale locale) { // Name of the report when listed in the project-reports.html page of a project return "Refactor First Report"; } public String getDescription(Locale locale) { // Description of the report when listed in the project-reports.html page of a project return "Ranks the disharmonies in a codebase. The classes that should be refactored first " + " have the highest priority values."; } @Override String renderGodClassChart(List rankedGodClassDisharmonies, int maxGodClassPriority) { StringBuilder stringBuilder = new StringBuilder(); String godClassChart = writeGodClassGchartJs(rankedGodClassDisharmonies, maxGodClassPriority - 1); stringBuilder.append( "
\n"); stringBuilder.append(renderGithubButtons()); stringBuilder.append(GOD_CLASS_CHART_LEGEND); return stringBuilder.toString(); } @Override String renderCBOChart(List rankedCBODisharmonies, int maxCboPriority) { StringBuilder stringBuilder = new StringBuilder(); String cboChart = writeGCBOGchartJs(rankedCBODisharmonies, maxCboPriority - 1); stringBuilder.append( "
\n"); stringBuilder.append(renderGithubButtons()); stringBuilder.append(COUPLING_BETWEEN_OBJECT_CHART_LEGEND); return stringBuilder.toString(); } @Override public String renderClassGraphVisuals() { String dot = buildClassGraphDot(classGraph); String classGraphName = "classGraph"; StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append(generateGraphButtons(classGraphName, dot)); stringBuilder.append( "
Excludes classes that have no incoming and outgoing edges
"); int classCount = classGraph.vertexSet().size(); int relationshipCount = classGraph.edgeSet().size(); stringBuilder.append("
Number of classes: " + classCount + " Number of relationships: " + relationshipCount + "
"); if (classCount + relationshipCount < d3Threshold) { stringBuilder.append(generateDotImage(classGraphName)); } else { // revisit and add DOT SVG popup button stringBuilder.append("
\nSVG is too big to render quickly
\n"); } return stringBuilder.toString(); } private StringBuilder generateGraphButtons(String graphName, String dot) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("

Class Map

"); stringBuilder.append("\n"); stringBuilder.append(generateForce3DPopup(graphName)); stringBuilder.append(generate2DPopup(graphName)); stringBuilder.append(generateHidePopup(graphName)); stringBuilder.append("
\nRed lines represent relationships to remove.
\n"); stringBuilder.append("Red nodes represent classes to remove.
\n"); stringBuilder.append("Zoom in / out with your mouse wheel and click/move to drag the image.\n"); stringBuilder.append("
\n"); return stringBuilder; } private static String generateDotImage(String graphName) { // revisit and add D3 popup button as well return "
\n" + "\n"; } String buildClassGraphDot(Graph classGraph) { StringBuilder dot = new StringBuilder(); dot.append("`strict digraph G {\n"); for (DefaultWeightedEdge edge : classGraph.edgeSet()) { renderEdge(classGraph, edge, dot); } // capture only classes that have a relationship with one or more other classes Set vertexesToRender = new HashSet<>(); for (DefaultWeightedEdge edge : classGraph.edgeSet()) { String[] vertexes = extractVertexes(edge); vertexesToRender.add(vertexes[0].trim()); vertexesToRender.add(vertexes[1].trim()); } // render vertices for (String vertex : vertexesToRender) { String className = getClassName(vertex); // if the vertex is a nested class and has no outgoing edges, skip it if (className.contains("$") && className.split("\\$")[className.split("\\$").length - 1].matches("\\d+") && classGraph.outDegreeOf(vertex) == 0) { log.info("Skipping vertex: {}", className); continue; } dot.append(className.replace("$", "_")); if (vertexesToRemove.contains(vertex)) { dot.append(" [color=red style=filled]\n"); } dot.append(";\n"); } dot.append("}`;"); return dot.toString(); } private void renderEdge( Graph classGraph, DefaultWeightedEdge edge, StringBuilder dot) { // render edge String[] vertexes = extractVertexes(edge); // String start = getClassName(vertexes[0].trim()).replace("$", "_"); // String end = getClassName(vertexes[1].trim()).replace("$", "_"); String startVertex = vertexes[0].trim(); String start = getClassName(startVertex.trim()).replace("$", "_"); String endVertex = vertexes[1].trim(); String end = getClassName(endVertex.trim()).replace("$", "_"); // if the vertex is a nested class and has no outgoing edges, skip it if (start.contains("$") && start.split("\\$")[startVertex.split("\\$").length - 1].matches("\\d+") && classGraph.outDegreeOf(startVertex) == 0) { log.info("Skipping edge: {} -> {}", startVertex, endVertex); return; } if (endVertex.contains("$") && endVertex.split("\\$")[endVertex.split("\\$").length - 1].matches("\\d+") && 
classGraph.outDegreeOf(endVertex) == 0) { log.info("Skipping edge: {} -> {}", startVertex, endVertex); return; } log.info("Rendering edge: {} -> {}", startVertex, endVertex); dot.append(start); dot.append(" -> "); dot.append(end); // render edge attributes int edgeWeight = (int) classGraph.getEdgeWeight(edge); dot.append(" [ "); dot.append("label = \""); dot.append(edgeWeight); dot.append("\" "); dot.append("weight = \""); dot.append(edgeWeight); dot.append("\""); if (edgesToRemove.contains(edge)) { dot.append(" color = \"red\""); } dot.append(" ];\n"); } @Override public String renderCycleVisuals(RankedCycle cycle) { String dot = buildCycleDot(classGraph, cycle); String cycleName = getClassName(cycle.getCycleName()).replace("$", "_"); StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append(generateGraphButtons(cycleName, dot)); if (cycle.getCycleNodes().size() + cycle.getEdgeSet().size() < d3Threshold) { stringBuilder.append(generateDotImage(cycleName)); } else { // revisit and add DOT SVG popup button stringBuilder.append("
\nSVG is too big to render quickly
\n"); } stringBuilder.append("
\n"); stringBuilder.append("
\n"); return stringBuilder.toString(); } String buildCycleDot(Graph classGraph, RankedCycle cycle) { StringBuilder dot = new StringBuilder(); dot.append("`strict digraph G {\n"); for (DefaultWeightedEdge edge : cycle.getEdgeSet()) { renderEdge(classGraph, edge, dot); } // render vertices for (String vertex : cycle.getVertexSet()) { dot.append(getClassName(vertex).replace("$", "_")); if (vertexesToRemove.contains(vertex)) { dot.append(" [color=red style=filled]\n"); } dot.append(";\n"); } dot.append("}`;"); return dot.toString().replace("$", "_"); } String generate2DPopup(String cycleName) { // Created by generative AI and modified return "\n"; } String generateForce3DPopup(String cycleName) { // Created by generative AI and modified return "\n"; } String generateHidePopup(String cycleName) { return "
\n" + "×\n" + "
" + "\n
\n"; } }
================================================ FILE: report/src/main/java/org/hjug/refactorfirst/report/ReportWriter.java ================================================
package org.hjug.refactorfirst.report;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import lombok.extern.slf4j.Slf4j;

/**
 * Utility that persists generated report content to disk.
 */
@Slf4j
public class ReportWriter {

    /**
     * Writes {@code string} to {@code reportOutputDirectory/filename}, creating the
     * output directory and file if they do not exist. I/O failures are logged and
     * swallowed rather than propagated.
     */
    public static void writeReportToDisk(
            final String reportOutputDirectory, final String filename, final String string) {
        final File reportOutputDir = new File(reportOutputDirectory);
        if (!reportOutputDir.exists()) {
            // NOTE(review): mkdirs() return value is ignored — a failure here surfaces
            // later as an IOException from createNewFile/newBufferedWriter.
            reportOutputDir.mkdirs();
        }
        final String pathname = reportOutputDirectory + File.separator + filename;
        final File reportFile = new File(pathname);
        try {
            // Returns false (no-op) when the file already exists; result intentionally unused.
            reportFile.createNewFile();
        } catch (IOException e) {
            log.error("Failure creating chart script file", e);
        }
        // NOTE(review): uses the platform default charset — confirm UTF-8 is not required.
        try (BufferedWriter writer = Files.newBufferedWriter(reportFile.toPath(), Charset.defaultCharset())) {
            writer.write(string);
        } catch (IOException e) {
            log.error("Error writing chart script file", e);
        }
        log.info("Done! View the report at target/site/{}", filename);
    }
}
================================================ FILE: report/src/main/java/org/hjug/refactorfirst/report/SimpleHtmlReport.java ================================================
package org.hjug.refactorfirst.report;

import static org.hjug.refactorfirst.report.ReportWriter.writeReportToDisk;

import in.wilsonl.minifyhtml.Configuration;
import in.wilsonl.minifyhtml.MinifyHtml;
import java.io.File;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.util.*;
import lombok.extern.slf4j.Slf4j;
import org.hjug.cbc.*;
import org.hjug.dsm.CircularReferenceChecker;
import org.hjug.feedback.SuperTypeToken;
import org.hjug.feedback.arc.pageRank.PageRankFAS;
import org.hjug.feedback.vertex.kernelized.DirectedFeedbackVertexSetResult;
import org.hjug.feedback.vertex.kernelized.DirectedFeedbackVertexSetSolver;
import org.hjug.feedback.vertex.kernelized.EnhancedParameterComputer;
import org.hjug.git.GitLogReader;
import org.jgrapht.Graph;
import org.jgrapht.graph.AsSubgraph;
import org.jgrapht.graph.DefaultWeightedEdge;

/**
 * Strictly HTML report that contains no JavaScript
 * Generates only tables
 */
@Slf4j
public class SimpleHtmlReport {

    // NOTE(review): the HTML markup inside these constants appears to have been
    // stripped in this extract — verify their content against the upstream source.
    public static final String THE_BEGINNING = "\n" + "\n";

    public static final String THE_END = "\n" + " \n" + " \n" + "\n";

    /** Column headings for the condensed god-class table. */
    public final String[] godClassSimpleTableHeadings = {
        "Class", "Priority", "Change Proneness Rank", "Effort Rank", "Method Count", "Most Recent Commit Date", "Commit Count"
    };

    /** Column headings for the detailed god-class table (per-metric values and ranks). */
    public final String[] godClassDetailedTableHeadings = {
        "Class", "Priority", "Raw Priority", "Change Proneness Rank", "Effort Rank", "WMC", "WMC Rank", "ATFD", "ATFD Rank", "TCC", "TCC Rank", "Date of First Commit", "Most Recent Commit Date", "Commit Count", "Full Path"
    };

    /** Column headings for the coupling-between-objects table. */
    public final String[] cboTableHeadings = {
        "Class", "Priority", "Change Proneness Rank", "Coupling Count", "Most 
Recent Commit Date", "Commit Count" }; public final String[] classCycleTableHeadings = {"Classes", "Relationships"}; Graph classGraph; Map> cycles; Set vertexesToRemove = Set.of(); // initialize for unit tests Set edgesToRemove = Set.of(); DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedDateTime(FormatStyle.SHORT) .withLocale(Locale.getDefault()) .withZone(ZoneId.systemDefault()); private final Configuration htmlMinifierConfig = new Configuration.Builder() .setKeepHtmlAndHeadOpeningTags(true) .setKeepComments(false) .setMinifyJs(true) .setMinifyCss(true) .build(); public void execute( int edgeAnalysisCount, boolean analyzeCycles, boolean showDetails, boolean minifyHtml, boolean excludeTests, String testSourceDirectory, String projectName, String projectVersion, File baseDir, String outputDirectory) { String filename = getOutputName() + ".html"; log.info("Generating {} for {} - {}", filename, projectName, projectVersion); StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append(THE_BEGINNING); stringBuilder.append(""); stringBuilder.append(printTitle(projectName, projectVersion)); stringBuilder.append(printHead()); stringBuilder.append(""); stringBuilder.append(generateReport( showDetails, edgeAnalysisCount, analyzeCycles, excludeTests, testSourceDirectory, projectName, projectVersion, baseDir)); stringBuilder.append(printProjectFooter()); stringBuilder.append(THE_END); String reportHtml; if (minifyHtml) { reportHtml = MinifyHtml.minify(stringBuilder.toString(), htmlMinifierConfig); } else { reportHtml = stringBuilder.toString(); } writeReportToDisk(outputDirectory, filename, reportHtml); log.info("Done! 
View the report at target/site/{}", filename); } public StringBuilder generateReport( boolean showDetails, int edgeAnalysisCount, boolean analyzeCycles, boolean excludeTests, String testSourceDirectory, String projectName, String projectVersion, File baseDir) { if (testSourceDirectory == null || testSourceDirectory.isEmpty()) { testSourceDirectory = "src" + File.separator + "test"; } StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append(printOpenBodyTag()); stringBuilder.append(printScripts()); stringBuilder.append(printBreadcrumbs()); stringBuilder.append(printProjectHeader(projectName, projectVersion)); GitLogReader gitLogReader = new GitLogReader(); String projectBaseDir; Optional optionalGitDir; if (baseDir != null) { projectBaseDir = baseDir.getPath(); optionalGitDir = Optional.ofNullable(gitLogReader.getGitDir(baseDir)); } else { projectBaseDir = Paths.get("").toAbsolutePath().toString(); optionalGitDir = Optional.ofNullable(gitLogReader.getGitDir(new File(projectBaseDir))); } File gitDir; if (optionalGitDir.isPresent()) { gitDir = optionalGitDir.get(); } else { log.info( "Done! No Git repository found! Please initialize a Git repository and perform an initial commit."); stringBuilder .append("No Git repository found in project ") .append(projectName) .append(" ") .append(projectVersion) .append(". "); stringBuilder.append("Please initialize a Git repository and perform an initial commit."); return stringBuilder; } String parentOfGitDir = gitDir.getParentFile().getPath(); log.info("Project Base Dir: {} ", projectBaseDir); log.info("Parent of Git Dir: {}", parentOfGitDir); if (!projectBaseDir.equals(parentOfGitDir)) { log.warn("Project Base Directory does not match Git Parent Directory"); stringBuilder.append("Project Base Directory does not match Git Parent Directory. 
" + "Please refer to the report at the root of the site directory."); return stringBuilder; } CycleRanker cycleRanker = new CycleRanker(projectBaseDir); List rankedCycles = List.of(); if (analyzeCycles) { log.info("Analyzing Cycles"); rankedCycles = cycleRanker.performCycleAnalysis(excludeTests, testSourceDirectory); } else { cycleRanker.generateClassReferencesGraph(excludeTests, testSourceDirectory); } classGraph = cycleRanker.getClassReferencesGraph(); cycles = new CircularReferenceChecker().getCycles(classGraph); // Identify vertexes to remove log.info("Identifying vertexes to remove"); EnhancedParameterComputer enhancedParameterComputer = new EnhancedParameterComputer<>(new SuperTypeToken<>() {}); EnhancedParameterComputer.EnhancedParameters parameters = enhancedParameterComputer.computeOptimalParameters(classGraph, 4); DirectedFeedbackVertexSetSolver vertexSolver = new DirectedFeedbackVertexSetSolver<>( classGraph, parameters.getModulator(), null, parameters.getEta(), new SuperTypeToken<>() {}); DirectedFeedbackVertexSetResult vertexSetResult = vertexSolver.solve(parameters.getK()); vertexesToRemove = vertexSetResult.getFeedbackVertices(); // Identify edges to remove log.info("Identifying edges to remove"); PageRankFAS pageRankFAS = new PageRankFAS<>(classGraph, new SuperTypeToken<>() {}); edgesToRemove = pageRankFAS.computeFeedbackArcSet(); // capture the number of cycles each edge to remove is in Map edgeToRemoveCycleCounts = new HashMap<>(); for (DefaultWeightedEdge edgeToRemove : edgesToRemove) { int cycleCount = 0; for (AsSubgraph cycle : cycles.values()) { if (cycle.containsEdge(edgeToRemove)) { cycleCount++; } } edgeToRemoveCycleCounts.put(edgeToRemove, cycleCount); } // int edgeWeight = (int) classGraph.getEdgeWeight(defaultWeightedEdge); // map sources to CycleNodes to get paths and get churn in try/finally block below Map sourceNodeInfos = new HashMap<>(); Map targetNodeInfos = new HashMap<>(); for (DefaultWeightedEdge defaultWeightedEdge : 
edgesToRemove) { String edgeSource = classGraph.getEdgeSource(defaultWeightedEdge); CycleNode sourceNode = cycleRanker.classToCycleNode(edgeSource); sourceNodeInfos.put(defaultWeightedEdge, sourceNode); String edgeTarget = classGraph.getEdgeTarget(defaultWeightedEdge); CycleNode targetNode = cycleRanker.classToCycleNode(edgeTarget); targetNodeInfos.put(defaultWeightedEdge, targetNode); } List rankedGodClassDisharmonies = List.of(); List rankedCBODisharmonies = List.of(); List edgeDisharmonies = List.of(); log.info("Identifying Object Oriented Disharmonies"); try (CostBenefitCalculator costBenefitCalculator = new CostBenefitCalculator( projectBaseDir, cycleRanker.getCodebaseGraphDTO().getClassToSourceFilePathMapping())) { costBenefitCalculator.runPmdAnalysis(excludeTests, testSourceDirectory); rankedGodClassDisharmonies = costBenefitCalculator.calculateGodClassCostBenefitValues(); rankedCBODisharmonies = costBenefitCalculator.calculateCBOCostBenefitValues(); edgeDisharmonies = costBenefitCalculator.calculateSourceNodeCostBenefitValues( classGraph, sourceNodeInfos, targetNodeInfos, edgeToRemoveCycleCounts, vertexesToRemove); } catch (Exception e) { log.error("Error running analysis."); throw new RuntimeException(e); } // TODO: Incorporate node information and guidance into Edge Infos // - Source / target vertex in list of vertexes to remove // - How many cycles is the edge present in // - Edge weight // - Provide guidance on where to move the method if one is in the list to remove if (edgesToRemove.isEmpty() && rankedGodClassDisharmonies.isEmpty() && rankedCBODisharmonies.isEmpty() && rankedCycles.isEmpty()) { stringBuilder .append("Congratulations! ") .append(projectName) .append(" ") .append(projectVersion) .append(" has no Back Edges, God classes, Highly Coupled Classes, or Cycles!"); stringBuilder.append(renderGithubButtons()); log.info("Done! 
No Disharmonies found!"); return stringBuilder; } if (!edgesToRemove.isEmpty()) { stringBuilder.append("Edges To Remove\n"); stringBuilder.append("
\n"); } if (!rankedGodClassDisharmonies.isEmpty()) { stringBuilder.append("God Classes\n"); stringBuilder.append("
\n"); } if (!rankedCBODisharmonies.isEmpty()) { stringBuilder.append("Highly Coupled Classes\n"); stringBuilder.append("
\n"); } if (!rankedCycles.isEmpty()) { stringBuilder.append("Class Cycles\n"); } log.info("Generating HTML Report"); stringBuilder.append(renderClassGraphVisuals()); stringBuilder.append("
\n"); stringBuilder.append(renderGithubButtons()); stringBuilder.append("
\n"); if (!edgeDisharmonies.isEmpty()) { stringBuilder.append(renderEdgeDisharmonies(edgeDisharmonies)); stringBuilder.append("
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "
\n"); } if (!rankedGodClassDisharmonies.isEmpty()) { final String[] godClassTableHeadings = showDetails ? godClassDetailedTableHeadings : godClassSimpleTableHeadings; stringBuilder.append(renderGodClassInfo(showDetails, rankedGodClassDisharmonies, godClassTableHeadings)); stringBuilder.append("
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "
\n"); } if (!rankedCBODisharmonies.isEmpty()) { stringBuilder.append(renderHighlyCoupledClassInfo(rankedCBODisharmonies)); stringBuilder.append("
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "
\n"); } if (!rankedCycles.isEmpty()) { stringBuilder.append(renderCycles(rankedCycles)); } stringBuilder.append("\n"); log.debug(stringBuilder.toString()); return stringBuilder; } private String renderCycles(List rankedCycles) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append(renderClassCycleSummary(rankedCycles)); rankedCycles.stream().limit(1).map(this::renderSingleCycle).forEach(stringBuilder::append); return stringBuilder.toString(); } private String renderEdgeDisharmonies(List edgeDisharmonies) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append( "\n"); stringBuilder.append("

Refactor Starting with Priority 1

\n"); stringBuilder.append("
\n"); stringBuilder.append("Current Cycle Count: ").append(cycles.size()).append("
\n"); stringBuilder .append("Number of Relationships to Remove: ") .append(edgesToRemove.size()) .append("
\n"); stringBuilder.append("Classes in bold should be broken apart").append("
\n"); stringBuilder.append("
\n"); // Content stringBuilder.append("\n"); stringBuilder.append("\n\n"); for (String heading : getEdgeDisharmonyTableHeadings()) { stringBuilder.append("\n"); } stringBuilder.append("\n"); stringBuilder.append("\n"); for (RankedDisharmony edge : edgeDisharmonies) { stringBuilder.append("\n"); for (String rowData : getEdgeDisharmony(edge)) { stringBuilder.append(drawTableCell(rowData)); } stringBuilder.append("\n"); } stringBuilder.append("\n"); stringBuilder.append("
").append(heading).append("
\n"); return stringBuilder.toString(); } private String[] getEdgeDisharmonyTableHeadings() { return new String[] { "Relationship", "Priority", "In Cycles", "Edge
Weight", "Source
Change Proneness Rank", "Target
Change Proneness Rank", }; } private String[] getEdgeDisharmony(RankedDisharmony edgeInfo) { return new String[] { renderEdge(edgeInfo.getEdge()), String.valueOf(edgeInfo.getPriority()), String.valueOf(edgeInfo.getCycleCount()), String.valueOf(edgeInfo.getEffortRank()), String.valueOf(edgeInfo.getChangePronenessRank()), String.valueOf(edgeInfo.getEdgeTargetChangePronenessRank()), }; } private String renderClassCycleSummary(List rankedCycles) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("\n"); /*if (rankedCycles.size() > 10) { stringBuilder.append( "
10 largest cycles are shown in the sections below
\n"); }*/ stringBuilder.append("

Class Cycles by the numbers:

\n"); stringBuilder.append("\n"); // Content stringBuilder.append("\n\n"); for (String heading : getCycleSummaryTableHeadings()) { stringBuilder.append("\n"); } stringBuilder.append("\n"); stringBuilder.append("\n"); for (RankedCycle cycle : rankedCycles) { stringBuilder.append("\n"); StringBuilder edges = new StringBuilder(); for (DefaultWeightedEdge edge : cycle.getMinCutEdges()) { if (edgesToRemove.contains(edge)) { stringBuilder.append(""); edges.append(renderEdge(edge)); stringBuilder.append(""); } else { edges.append(renderEdge(edge)); } edges.append("
\n"); } for (String rowData : getRankedCycleSummaryData(cycle, edges)) { stringBuilder.append(drawTableCell(rowData)); } stringBuilder.append("\n"); } stringBuilder.append("\n"); stringBuilder.append("
").append(heading).append("
\n"); return stringBuilder.toString(); } private String renderEdge(DefaultWeightedEdge edge) { StringBuilder edgesToCut = new StringBuilder(); String[] vertexes = extractVertexes(edge); String startVertex = vertexes[0].trim(); String start; if (vertexesToRemove.contains(startVertex)) { start = "" + getClassName(startVertex) + ""; } else { start = getClassName(startVertex); } String endVertex = vertexes[1].trim(); String end; if (vertexesToRemove.contains(endVertex)) { end = "" + getClassName(endVertex) + ""; } else { end = getClassName(endVertex); } // → is HTML "Right Arrow" code return edgesToCut .append(start + " → " + end + " : " + (int) classGraph.getEdgeWeight(edge)) .toString(); } private String[] getCycleSummaryTableHeadings() { return new String[] {"Cycle Name", "Priority", "Class Count", "Relationship Count" /*, "Minimum Cuts"*/}; } private String[] getRankedCycleSummaryData(RankedCycle rankedCycle, StringBuilder edgesToCut) { return new String[] { // "Cycle Name", "Priority", "Class Count", "Relationship Count", "Min Cuts" getClassName(rankedCycle.getCycleName()), rankedCycle.getPriority().toString(), String.valueOf(rankedCycle.getCycleNodes().size()), String.valueOf(rankedCycle.getEdgeSet().size()) }; } private String renderSingleCycle(RankedCycle cycle) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("
\n"); stringBuilder.append("
\n"); stringBuilder.append("
\n"); stringBuilder.append("
\n"); stringBuilder.append("
\n"); stringBuilder.append( "

Largest Class Cycle : " + getClassName(cycle.getCycleName()) + "

\n"); stringBuilder.append( "

Limiting number of cycles displayed to 1 to keep page load time fast

\n"); stringBuilder.append(renderCycleVisuals(cycle)); stringBuilder.append("
"); stringBuilder.append(""); stringBuilder.append("Bold text indicates class or relationship to remove to decompose cycle"); stringBuilder.append(""); int classCount = cycle.getCycleNodes().size(); int relationshipCount = cycle.getEdgeSet().size(); stringBuilder.append("
Number of classes: " + classCount + " Number of relationships: " + relationshipCount + "
"); stringBuilder.append("
\n"); stringBuilder.append("\n"); // Content stringBuilder.append("\n\n"); for (String heading : classCycleTableHeadings) { stringBuilder.append("\n"); } stringBuilder.append("\n"); stringBuilder.append("\n"); for (String vertex : cycle.getVertexSet()) { stringBuilder.append(""); String className = getClassName(vertex); if (vertexesToRemove.contains(vertex)) { className = "" + className + ""; } stringBuilder.append(drawTableCell(className)); StringBuilder edges = new StringBuilder(); for (DefaultWeightedEdge edge : cycle.getEdgeSet()) { if (edge.toString().startsWith("(" + vertex + " :")) { if (edgesToRemove.contains(edge)) { edges.append(""); edges.append(renderEdge(edge)); if (cycle.getMinCutEdges().contains(edge)) { edges.append("*"); } edges.append(""); } else { edges.append(renderEdge(edge)); } edges.append("
\n"); } } stringBuilder.append(drawTableCell(edges.toString())); stringBuilder.append("\n"); } stringBuilder.append("\n"); stringBuilder.append("
").append(heading).append("
\n"); return stringBuilder.toString(); } public String renderClassGraphVisuals() { return ""; // empty on purpose } public String renderCycleVisuals(RankedCycle cycle) { return ""; // empty on purpose } private String renderGodClassInfo( boolean showDetails, List rankedGodClassDisharmonies, String[] godClassTableHeadings) { int maxGodClassPriority = rankedGodClassDisharmonies .get(rankedGodClassDisharmonies.size() - 1) .getPriority(); StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("\n"); stringBuilder.append(renderGodClassChart(rankedGodClassDisharmonies, maxGodClassPriority)); stringBuilder.append( "

God classes by the numbers: (Refactor Starting with Priority 1)

\n"); stringBuilder.append("\n"); // Content stringBuilder.append(""); for (String heading : godClassTableHeadings) { stringBuilder.append("\n"); } stringBuilder.append("\n\n"); stringBuilder.append("\n"); for (RankedDisharmony rankedGodClassDisharmony : rankedGodClassDisharmonies) { stringBuilder.append("\n"); String[] simpleRankedGodClassDisharmonyData = { rankedGodClassDisharmony.getFileName(), rankedGodClassDisharmony.getPriority().toString(), rankedGodClassDisharmony.getChangePronenessRank().toString(), rankedGodClassDisharmony.getEffortRank().toString(), rankedGodClassDisharmony.getWmc().toString(), formatter.format(rankedGodClassDisharmony.getMostRecentCommitTime()), rankedGodClassDisharmony.getCommitCount().toString() }; String[] detailedRankedGodClassDisharmonyData = { rankedGodClassDisharmony.getFileName(), rankedGodClassDisharmony.getPriority().toString(), rankedGodClassDisharmony.getRawPriority().toString(), rankedGodClassDisharmony.getChangePronenessRank().toString(), rankedGodClassDisharmony.getEffortRank().toString(), rankedGodClassDisharmony.getWmc().toString(), rankedGodClassDisharmony.getWmcRank().toString(), rankedGodClassDisharmony.getAtfd().toString(), rankedGodClassDisharmony.getAtfdRank().toString(), rankedGodClassDisharmony.getTcc().toString(), rankedGodClassDisharmony.getTccRank().toString(), formatter.format(rankedGodClassDisharmony.getFirstCommitTime()), formatter.format(rankedGodClassDisharmony.getMostRecentCommitTime()), rankedGodClassDisharmony.getCommitCount().toString(), rankedGodClassDisharmony.getPath() }; final String[] rankedDisharmonyData = showDetails ? detailedRankedGodClassDisharmonyData : simpleRankedGodClassDisharmonyData; for (String rowData : rankedDisharmonyData) { stringBuilder.append(drawTableCell(rowData)); } stringBuilder.append("\n"); } stringBuilder.append("\n"); stringBuilder.append("
").append(heading).append("
\n"); return stringBuilder.toString(); } private String renderHighlyCoupledClassInfo(List rankedCBODisharmonies) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append( ""); int maxCboPriority = rankedCBODisharmonies.get(rankedCBODisharmonies.size() - 1).getPriority(); stringBuilder.append(renderCBOChart(rankedCBODisharmonies, maxCboPriority)); stringBuilder.append( "

Highly Coupled classes by the numbers: (Refactor starting with Priority 1)

"); stringBuilder.append(""); // Content stringBuilder.append(""); for (String heading : cboTableHeadings) { stringBuilder.append(""); } stringBuilder.append(""); stringBuilder.append(""); for (RankedDisharmony rankedCboClassDisharmony : rankedCBODisharmonies) { stringBuilder.append(""); String[] rankedCboClassDisharmonyData = { rankedCboClassDisharmony.getFileName(), rankedCboClassDisharmony.getPriority().toString(), rankedCboClassDisharmony.getChangePronenessRank().toString(), rankedCboClassDisharmony.getEffortRank().toString(), formatter.format(rankedCboClassDisharmony.getMostRecentCommitTime()), rankedCboClassDisharmony.getCommitCount().toString() }; for (String rowData : rankedCboClassDisharmonyData) { stringBuilder.append(drawTableCell(rowData)); } stringBuilder.append(""); } stringBuilder.append(""); stringBuilder.append("
").append(heading).append("
"); return stringBuilder.toString(); } String drawTableCell(String rowData) { if (isNumber(rowData) || isDateTime(rowData)) { return new StringBuilder() .append("") .append(rowData) .append("\n") .toString(); } else { return new StringBuilder() .append("") .append(rowData) .append("\n") .toString(); } } boolean isNumber(String rowData) { return rowData.matches("-?\\d+(\\.\\d+)?"); } boolean isDateTime(String rowData) { return rowData.contains(", "); } public String printTitle(String projectName, String projectVersion) { return ""; // empty on purpose } public String printHead() { return ""; // empty on purpose } String printScripts() { return ""; // empty on purpose } public String printOpenBodyTag() { return " \n"; } public String printBreadcrumbs() { return "
\n" + "
\n" + "
\n" + "
\n"; } public String printProjectHeader(String projectName, String projectVersion) { return "
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "
\n" + "

RefactorFirst Report for " + projectName + " " + projectVersion + "

\n"; } public String printProjectFooter() { return "
\n" + "
\n" + "
\n" + "Last Published: " + formatter.format(Instant.now()) + "
\n" + "
\n" + "
\n" + "
"; } String renderGithubButtons() { return ""; // empty on purpose } String getOutputName() { // This report will generate simple-report.html when invoked in a project with `mvn site` return "refactor-first-report"; } String renderGodClassChart(List rankedGodClassDisharmonies, int maxGodClassPriority) { return ""; // empty on purpose } String writeGodClassGchartJs(List rankedDisharmonies, int maxPriority) { // return empty string on purpose return ""; } String writeGCBOGchartJs(List rankedDisharmonies, int maxPriority) { // return empty string on purpose return ""; } String renderCBOChart(List rankedCBODisharmonies, int maxCboPriority) { return ""; // empty on purpose } String getClassName(String fqn) { // handle no package if (!fqn.contains(".")) { return fqn; } int lastIndex = fqn.lastIndexOf("."); return fqn.substring(lastIndex + 1); } static String[] extractVertexes(DefaultWeightedEdge edge) { return edge.toString().replace("(", "").replace(")", "").split(":"); } } ================================================ FILE: report/src/main/java/org/hjug/refactorfirst/report/json/JsonReport.java ================================================ package org.hjug.refactorfirst.report.json; import java.util.List; import lombok.Builder; import lombok.Data; @Data @Builder class JsonReport { private List rankedDisharmonies; private List errors; } ================================================ FILE: report/src/main/java/org/hjug/refactorfirst/report/json/JsonReportDisharmonyEntry.java ================================================ package org.hjug.refactorfirst.report.json; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.FormatStyle; import java.util.Locale; import lombok.Builder; import lombok.Data; import org.hjug.cbc.RankedDisharmony; @Data @Builder class JsonReportDisharmonyEntry { private static final DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedDateTime(FormatStyle.SHORT) .withLocale(Locale.getDefault()) 
.withZone(ZoneId.systemDefault()); private final String fileName; private final String className; private final String fullFilePath; private final Integer effortRank; private final Integer changePronenessRank; private final Integer priority; private final Integer weightedMethodCount; private final Integer commitCount; private final String mostRecentCommitTime; public static JsonReportDisharmonyEntry fromRankedDisharmony(RankedDisharmony entry) { return JsonReportDisharmonyEntry.builder() .fileName(entry.getFileName()) .className(entry.getClassName()) .effortRank(entry.getEffortRank()) .changePronenessRank(entry.getChangePronenessRank()) .priority(entry.getRawPriority()) .weightedMethodCount(entry.getWmc()) .commitCount(entry.getCommitCount()) .mostRecentCommitTime(formatter.format(entry.getMostRecentCommitTime())) .fullFilePath(entry.getPath()) .build(); } } ================================================ FILE: report/src/main/java/org/hjug/refactorfirst/report/json/JsonReportExecutor.java ================================================ package org.hjug.refactorfirst.report.json; import static org.hjug.refactorfirst.report.ReportWriter.writeReportToDisk; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.File; import java.io.IOException; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.hjug.cbc.CostBenefitCalculator; import org.hjug.cbc.RankedDisharmony; @Slf4j public class JsonReportExecutor { private static final String FILE_NAME = "refactor-first-data.json"; private static final ObjectMapper MAPPER = new ObjectMapper(); public void execute(File baseDir, String outputDirectory) { String projectBaseDir; if (baseDir != null) { projectBaseDir = baseDir.getPath(); } else { projectBaseDir = 
Paths.get("").toAbsolutePath().toString(); } // TODO: revisit final CostBenefitCalculator costBenefitCalculator = new CostBenefitCalculator(projectBaseDir, new HashMap<>()); try { costBenefitCalculator.runPmdAnalysis(); } catch (IOException e) { log.error("Error running PMD analysis."); throw new RuntimeException(e); } final List rankedDisharmonies = costBenefitCalculator.calculateGodClassCostBenefitValues(); final List disharmonyEntries = rankedDisharmonies.stream() .map(JsonReportDisharmonyEntry::fromRankedDisharmony) .collect(Collectors.toList()); final JsonReport report = JsonReport.builder().rankedDisharmonies(disharmonyEntries).build(); try { final String reportJson = MAPPER.writeValueAsString(report); writeReportToDisk(outputDirectory, FILE_NAME, new StringBuilder(reportJson).toString()); } catch (final JsonProcessingException jsonProcessingException) { final String errorMessage = "Could not generate a json report: " + jsonProcessingException; log.error(errorMessage); final JsonReport errorReport = JsonReport.builder() .errors(new ArrayList<>(Collections.singletonList(errorMessage))) .build(); writeErrorReport(errorReport, outputDirectory); } } private void writeErrorReport(final JsonReport errorReport, String outputDirectory) { try { writeReportToDisk( outputDirectory, FILE_NAME, new StringBuilder(MAPPER.writeValueAsString(errorReport)).toString()); } catch (final JsonProcessingException jsonProcessingException) { log.error("failed to write error report: ", jsonProcessingException); } } } ================================================ FILE: report/src/test/java/org/hjug/refactorfirst/report/HtmlReportTest.java ================================================ package org.hjug.refactorfirst.report; import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.*; import org.hjug.cbc.CycleNode; import org.hjug.cbc.RankedCycle; import org.jgrapht.Graph; import org.jgrapht.graph.DefaultDirectedWeightedGraph; import 
org.jgrapht.graph.DefaultWeightedEdge; import org.junit.jupiter.api.Test; class HtmlReportTest { private HtmlReport mavenReport = new HtmlReport(); @Test void testGetOutputName() { // This report will generate simple-report.html when invoked in a project with `mvn site` assertEquals("refactor-first-report", mavenReport.getOutputName()); } @Test void getName() { // Name of the report when listed in the project-reports.html page of a project assertEquals("Refactor First Report", mavenReport.getName(Locale.getDefault())); } @Test void getDescription() { // Description of the report when listed in the project-reports.html page of a project assertEquals( "Ranks the disharmonies in a codebase. The classes that should be refactored first " + " have the highest priority values.", mavenReport.getDescription(Locale.getDefault())); } @Test void buildCycleDot() { Graph classGraph = new DefaultDirectedWeightedGraph<>(DefaultWeightedEdge.class); classGraph.addVertex("A"); classGraph.addVertex("B"); classGraph.addVertex("C"); classGraph.addEdge("A", "B"); classGraph.addEdge("B", "C"); classGraph.addEdge("C", "A"); classGraph.setEdgeWeight("A", "B", 2); String cycleName = "Test"; List cycleNodes = new ArrayList<>(); RankedCycle rankedCycle = new RankedCycle(cycleName, 0, classGraph.vertexSet(), classGraph.edgeSet(), 0, null, cycleNodes); HtmlReport htmlReport = new HtmlReport(); String dot = htmlReport.buildCycleDot(classGraph, rankedCycle); String expectedDot = "`strict digraph G {\n" + "A -> B [ label = \"2\" weight = \"2\" ];\n" + "B -> C [ label = \"1\" weight = \"1\" ];\n" + "C -> A [ label = \"1\" weight = \"1\" ];\n" + "A;\n" + "B;\n" + "C;\n" + "}`;"; assertEquals(expectedDot, dot); } } ================================================ FILE: report/src/test/java/org/hjug/refactorfirst/report/SimpleHtmlReportTest.java ================================================ package org.hjug.refactorfirst.report; import org.junit.jupiter.api.Assertions; import 
org.junit.jupiter.api.Test; class SimpleHtmlReportTest { @Test void isDateTime() { HtmlReport htmlReport = new HtmlReport(); String commitDateTime = "7/22/23, 5:00 AM"; Assertions.assertTrue(htmlReport.isDateTime(commitDateTime)); } } ================================================ FILE: report/src/test/resources/highlight.html ================================================
================================================ FILE: report/src/test/resources/sigmaPlayground.html ================================================ DOT Graph with Sigma.js and Graphology
================================================ FILE: report/src/test/resources/spriteText.html ================================================
================================================ FILE: spring-petclinic-rest-report.html ================================================ Refactor First Report for spring-petclinic-rest 3.4.3

RefactorFirst Report for spring-petclinic-rest 3.4.3

Back Edges
Highly Coupled Classes
Class Cycles

Class Map

Red lines represent back edges to remove.
Zoom in / out with your mouse wheel and click/move to drag the image.
Excludes classes that have no incoming and outgoing edges
Number of classes: 82 Number of relationships: 192

Show RefactorFirst some ❤️
Star Fork Watch Issue Sponsor

Current Cycle Count: 3
Current Average Cycle Node Count: 2.3333333333333335
Current Total Back Edge Count: 4
Current Total Min Weight Back Edge Count: 1
Edge Edge Weight In # of Cycles New Cycle Count New Avg Cycle Node Count Avg Node Δ ÷ Effort
Visit → Pet : 3 3 1 3 2.0 0.11111111111111116
Owner → Pet : 13 13 1 3 2.0 0.025641025641025654
Role → User : 3 3 1 2 2.5 -0.055555555555555504
EntityUtils → BaseEntity : 1 1 1 2 2.5 -0.16666666666666652
Show RefactorFirst some ❤️
Star Fork Watch Issue Sponsor







Show RefactorFirst some ❤️
Star Fork Watch Issue Sponsor

Coupling Between Objects Chart Legend:

X-Axis: Number of objects the class is coupled to
Y-Axis: Relative churn
Color: Priority of what to fix first
Circle size: Priority (Visual) of what to fix first

Highly Coupled classes by the numbers: (Refactor starting with Priority 1)

ClassPriorityChange Proneness RankCoupling CountMost Recent Commit DateCommit Count
ClinicServiceImpl.java 1 1 22 12/28/24, 11:16 AM 23







Class Cycles by the numbers:

Cycle Name Priority Class Count Relationship Count
Pet 1 3 4
Role 2 2 2
BaseEntity 3 2 2






Class Cycle : Pet

Class Map

Red lines represent back edges to remove.
Zoom in / out with your mouse wheel and click/move to drag the image.


Bold text indicates back edge to remove to decompose cycle
Number of classes: 3 Number of relationships: 4
Classes Relationships
Pet Pet → Visit : 9
Pet → Owner : 3
Owner Owner → Pet : 13
Visit Visit → Pet : 3





Class Cycle : Role

Class Map

Red lines represent back edges to remove.
Zoom in / out with your mouse wheel and click/move to drag the image.


Bold text indicates back edge to remove to decompose cycle
Number of classes: 2 Number of relationships: 2
Classes Relationships
Role Role → User : 3
User User → Role : 6





Class Cycle : BaseEntity

Class Map

Red lines represent back edges to remove.
Zoom in / out with your mouse wheel and click/move to drag the image.


Bold text indicates back edge to remove to decompose cycle
Number of classes: 2 Number of relationships: 2
Classes Relationships
BaseEntity BaseEntity → EntityUtils : 4
EntityUtils EntityUtils → BaseEntity : 1

Last Published: 4/2/25, 7:40 PM

================================================ FILE: test-resources/pom.xml ================================================ 4.0.0 org.hjug.refactorfirst refactor-first 0.8.1-SNAPSHOT org.hjug.refactorfirst.testresources test-resources RefactorFirst Test Resources ================================================ FILE: test-resources/src/main/resources/AttributeHandler.java ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/
package org.apache.myfaces.tobago.facelets;

import org.apache.commons.beanutils.PropertyUtils;
import org.apache.myfaces.tobago.component.Attributes;
import org.apache.myfaces.tobago.component.SupportsMarkup;
import org.apache.myfaces.tobago.component.SupportsRenderedPartially;
import org.apache.myfaces.tobago.context.Markup;
import org.apache.myfaces.tobago.el.ConstantMethodBinding;
import org.apache.myfaces.tobago.internal.util.StringUtils;
import org.apache.myfaces.tobago.util.ComponentUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.el.ELException;
import javax.el.ExpressionFactory;
import javax.el.MethodExpression;
import javax.el.ValueExpression;
import javax.faces.FacesException;
import javax.faces.component.ActionSource;
import javax.faces.component.ActionSource2;
import javax.faces.component.EditableValueHolder;
import javax.faces.component.UIComponent;
import javax.faces.component.ValueHolder;
import javax.faces.convert.Converter;
import javax.faces.event.MethodExpressionActionListener;
import javax.faces.event.MethodExpressionValueChangeListener;
import javax.faces.validator.MethodExpressionValidator;
import javax.faces.view.facelets.ComponentHandler;
import javax.faces.view.facelets.FaceletContext;
import javax.faces.view.facelets.TagAttribute;
import javax.faces.view.facelets.TagConfig;
import javax.faces.view.facelets.TagException;
import javax.faces.view.facelets.TagHandler;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;

//from Apache MyFaces 2.0.8
//Retrieved from http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/facelets/AttributeHandler.java/?v=source
/**
 * Facelets tag handler that attaches a name/value pair (with an optional "mode") to the
 * parent {@link UIComponent} — either as a plain attribute, a value expression, or one of
 * several JSF artifacts (action, listeners, validator, converter, markup, rendered flag).
 *
 * NOTE(review): this is a verbatim third-party copy kept as a test fixture (a known
 * "god class" sample used by the cost-benefit-calculator tests). Only comments were
 * added in this review; do not alter code tokens, or the metrics the tests measure
 * against this class will change.
 */
public final class AttributeHandler extends TagHandler {

  private static final Logger LOG = LoggerFactory.getLogger(AttributeHandler.class);

  // Required tag attributes "name" and "value", plus the optional behavior selector "mode".
  private final TagAttribute name;
  private final TagAttribute value;
  private final TagAttribute mode;

  /**
   * Reads the tag attributes from the tag configuration.
   * Missing "name" or "value" fails in getRequiredAttribute; "mode" may be null.
   */
  public AttributeHandler(final TagConfig config) {
    super(config);
    this.name = getRequiredAttribute(Attributes.NAME);
    this.value = getRequiredAttribute(Attributes.VALUE);
    this.mode = getAttribute(Attributes.MODE);
  }

  /**
   * Applies the handled attribute to {@code parent} (only when the component is new).
   * With a "mode" attribute one of the special behaviors below is selected
   * ("isNotSet", "isSet", "action", "actionListener", "actionFromValue", "valueIfSet");
   * any other mode throws a {@link FacesException}. Without a mode, the attribute name
   * decides how the value is attached.
   *
   * @throws TagException if {@code parent} is null
   */
  public void apply(final FaceletContext faceletContext, final UIComponent parent) throws ELException {
    if (parent == null) {
      throw new TagException(tag, "Parent UIComponent was null");
    }
    if (ComponentHandler.isNew(parent)) {
      if (mode != null) {
        if ("isNotSet".equals(mode.getValue())) {
          // Stores Boolean TRUE under "name" when the value expression cannot be resolved
          // through the variable mapper (or, for literals, when the string is empty).
          boolean result = false;
          String expressionString = value.getValue();
          if (!value.isLiteral()) {
            // Follow variable-mapper aliases while the expression stays "simple"
            // (no '.' or '[' in it — see isSimpleExpression).
            while (isSimpleExpression(expressionString)) {
              if (isMethodOrValueExpression(expressionString)) {
                final ValueExpression expression
                    = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
                if (expression == null) {
                  result = true;
                  break;
                } else {
                  expressionString = expression.getExpressionString();
                }
              } else {
                result = false;
                break;
              }
            }
          } else {
            result = StringUtils.isEmpty(expressionString);
          }
          parent.getAttributes().put(name.getValue(), result);
        } else if ("isSet".equals(mode.getValue())) {
          // Mirror image of "isNotSet": TRUE when the value resolves / is non-empty.
          boolean result = true;
          String expressionString = value.getValue();
          if (!value.isLiteral()) {
            while (isSimpleExpression(expressionString)) {
              if (isMethodOrValueExpression(expressionString)) {
                final ValueExpression expression
                    = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
                if (expression == null) {
                  result = false;
                  break;
                } else {
                  expressionString = expression.getExpressionString();
                }
              } else {
                result = true;
                break;
              }
            }
          } else {
            result = StringUtils.isNotEmpty(expressionString);
          }
          parent.getAttributes().put(name.getValue(), result);
        } else if ("action".equals(mode.getValue())) {
          // Resolves the expression through the variable mapper, then installs it as the
          // action expression of the (assumed ActionSource2) parent.
          String expressionString = value.getValue();
          while (isSimpleExpression(expressionString)) {
            if (isMethodOrValueExpression(expressionString)) {
              final ValueExpression expression
                  = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
              if (expression == null) {
                // when the action hasn't been set while using a composition.
                if (LOG.isDebugEnabled()) {
                  LOG.debug("Variable can't be resolved: value='" + expressionString + "'");
                }
                expressionString = null;
                break;
              } else {
                expressionString = expression.getExpressionString();
              }
            } else {
              break;
            }
          }
          if (expressionString != null) {
            final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
            final MethodExpression action = new TagMethodExpression(value, expressionFactory.createMethodExpression(
                faceletContext, expressionString, String.class, ComponentUtils.ACTION_ARGS));
            ((ActionSource2) parent).setActionExpression(action);
          }
        } else if ("actionListener".equals(mode.getValue())) {
          // Like "action", but registers a MethodExpressionActionListener; literal values
          // are rejected with a warning (only EL expressions are supported here).
          String expressionString = value.getValue();
          while (isSimpleExpression(expressionString)) {
            if (isMethodOrValueExpression(expressionString)) {
              final ValueExpression expression
                  = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
              if (expression == null) {
                if (LOG.isDebugEnabled()) {
                  // when the action hasn't been set while using a composition.
                  LOG.debug("Variable can't be resolved: value='" + expressionString + "'");
                }
                expressionString = null;
                break;
              } else {
                expressionString = expression.getExpressionString();
              }
            } else {
              LOG.warn("Only expressions are supported mode=actionListener value='" + expressionString + "'");
              expressionString = null;
              break;
            }
          }
          if (expressionString != null) {
            final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
            final MethodExpression actionListener
                = new TagMethodExpression(value, expressionFactory.createMethodExpression(
                faceletContext, expressionString, null, ComponentUtils.ACTION_LISTENER_ARGS));
            ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(actionListener));
          }
        } else if ("actionFromValue".equals(mode.getValue())) {
          // Evaluates the (non-literal) value now and stores it as a constant method binding.
          if (!value.isLiteral()) {
            final String result = value.getValue(faceletContext);
            parent.getAttributes().put(name.getValue(), new ConstantMethodBinding(result));
          }
        } else if ("valueIfSet".equals(mode.getValue())) {
          // Keeps the last resolvable alias of the expression; sets the attribute only if
          // something resolvable (or a literal) remains.
          String expressionString = value.getValue();
          String lastExpressionString = null;
          while (isMethodOrValueExpression(expressionString) && isSimpleExpression(expressionString)) {
            final ValueExpression expression
                = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
            if (expression != null) {
              lastExpressionString = expressionString;
              expressionString = expression.getExpressionString();
            } else {
              // restore last value
              expressionString = lastExpressionString;
              break;
            }
          }
          if (expressionString != null) {
            final String attributeName = name.getValue(faceletContext);
            if (containsMethodOrValueExpression(expressionString)) {
              final ValueExpression expression = value.getValueExpression(faceletContext, Object.class);
              parent.setValueExpression(attributeName, expression);
            } else {
              final Object literalValue = getValue(faceletContext, parent, expressionString, attributeName);
              parent.getAttributes().put(attributeName, literalValue);
            }
          }
        } else {
          throw new FacesException("Type " + mode + " not supported");
        }
      } else {
        // No mode: dispatch on the attribute name / parent capabilities.
        final String nameValue = name.getValue(faceletContext);
        if (Attributes.RENDERED.equals(nameValue)) {
          if (value.isLiteral()) {
            parent.setRendered(value.getBoolean(faceletContext));
          } else {
            parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Boolean.class));
          }
        } else if (Attributes.RENDERED_PARTIALLY.equals(nameValue)
            && parent instanceof SupportsRenderedPartially) {
          if (value.isLiteral()) {
            final String[] components = ComponentUtils.splitList(value.getValue());
            ((SupportsRenderedPartially) parent).setRenderedPartially(components);
          } else {
            parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class));
          }
        } else if (Attributes.STYLE_CLASS.equals(nameValue)) {
          // TODO expression
          ComponentUtils.setStyleClasses(parent, value.getValue());
        } else if (Attributes.MARKUP.equals(nameValue)) {
          if (parent instanceof SupportsMarkup) {
            if (value.isLiteral()) {
              ((SupportsMarkup) parent).setMarkup(Markup.valueOf(value.getValue()));
            } else {
              final ValueExpression expression = value.getValueExpression(faceletContext, Object.class);
              parent.setValueExpression(nameValue, expression);
            }
          } else {
            LOG.error("Component is not instanceof SupportsMarkup. Instance is: " + parent.getClass().getName());
          }
        } else if (parent instanceof EditableValueHolder && Attributes.VALIDATOR.equals(nameValue)) {
          final MethodExpression methodExpression
              = getMethodExpression(faceletContext, null, ComponentUtils.VALIDATOR_ARGS);
          if (methodExpression != null) {
            ((EditableValueHolder) parent).addValidator(new MethodExpressionValidator(methodExpression));
          }
        } else if (parent instanceof EditableValueHolder && Attributes.VALUE_CHANGE_LISTENER.equals(nameValue)) {
          final MethodExpression methodExpression
              = getMethodExpression(faceletContext, null, ComponentUtils.VALUE_CHANGE_LISTENER_ARGS);
          if (methodExpression != null) {
            ((EditableValueHolder) parent).addValueChangeListener(
                new MethodExpressionValueChangeListener(methodExpression));
          }
        } else if (parent instanceof ValueHolder && Attributes.CONVERTER.equals(nameValue)) {
          setConverter(faceletContext, parent, nameValue);
        } else if (parent instanceof ActionSource && Attributes.ACTION.equals(nameValue)) {
          final MethodExpression action
              = getMethodExpression(faceletContext, String.class, ComponentUtils.ACTION_ARGS);
          if (action != null) {
            ((ActionSource2) parent).setActionExpression(action);
          }
        } else if (parent instanceof ActionSource && Attributes.ACTION_LISTENER.equals(nameValue)) {
          final MethodExpression action
              = getMethodExpression(faceletContext, null, ComponentUtils.ACTION_LISTENER_ARGS);
          if (action != null) {
            ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(action));
          }
        } else if (!parent.getAttributes().containsKey(nameValue)) {
          // Fallback: plain attribute (literal) or generic value expression.
          if (value.isLiteral()) {
            parent.getAttributes().put(nameValue, value.getValue());
          } else {
            parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class));
          }
        }
      }
    }
  }

  // True for strings shaped like "${...}" or "#{...}".
  private boolean isMethodOrValueExpression(final String string) {
    return (string.startsWith("${") || string.startsWith("#{")) && string.endsWith("}");
  }

  // True when an EL expression appears anywhere inside the string.
  private boolean containsMethodOrValueExpression(final String string) {
    return (string.contains("${") || string.contains("#{")) && string.contains("}");
  }

  // "Simple" = a bare variable reference: no property access ('.') or indexing ('[').
  private boolean isSimpleExpression(final String string) {
    return string.indexOf('.') < 0 && string.indexOf('[') < 0;
  }

  // Strips the leading "#{"/"${" and trailing "}" — assumes the EL delimiters are present.
  private String removeElParenthesis(final String string) {
    return string.substring(2, string.length() - 1);
  }

  // Resolves the tag's value (sans EL delimiters) through the current variable mapper.
  private ValueExpression getExpression(final FaceletContext faceletContext) {
    final String myValue = removeElParenthesis(value.getValue());
    return faceletContext.getVariableMapper().resolveVariable(myValue);
  }

  /**
   * Builds a MethodExpression for the tag's value; returns null when a "${...}" value
   * cannot be resolved via the variable mapper.
   */
  private MethodExpression getMethodExpression(
      final FaceletContext faceletContext, final Class returnType, final Class[] args) {
    // in a composition may be we get the method expression string from the current variable mapper
    // the expression can be empty
    // in this case return nothing
    if (value.getValue().startsWith("${")) {
      final ValueExpression expression = getExpression(faceletContext);
      if (expression != null) {
        final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
        return new TagMethodExpression(value, expressionFactory.createMethodExpression(faceletContext,
            expression.getExpressionString(), returnType, args));
      } else {
        return null;
      }
    } else {
      return value.getMethodExpression(faceletContext, returnType, args);
    }
  }

  /**
   * Evaluates {@code expressionString} coerced to the property type of
   * {@code attributeName} on the parent (falls back to Object when introspection fails).
   */
  private Object getValue(
      final FaceletContext faceletContext, final UIComponent parent, final String expressionString,
      final String attributeName) {
    Class type = Object.class;
    try {
      type = PropertyUtils.getReadMethod(
          new PropertyDescriptor(attributeName, parent.getClass())).getReturnType();
    } catch (final IntrospectionException e) {
      LOG.warn("Can't determine expected type", e);
    }
    final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
    final ValueExpression valueExpression = expressionFactory
        .createValueExpression(faceletContext, expressionString, type);
    return valueExpression.getValue(faceletContext);
  }

  // Resolves "${...}" values through the variable mapper before delegating; does nothing
  // when an unresolvable composition variable is encountered.
  private void setConverter(final FaceletContext faceletContext, final UIComponent parent, final String nameValue) {
    // in a composition may be we get the converter expression string from the current variable mapper
    // the expression can be empty
    // in this case return nothing
    if (value.getValue().startsWith("${")) {
      final ValueExpression expression = getExpression(faceletContext);
      if (expression != null) {
        setConverter(faceletContext, parent, nameValue, expression);
      }
    } else {
      setConverter(faceletContext, parent, nameValue, value.getValueExpression(faceletContext, Object.class));
    }
  }

  // Literal text becomes a converter created by id; otherwise the expression is stored
  // on the component for deferred evaluation.
  private void setConverter(
      final FaceletContext faceletContext, final UIComponent parent, final String nameValue,
      final ValueExpression expression) {
    if (expression.isLiteralText()) {
      final Converter converter
          = faceletContext.getFacesContext().getApplication().createConverter(expression.getExpressionString());
      ((ValueHolder) parent).setConverter(converter);
    } else {
      parent.setValueExpression(nameValue, expression);
    }
  }
}


================================================
FILE: test-resources/src/main/resources/AttributeHandler2.java
================================================
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
*/
package org.apache.myfaces.tobago.facelets;

import org.apache.commons.beanutils.PropertyUtils;
import org.apache.myfaces.tobago.component.Attributes;
import org.apache.myfaces.tobago.component.SupportsMarkup;
import org.apache.myfaces.tobago.component.SupportsRenderedPartially;
import org.apache.myfaces.tobago.context.Markup;
import org.apache.myfaces.tobago.el.ConstantMethodBinding;
import org.apache.myfaces.tobago.internal.util.StringUtils;
import org.apache.myfaces.tobago.util.ComponentUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.el.ELException;
import javax.el.ExpressionFactory;
import javax.el.MethodExpression;
import javax.el.ValueExpression;
import javax.faces.FacesException;
import javax.faces.component.ActionSource;
import javax.faces.component.ActionSource2;
import javax.faces.component.EditableValueHolder;
import javax.faces.component.UIComponent;
import javax.faces.component.ValueHolder;
import javax.faces.convert.Converter;
import javax.faces.event.MethodExpressionActionListener;
import javax.faces.event.MethodExpressionValueChangeListener;
import javax.faces.validator.MethodExpressionValidator;
import javax.faces.view.facelets.ComponentHandler;
import javax.faces.view.facelets.FaceletContext;
import javax.faces.view.facelets.TagAttribute;
import javax.faces.view.facelets.TagConfig;
import javax.faces.view.facelets.TagException;
import javax.faces.view.facelets.TagHandler;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;

//from Apache MyFaces 2.0.8
//Retrieved from http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/facelets/AttributeHandler.java/?v=source
/**
 * Second fixture copy of the Tobago AttributeHandler (file AttributeHandler2.java).
 * Identical to the first fixture except for the extra static method
 * {@code letsAddASimpleMethod()} near the bottom, which the tests use to detect a
 * changed variant of the same class.
 *
 * NOTE(review): verbatim third-party copy kept as a test fixture; only comments were
 * added in this review — code tokens must stay unchanged so measured metrics remain
 * meaningful.
 */
public final class AttributeHandler extends TagHandler {

  private static final Logger LOG = LoggerFactory.getLogger(AttributeHandler.class);

  // Required tag attributes "name" and "value", plus the optional behavior selector "mode".
  private final TagAttribute name;
  private final TagAttribute value;
  private final TagAttribute mode;

  /**
   * Reads the tag attributes from the tag configuration.
   * Missing "name" or "value" fails in getRequiredAttribute; "mode" may be null.
   */
  public AttributeHandler(final TagConfig config) {
    super(config);
    this.name = getRequiredAttribute(Attributes.NAME);
    this.value = getRequiredAttribute(Attributes.VALUE);
    this.mode = getAttribute(Attributes.MODE);
  }

  /**
   * Applies the handled attribute to {@code parent} (only when the component is new).
   * With a "mode" attribute one of the special behaviors below is selected
   * ("isNotSet", "isSet", "action", "actionListener", "actionFromValue", "valueIfSet");
   * any other mode throws a {@link FacesException}. Without a mode, the attribute name
   * decides how the value is attached.
   *
   * @throws TagException if {@code parent} is null
   */
  public void apply(final FaceletContext faceletContext, final UIComponent parent) throws ELException {
    if (parent == null) {
      throw new TagException(tag, "Parent UIComponent was null");
    }
    if (ComponentHandler.isNew(parent)) {
      if (mode != null) {
        if ("isNotSet".equals(mode.getValue())) {
          // Stores Boolean TRUE under "name" when the value expression cannot be resolved
          // through the variable mapper (or, for literals, when the string is empty).
          boolean result = false;
          String expressionString = value.getValue();
          if (!value.isLiteral()) {
            // Follow variable-mapper aliases while the expression stays "simple"
            // (no '.' or '[' in it — see isSimpleExpression).
            while (isSimpleExpression(expressionString)) {
              if (isMethodOrValueExpression(expressionString)) {
                final ValueExpression expression
                    = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
                if (expression == null) {
                  result = true;
                  break;
                } else {
                  expressionString = expression.getExpressionString();
                }
              } else {
                result = false;
                break;
              }
            }
          } else {
            result = StringUtils.isEmpty(expressionString);
          }
          parent.getAttributes().put(name.getValue(), result);
        } else if ("isSet".equals(mode.getValue())) {
          // Mirror image of "isNotSet": TRUE when the value resolves / is non-empty.
          boolean result = true;
          String expressionString = value.getValue();
          if (!value.isLiteral()) {
            while (isSimpleExpression(expressionString)) {
              if (isMethodOrValueExpression(expressionString)) {
                final ValueExpression expression
                    = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
                if (expression == null) {
                  result = false;
                  break;
                } else {
                  expressionString = expression.getExpressionString();
                }
              } else {
                result = true;
                break;
              }
            }
          } else {
            result = StringUtils.isNotEmpty(expressionString);
          }
          parent.getAttributes().put(name.getValue(), result);
        } else if ("action".equals(mode.getValue())) {
          // Resolves the expression through the variable mapper, then installs it as the
          // action expression of the (assumed ActionSource2) parent.
          String expressionString = value.getValue();
          while (isSimpleExpression(expressionString)) {
            if (isMethodOrValueExpression(expressionString)) {
              final ValueExpression expression
                  = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
              if (expression == null) {
                // when the action hasn't been set while using a composition.
                if (LOG.isDebugEnabled()) {
                  LOG.debug("Variable can't be resolved: value='" + expressionString + "'");
                }
                expressionString = null;
                break;
              } else {
                expressionString = expression.getExpressionString();
              }
            } else {
              break;
            }
          }
          if (expressionString != null) {
            final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
            final MethodExpression action = new TagMethodExpression(value, expressionFactory.createMethodExpression(
                faceletContext, expressionString, String.class, ComponentUtils.ACTION_ARGS));
            ((ActionSource2) parent).setActionExpression(action);
          }
        } else if ("actionListener".equals(mode.getValue())) {
          // Like "action", but registers a MethodExpressionActionListener; literal values
          // are rejected with a warning (only EL expressions are supported here).
          String expressionString = value.getValue();
          while (isSimpleExpression(expressionString)) {
            if (isMethodOrValueExpression(expressionString)) {
              final ValueExpression expression
                  = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
              if (expression == null) {
                if (LOG.isDebugEnabled()) {
                  // when the action hasn't been set while using a composition.
                  LOG.debug("Variable can't be resolved: value='" + expressionString + "'");
                }
                expressionString = null;
                break;
              } else {
                expressionString = expression.getExpressionString();
              }
            } else {
              LOG.warn("Only expressions are supported mode=actionListener value='" + expressionString + "'");
              expressionString = null;
              break;
            }
          }
          if (expressionString != null) {
            final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
            final MethodExpression actionListener
                = new TagMethodExpression(value, expressionFactory.createMethodExpression(
                faceletContext, expressionString, null, ComponentUtils.ACTION_LISTENER_ARGS));
            ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(actionListener));
          }
        } else if ("actionFromValue".equals(mode.getValue())) {
          // Evaluates the (non-literal) value now and stores it as a constant method binding.
          if (!value.isLiteral()) {
            final String result = value.getValue(faceletContext);
            parent.getAttributes().put(name.getValue(), new ConstantMethodBinding(result));
          }
        } else if ("valueIfSet".equals(mode.getValue())) {
          // Keeps the last resolvable alias of the expression; sets the attribute only if
          // something resolvable (or a literal) remains.
          String expressionString = value.getValue();
          String lastExpressionString = null;
          while (isMethodOrValueExpression(expressionString) && isSimpleExpression(expressionString)) {
            final ValueExpression expression
                = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString));
            if (expression != null) {
              lastExpressionString = expressionString;
              expressionString = expression.getExpressionString();
            } else {
              // restore last value
              expressionString = lastExpressionString;
              break;
            }
          }
          if (expressionString != null) {
            final String attributeName = name.getValue(faceletContext);
            if (containsMethodOrValueExpression(expressionString)) {
              final ValueExpression expression = value.getValueExpression(faceletContext, Object.class);
              parent.setValueExpression(attributeName, expression);
            } else {
              final Object literalValue = getValue(faceletContext, parent, expressionString, attributeName);
              parent.getAttributes().put(attributeName, literalValue);
            }
          }
        } else {
          throw new FacesException("Type " + mode + " not supported");
        }
      } else {
        // No mode: dispatch on the attribute name / parent capabilities.
        final String nameValue = name.getValue(faceletContext);
        if (Attributes.RENDERED.equals(nameValue)) {
          if (value.isLiteral()) {
            parent.setRendered(value.getBoolean(faceletContext));
          } else {
            parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Boolean.class));
          }
        } else if (Attributes.RENDERED_PARTIALLY.equals(nameValue)
            && parent instanceof SupportsRenderedPartially) {
          if (value.isLiteral()) {
            final String[] components = ComponentUtils.splitList(value.getValue());
            ((SupportsRenderedPartially) parent).setRenderedPartially(components);
          } else {
            parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class));
          }
        } else if (Attributes.STYLE_CLASS.equals(nameValue)) {
          // TODO expression
          ComponentUtils.setStyleClasses(parent, value.getValue());
        } else if (Attributes.MARKUP.equals(nameValue)) {
          if (parent instanceof SupportsMarkup) {
            if (value.isLiteral()) {
              ((SupportsMarkup) parent).setMarkup(Markup.valueOf(value.getValue()));
            } else {
              final ValueExpression expression = value.getValueExpression(faceletContext, Object.class);
              parent.setValueExpression(nameValue, expression);
            }
          } else {
            LOG.error("Component is not instanceof SupportsMarkup. Instance is: " + parent.getClass().getName());
          }
        } else if (parent instanceof EditableValueHolder && Attributes.VALIDATOR.equals(nameValue)) {
          final MethodExpression methodExpression
              = getMethodExpression(faceletContext, null, ComponentUtils.VALIDATOR_ARGS);
          if (methodExpression != null) {
            ((EditableValueHolder) parent).addValidator(new MethodExpressionValidator(methodExpression));
          }
        } else if (parent instanceof EditableValueHolder && Attributes.VALUE_CHANGE_LISTENER.equals(nameValue)) {
          final MethodExpression methodExpression
              = getMethodExpression(faceletContext, null, ComponentUtils.VALUE_CHANGE_LISTENER_ARGS);
          if (methodExpression != null) {
            ((EditableValueHolder) parent).addValueChangeListener(
                new MethodExpressionValueChangeListener(methodExpression));
          }
        } else if (parent instanceof ValueHolder && Attributes.CONVERTER.equals(nameValue)) {
          setConverter(faceletContext, parent, nameValue);
        } else if (parent instanceof ActionSource && Attributes.ACTION.equals(nameValue)) {
          final MethodExpression action
              = getMethodExpression(faceletContext, String.class, ComponentUtils.ACTION_ARGS);
          if (action != null) {
            ((ActionSource2) parent).setActionExpression(action);
          }
        } else if (parent instanceof ActionSource && Attributes.ACTION_LISTENER.equals(nameValue)) {
          final MethodExpression action
              = getMethodExpression(faceletContext, null, ComponentUtils.ACTION_LISTENER_ARGS);
          if (action != null) {
            ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(action));
          }
        } else if (!parent.getAttributes().containsKey(nameValue)) {
          // Fallback: plain attribute (literal) or generic value expression.
          if (value.isLiteral()) {
            parent.getAttributes().put(nameValue, value.getValue());
          } else {
            parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class));
          }
        }
      }
    }
  }

  // True for strings shaped like "${...}" or "#{...}".
  private boolean isMethodOrValueExpression(final String string) {
    return (string.startsWith("${") || string.startsWith("#{")) && string.endsWith("}");
  }

  // True when an EL expression appears anywhere inside the string.
  private boolean containsMethodOrValueExpression(final String string) {
    return (string.contains("${") || string.contains("#{")) && string.contains("}");
  }

  // "Simple" = a bare variable reference: no property access ('.') or indexing ('[').
  private boolean isSimpleExpression(final String string) {
    return string.indexOf('.') < 0 && string.indexOf('[') < 0;
  }

  // Strips the leading "#{"/"${" and trailing "}" — assumes the EL delimiters are present.
  private String removeElParenthesis(final String string) {
    return string.substring(2, string.length() - 1);
  }

  // Resolves the tag's value (sans EL delimiters) through the current variable mapper.
  private ValueExpression getExpression(final FaceletContext faceletContext) {
    final String myValue = removeElParenthesis(value.getValue());
    return faceletContext.getVariableMapper().resolveVariable(myValue);
  }

  /**
   * Builds a MethodExpression for the tag's value; returns null when a "${...}" value
   * cannot be resolved via the variable mapper.
   */
  private MethodExpression getMethodExpression(
      final FaceletContext faceletContext, final Class returnType, final Class[] args) {
    // in a composition may be we get the method expression string from the current variable mapper
    // the expression can be empty
    // in this case return nothing
    if (value.getValue().startsWith("${")) {
      final ValueExpression expression = getExpression(faceletContext);
      if (expression != null) {
        final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
        return new TagMethodExpression(value, expressionFactory.createMethodExpression(faceletContext,
            expression.getExpressionString(), returnType, args));
      } else {
        return null;
      }
    } else {
      return value.getMethodExpression(faceletContext, returnType, args);
    }
  }

  /**
   * Evaluates {@code expressionString} coerced to the property type of
   * {@code attributeName} on the parent (falls back to Object when introspection fails).
   */
  private Object getValue(
      final FaceletContext faceletContext, final UIComponent parent, final String expressionString,
      final String attributeName) {
    Class type = Object.class;
    try {
      type = PropertyUtils.getReadMethod(
          new PropertyDescriptor(attributeName, parent.getClass())).getReturnType();
    } catch (final IntrospectionException e) {
      LOG.warn("Can't determine expected type", e);
    }
    final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory();
    final ValueExpression valueExpression = expressionFactory
        .createValueExpression(faceletContext, expressionString, type);
    return valueExpression.getValue(faceletContext);
  }

  // Resolves "${...}" values through the variable mapper before delegating; does nothing
  // when an unresolvable composition variable is encountered.
  private void setConverter(final FaceletContext faceletContext, final UIComponent parent, final String nameValue) {
    // in a composition may be we get the converter expression string from the current variable mapper
    // the expression can be empty
    // in this case return nothing
    if (value.getValue().startsWith("${")) {
      final ValueExpression expression = getExpression(faceletContext);
      if (expression != null) {
        setConverter(faceletContext, parent, nameValue, expression);
      }
    } else {
      setConverter(faceletContext, parent, nameValue, value.getValueExpression(faceletContext, Object.class));
    }
  }

  // Literal text becomes a converter created by id; otherwise the expression is stored
  // on the component for deferred evaluation.
  private void setConverter(
      final FaceletContext faceletContext, final UIComponent parent, final String nameValue,
      final ValueExpression expression) {
    if (expression.isLiteralText()) {
      final Converter converter
          = faceletContext.getFacesContext().getApplication().createConverter(expression.getExpressionString());
      ((ValueHolder) parent).setConverter(converter);
    } else {
      parent.setValueExpression(nameValue, expression);
    }
  }

  // Extra method that distinguishes this fixture from AttributeHandler.java.
  public static void letsAddASimpleMethod() {
    System.out.println("Howdy!");
  }
}


================================================
FILE: test-resources/src/main/resources/AttributeHandlerAndSorter.java
================================================
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
*/ package org.apache.myfaces.tobago.component; import org.apache.commons.beanutils.PropertyUtils; import org.apache.myfaces.tobago.component.Attributes; import org.apache.myfaces.tobago.component.SupportsMarkup; import org.apache.myfaces.tobago.component.SupportsRenderedPartially; import org.apache.myfaces.tobago.context.Markup; import org.apache.myfaces.tobago.el.ConstantMethodBinding; import org.apache.myfaces.tobago.internal.util.StringUtils; import org.apache.myfaces.tobago.util.ComponentUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.el.ELException; import javax.el.ExpressionFactory; import javax.el.MethodExpression; import javax.el.ValueExpression; import javax.faces.FacesException; import javax.faces.component.ActionSource; import javax.faces.component.ActionSource2; import javax.faces.component.EditableValueHolder; import javax.faces.component.UIComponent; import javax.faces.component.ValueHolder; import javax.faces.convert.Converter; import javax.faces.event.MethodExpressionActionListener; import javax.faces.event.MethodExpressionValueChangeListener; import javax.faces.validator.MethodExpressionValidator; import javax.faces.view.facelets.ComponentHandler; import javax.faces.view.facelets.FaceletContext; import javax.faces.view.facelets.TagAttribute; import javax.faces.view.facelets.TagConfig; import javax.faces.view.facelets.TagException; import javax.faces.view.facelets.TagHandler; import java.beans.IntrospectionException; import java.beans.PropertyDescriptor; import org.apache.myfaces.tobago.event.SortActionEvent; import org.apache.myfaces.tobago.internal.component.AbstractUICommand; import org.apache.myfaces.tobago.internal.component.AbstractUISheet; import org.apache.myfaces.tobago.internal.util.StringUtils; import org.apache.myfaces.tobago.model.SheetState; import org.apache.myfaces.tobago.util.BeanComparator; import org.apache.myfaces.tobago.util.ValueExpressionComparator; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import javax.el.ValueExpression; import javax.faces.component.UIColumn; import javax.faces.component.UICommand; import javax.faces.component.UIComponent; import javax.faces.component.UIInput; import javax.faces.component.UIOutput; import javax.faces.component.UISelectBoolean; import javax.faces.component.UISelectMany; import javax.faces.component.UISelectOne; import javax.faces.context.FacesContext; import javax.faces.model.DataModel; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; //From http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/facelets/AttributeHandler.java/?v=source public final class AttributeHandler extends TagHandler { private static final Logger LOG = LoggerFactory.getLogger(org.apache.myfaces.tobago.facelets.AttributeHandler.class); private final TagAttribute name; private final TagAttribute value; private final TagAttribute mode; public AttributeHandler(final TagConfig config) { super(config); this.name = getRequiredAttribute(Attributes.NAME); this.value = getRequiredAttribute(Attributes.VALUE); this.mode = getAttribute(Attributes.MODE); } public void apply(final FaceletContext faceletContext, final UIComponent parent) throws ELException { if (parent == null) { throw new TagException(tag, "Parent UIComponent was null"); } if (ComponentHandler.isNew(parent)) { if (mode != null) { if ("isNotSet".equals(mode.getValue())) { boolean result = false; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = true; break; } else { expressionString = expression.getExpressionString(); } } else { result = false; 
break; } } } else { result = StringUtils.isEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("isSet".equals(mode.getValue())) { boolean result = true; String expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = false; break; } else { expressionString = expression.getExpressionString(); } } else { result = true; break; } } } else { result = StringUtils.isNotEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("action".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { // when the action hasn't been set while using a composition. 
if (LOG.isDebugEnabled()) { LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression action = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, String.class, ComponentUtils.ACTION_ARGS)); ((ActionSource2) parent).setActionExpression(action); } } else if ("actionListener".equals(mode.getValue())) { String expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { if (LOG.isDebugEnabled()) { // when the action hasn't been set while using a composition. 
LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { LOG.warn("Only expressions are supported mode=actionListener value='" + expressionString + "'"); expressionString = null; break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression actionListener = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, null, ComponentUtils.ACTION_LISTENER_ARGS)); ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(actionListener)); } } else if ("actionFromValue".equals(mode.getValue())) { if (!value.isLiteral()) { final String result = value.getValue(faceletContext); parent.getAttributes().put(name.getValue(), new ConstantMethodBinding(result)); } } else if ("valueIfSet".equals(mode.getValue())) { String expressionString = value.getValue(); String lastExpressionString = null; while (isMethodOrValueExpression(expressionString) && isSimpleExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression != null) { lastExpressionString = expressionString; expressionString = expression.getExpressionString(); } else { // restore last value expressionString = lastExpressionString; break; } } if (expressionString != null) { final String attributeName = name.getValue(faceletContext); if (containsMethodOrValueExpression(expressionString)) { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(attributeName, expression); } else { final Object literalValue = getValue(faceletContext, parent, expressionString, attributeName); parent.getAttributes().put(attributeName, literalValue); } } } else { throw new FacesException("Type " + mode 
+ " not supported"); } } else { final String nameValue = name.getValue(faceletContext); if (Attributes.RENDERED.equals(nameValue)) { if (value.isLiteral()) { parent.setRendered(value.getBoolean(faceletContext)); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Boolean.class)); } } else if (Attributes.RENDERED_PARTIALLY.equals(nameValue) && parent instanceof SupportsRenderedPartially) { if (value.isLiteral()) { final String[] components = ComponentUtils.splitList(value.getValue()); ((SupportsRenderedPartially) parent).setRenderedPartially(components); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } else if (Attributes.STYLE_CLASS.equals(nameValue)) { // TODO expression ComponentUtils.setStyleClasses(parent, value.getValue()); } else if (Attributes.MARKUP.equals(nameValue)) { if (parent instanceof SupportsMarkup) { if (value.isLiteral()) { ((SupportsMarkup) parent).setMarkup(Markup.valueOf(value.getValue())); } else { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(nameValue, expression); } } else { LOG.error("Component is not instanceof SupportsMarkup. 
Instance is: " + parent.getClass().getName()); } } else if (parent instanceof EditableValueHolder && Attributes.VALIDATOR.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALIDATOR_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValidator(new MethodExpressionValidator(methodExpression)); } } else if (parent instanceof EditableValueHolder && Attributes.VALUE_CHANGE_LISTENER.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALUE_CHANGE_LISTENER_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValueChangeListener( new MethodExpressionValueChangeListener(methodExpression)); } } else if (parent instanceof ValueHolder && Attributes.CONVERTER.equals(nameValue)) { setConverter(faceletContext, parent, nameValue); } else if (parent instanceof ActionSource && Attributes.ACTION.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, String.class, ComponentUtils.ACTION_ARGS); if (action != null) { ((ActionSource2) parent).setActionExpression(action); } } else if (parent instanceof ActionSource && Attributes.ACTION_LISTENER.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, null, ComponentUtils.ACTION_LISTENER_ARGS); if (action != null) { ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(action)); } } else if (!parent.getAttributes().containsKey(nameValue)) { if (value.isLiteral()) { parent.getAttributes().put(nameValue, value.getValue()); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } } } } private boolean isMethodOrValueExpression(final String string) { return (string.startsWith("${") || string.startsWith("#{")) && string.endsWith("}"); } private boolean containsMethodOrValueExpression(final String string) { return 
(string.contains("${") || string.contains("#{")) && string.contains("}"); } private boolean isSimpleExpression(final String string) { return string.indexOf('.') < 0 && string.indexOf('[') < 0; } private String removeElParenthesis(final String string) { return string.substring(2, string.length() - 1); } private ValueExpression getExpression(final FaceletContext faceletContext) { final String myValue = removeElParenthesis(value.getValue()); return faceletContext.getVariableMapper().resolveVariable(myValue); } private MethodExpression getMethodExpression( final FaceletContext faceletContext, final Class returnType, final Class[] args) { // in a composition may be we get the method expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); return new TagMethodExpression(value, expressionFactory.createMethodExpression(faceletContext, expression.getExpressionString(), returnType, args)); } else { return null; } } else { return value.getMethodExpression(faceletContext, returnType, args); } } private Object getValue( final FaceletContext faceletContext, final UIComponent parent, final String expressionString, final String attributeName) { Class type = Object.class; try { type = PropertyUtils.getReadMethod( new PropertyDescriptor(attributeName, parent.getClass())).getReturnType(); } catch (final IntrospectionException e) { LOG.warn("Can't determine expected type", e); } final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final ValueExpression valueExpression = expressionFactory .createValueExpression(faceletContext, expressionString, type); return valueExpression.getValue(faceletContext); } private void setConverter(final FaceletContext faceletContext, final UIComponent parent, 
final String nameValue) { // in a composition may be we get the converter expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { setConverter(faceletContext, parent, nameValue, expression); } } else { setConverter(faceletContext, parent, nameValue, value.getValueExpression(faceletContext, Object.class)); } } private void setConverter( final FaceletContext faceletContext, final UIComponent parent, final String nameValue, final ValueExpression expression) { if (expression.isLiteralText()) { final Converter converter = faceletContext.getFacesContext().getApplication().createConverter(expression.getExpressionString()); ((ValueHolder) parent).setConverter(converter); } else { parent.setValueExpression(nameValue, expression); } } } //http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/component/Sorter.java/?v=source class Sorter { private static final Logger LOG = LoggerFactory.getLogger(Sorter.class); private Comparator comparator; /** * @deprecated Please use {@link #perform(org.apache.myfaces.tobago.internal.component.AbstractUISheet)} */ @Deprecated public void perform(final SortActionEvent sortEvent) { final AbstractUISheet data = (AbstractUISheet) sortEvent.getComponent(); perform(data); } public void perform(final AbstractUISheet data) { Object value = data.getValue(); if (value instanceof DataModel) { value = ((DataModel) value).getWrappedData(); } final FacesContext facesContext = FacesContext.getCurrentInstance(); final SheetState sheetState = data.getSheetState(facesContext); final String sortedColumnId = sheetState.getSortedColumnId(); if (LOG.isDebugEnabled()) { LOG.debug("sorterId = '{}'", sortedColumnId); } if (sortedColumnId == null) { // not to be sorted return; } final UIColumn column = (UIColumn) 
data.findComponent(sortedColumnId); if (column == null) { LOG.warn("No column to sort found, sorterId = '{}'", sortedColumnId); return; } final Comparator actualComparator; if (value instanceof List || value instanceof Object[]) { final String sortProperty; try { final UIComponent child = getFirstSortableChild(column.getChildren()); if (child != null) { final String attributeName = child instanceof AbstractUICommand ? Attributes.LABEL : Attributes.VALUE; if (child.getValueExpression(attributeName) != null) { final String var = data.getVar(); if (var == null) { LOG.error("No sorting performed. Property var of sheet is not set!"); unsetSortableAttribute(column); return; } String expressionString = child.getValueExpression(attributeName).getExpressionString(); if (isSimpleProperty(expressionString)) { if (expressionString.startsWith("#{") && expressionString.endsWith("}")) { expressionString = expressionString.substring(2, expressionString.length() - 1); } sortProperty = expressionString.substring(var.length() + 1); actualComparator = new BeanComparator( sortProperty, comparator, !sheetState.isAscending()); if (LOG.isDebugEnabled()) { LOG.debug("Sort property is {}", sortProperty); } } else { final boolean descending = !sheetState.isAscending(); final ValueExpression expression = child.getValueExpression("value"); actualComparator = new ValueExpressionComparator(facesContext, var, expression, descending, comparator); } } else { LOG.error("No sorting performed. No Expression target found for sorting!"); unsetSortableAttribute(column); return; } } else { LOG.error("No sorting performed. Value is not instanceof List or Object[]!"); unsetSortableAttribute(column); return; } } catch (final Exception e) { LOG.error("Error while extracting sortMethod :" + e.getMessage(), e); if (column != null) { unsetSortableAttribute(column); } return; } // TODO: locale / comparator parameter? 
// don't compare numbers with Collator.getInstance() comparator // Comparator comparator = Collator.getInstance(); // comparator = new RowComparator(ascending, method); // memorize selected rows List selectedDataRows = null; if (sheetState.getSelectedRows().size() > 0) { selectedDataRows = new ArrayList(sheetState.getSelectedRows().size()); Object dataRow; for (final Integer index : sheetState.getSelectedRows()) { if (value instanceof List) { dataRow = ((List) value).get(index); } else { dataRow = ((Object[]) value)[index]; } selectedDataRows.add(dataRow); } } // do sorting if (value instanceof List) { Collections.sort((List) value, actualComparator); } else { // value is instanceof Object[] Arrays.sort((Object[]) value, actualComparator); } // restore selected rows if (selectedDataRows != null) { sheetState.getSelectedRows().clear(); for (final Object dataRow : selectedDataRows) { int index = -1; if (value instanceof List) { for (int i = 0; i < ((List) value).size() && index < 0; i++) { if (dataRow == ((List) value).get(i)) { index = i; } } } else { for (int i = 0; i < ((Object[]) value).length && index < 0; i++) { if (dataRow == ((Object[]) value)[i]) { index = i; } } } if (index >= 0) { sheetState.getSelectedRows().add(index); } } } } else { // DataModel?, ResultSet, Result or Object LOG.warn("Sorting not supported for type " + (value != null ? 
value.getClass().toString() : "null")); } } // XXX needs to be tested // XXX was based on ^#\{(\w+(\.\w)*)\}$ which is wrong, because there is a + missing after the last \w boolean isSimpleProperty(final String expressionString) { if (expressionString.startsWith("#{") && expressionString.endsWith("}")) { final String inner = expressionString.substring(2, expressionString.length() - 1); final String[] parts = StringUtils.split(inner, '.'); for (final String part : parts) { if (!StringUtils.isAlpha(part)) { return false; } } return true; } return false; } private void unsetSortableAttribute(final UIColumn uiColumn) { LOG.warn("removing attribute sortable from column " + uiColumn.getId()); uiColumn.getAttributes().put(Attributes.SORTABLE, Boolean.FALSE); } private UIComponent getFirstSortableChild(final List children) { UIComponent result = null; for (UIComponent child : children) { result = child; if (child instanceof UISelectMany || child instanceof UISelectOne || child instanceof UISelectBoolean || (child instanceof AbstractUICommand && child.getChildren().isEmpty()) || (child instanceof UIInput && RendererTypes.HIDDEN.equals(child.getRendererType()))) { continue; // look for a better component if any } if (child instanceof UIOutput) { break; } if (child instanceof UICommand || child instanceof javax.faces.component.UIPanel) { child = getFirstSortableChild(child.getChildren()); if (child instanceof UIOutput) { break; } } } return result; } public Comparator getComparator() { return comparator; } public void setComparator(final Comparator comparator) { this.comparator = comparator; } } ================================================ FILE: test-resources/src/main/resources/AttributeHandlerJavaEleven.java ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. 
The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myfaces.tobago.facelets; import org.apache.commons.beanutils.PropertyUtils; import org.apache.myfaces.tobago.component.Attributes; import org.apache.myfaces.tobago.component.SupportsMarkup; import org.apache.myfaces.tobago.component.SupportsRenderedPartially; import org.apache.myfaces.tobago.context.Markup; import org.apache.myfaces.tobago.el.ConstantMethodBinding; import org.apache.myfaces.tobago.internal.util.StringUtils; import org.apache.myfaces.tobago.util.ComponentUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.el.ELException; import javax.el.ExpressionFactory; import javax.el.MethodExpression; import javax.el.ValueExpression; import javax.faces.FacesException; import javax.faces.component.ActionSource; import javax.faces.component.ActionSource2; import javax.faces.component.EditableValueHolder; import javax.faces.component.UIComponent; import javax.faces.component.ValueHolder; import javax.faces.convert.Converter; import javax.faces.event.MethodExpressionActionListener; import javax.faces.event.MethodExpressionValueChangeListener; import javax.faces.validator.MethodExpressionValidator; import javax.faces.view.facelets.ComponentHandler; import javax.faces.view.facelets.FaceletContext; import javax.faces.view.facelets.TagAttribute; import javax.faces.view.facelets.TagConfig; import javax.faces.view.facelets.TagException; import 
javax.faces.view.facelets.TagHandler; import java.beans.IntrospectionException; import java.beans.PropertyDescriptor; //from Apache MyFaces 2.0.8 //Retrieved from http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/facelets/AttributeHandler.java/?v=source public final class AttributeHandlerJavaEleven extends TagHandler { private static final Logger LOG = LoggerFactory.getLogger(AttributeHandlerJavaEleven.class); private final TagAttribute name; private final TagAttribute value; private final TagAttribute mode; public AttributeHandler(final TagConfig config) { super(config); this.name = getRequiredAttribute(Attributes.NAME); this.value = getRequiredAttribute(Attributes.VALUE); this.mode = getAttribute(Attributes.MODE); } public void apply(final FaceletContext faceletContext, final UIComponent parent) throws ELException { if (parent == null) { throw new TagException(tag, "Parent UIComponent was null"); } if (ComponentHandler.isNew(parent)) { if (mode != null) { if ("isNotSet".equals(mode.getValue())) { boolean result = false; var expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = true; break; } else { expressionString = expression.getExpressionString(); } } else { result = false; break; } } } else { result = StringUtils.isEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("isSet".equals(mode.getValue())) { boolean result = true; var expressionString = value.getValue(); if (!value.isLiteral()) { while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = 
faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { result = false; break; } else { expressionString = expression.getExpressionString(); } } else { result = true; break; } } } else { result = StringUtils.isNotEmpty(expressionString); } parent.getAttributes().put(name.getValue(), result); } else if ("action".equals(mode.getValue())) { var expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { // when the action hasn't been set while using a composition. if (LOG.isDebugEnabled()) { LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression action = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, String.class, ComponentUtils.ACTION_ARGS)); ((ActionSource2) parent).setActionExpression(action); } } else if ("actionListener".equals(mode.getValue())) { var expressionString = value.getValue(); while (isSimpleExpression(expressionString)) { if (isMethodOrValueExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression == null) { if (LOG.isDebugEnabled()) { // when the action hasn't been set while using a composition. 
LOG.debug("Variable can't be resolved: value='" + expressionString + "'"); } expressionString = null; break; } else { expressionString = expression.getExpressionString(); } } else { LOG.warn("Only expressions are supported mode=actionListener value='" + expressionString + "'"); expressionString = null; break; } } if (expressionString != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final MethodExpression actionListener = new TagMethodExpression(value, expressionFactory.createMethodExpression( faceletContext, expressionString, null, ComponentUtils.ACTION_LISTENER_ARGS)); ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(actionListener)); } } else if ("actionFromValue".equals(mode.getValue())) { if (!value.isLiteral()) { final String result = value.getValue(faceletContext); parent.getAttributes().put(name.getValue(), new ConstantMethodBinding(result)); } } else if ("valueIfSet".equals(mode.getValue())) { var expressionString = value.getValue(); String lastExpressionString = null; while (isMethodOrValueExpression(expressionString) && isSimpleExpression(expressionString)) { final ValueExpression expression = faceletContext.getVariableMapper().resolveVariable(removeElParenthesis(expressionString)); if (expression != null) { lastExpressionString = expressionString; expressionString = expression.getExpressionString(); } else { // restore last value expressionString = lastExpressionString; break; } } if (expressionString != null) { final String attributeName = name.getValue(faceletContext); if (containsMethodOrValueExpression(expressionString)) { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(attributeName, expression); } else { final Object literalValue = getValue(faceletContext, parent, expressionString, attributeName); parent.getAttributes().put(attributeName, literalValue); } } } else { throw new FacesException("Type " + mode + " 
not supported"); } } else { final String nameValue = name.getValue(faceletContext); if (Attributes.RENDERED.equals(nameValue)) { if (value.isLiteral()) { parent.setRendered(value.getBoolean(faceletContext)); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Boolean.class)); } } else if (Attributes.RENDERED_PARTIALLY.equals(nameValue) && parent instanceof SupportsRenderedPartially) { if (value.isLiteral()) { final String[] components = ComponentUtils.splitList(value.getValue()); ((SupportsRenderedPartially) parent).setRenderedPartially(components); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } else if (Attributes.STYLE_CLASS.equals(nameValue)) { // TODO expression ComponentUtils.setStyleClasses(parent, value.getValue()); } else if (Attributes.MARKUP.equals(nameValue)) { if (parent instanceof SupportsMarkup) { if (value.isLiteral()) { ((SupportsMarkup) parent).setMarkup(Markup.valueOf(value.getValue())); } else { final ValueExpression expression = value.getValueExpression(faceletContext, Object.class); parent.setValueExpression(nameValue, expression); } } else { LOG.error("Component is not instanceof SupportsMarkup. 
Instance is: " + parent.getClass().getName()); } } else if (parent instanceof EditableValueHolder && Attributes.VALIDATOR.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALIDATOR_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValidator(new MethodExpressionValidator(methodExpression)); } } else if (parent instanceof EditableValueHolder && Attributes.VALUE_CHANGE_LISTENER.equals(nameValue)) { final MethodExpression methodExpression = getMethodExpression(faceletContext, null, ComponentUtils.VALUE_CHANGE_LISTENER_ARGS); if (methodExpression != null) { ((EditableValueHolder) parent).addValueChangeListener( new MethodExpressionValueChangeListener(methodExpression)); } } else if (parent instanceof ValueHolder && Attributes.CONVERTER.equals(nameValue)) { setConverter(faceletContext, parent, nameValue); } else if (parent instanceof ActionSource && Attributes.ACTION.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, String.class, ComponentUtils.ACTION_ARGS); if (action != null) { ((ActionSource2) parent).setActionExpression(action); } } else if (parent instanceof ActionSource && Attributes.ACTION_LISTENER.equals(nameValue)) { final MethodExpression action = getMethodExpression(faceletContext, null, ComponentUtils.ACTION_LISTENER_ARGS); if (action != null) { ((ActionSource) parent).addActionListener(new MethodExpressionActionListener(action)); } } else if (!parent.getAttributes().containsKey(nameValue)) { if (value.isLiteral()) { parent.getAttributes().put(nameValue, value.getValue()); } else { parent.setValueExpression(nameValue, value.getValueExpression(faceletContext, Object.class)); } } } } } private boolean isMethodOrValueExpression(final String string) { return (string.startsWith("${") || string.startsWith("#{")) && string.endsWith("}"); } private boolean containsMethodOrValueExpression(final String string) { return 
(string.contains("${") || string.contains("#{")) && string.contains("}"); } private boolean isSimpleExpression(final String string) { return string.indexOf('.') < 0 && string.indexOf('[') < 0; } private String removeElParenthesis(final String string) { return string.substring(2, string.length() - 1); } private ValueExpression getExpression(final FaceletContext faceletContext) { final String myValue = removeElParenthesis(value.getValue()); return faceletContext.getVariableMapper().resolveVariable(myValue); } private MethodExpression getMethodExpression( final FaceletContext faceletContext, final Class returnType, final Class[] args) { // in a composition may be we get the method expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); return new TagMethodExpression(value, expressionFactory.createMethodExpression(faceletContext, expression.getExpressionString(), returnType, args)); } else { return null; } } else { return value.getMethodExpression(faceletContext, returnType, args); } } private Object getValue( final FaceletContext faceletContext, final UIComponent parent, final String expressionString, final String attributeName) { Class type = Object.class; try { type = PropertyUtils.getReadMethod( new PropertyDescriptor(attributeName, parent.getClass())).getReturnType(); } catch (final IntrospectionException e) { LOG.warn("Can't determine expected type", e); } final ExpressionFactory expressionFactory = faceletContext.getExpressionFactory(); final ValueExpression valueExpression = expressionFactory .createValueExpression(faceletContext, expressionString, type); return valueExpression.getValue(faceletContext); } private void setConverter(final FaceletContext faceletContext, final UIComponent parent, 
final String nameValue) { // in a composition may be we get the converter expression string from the current variable mapper // the expression can be empty // in this case return nothing if (value.getValue().startsWith("${")) { final ValueExpression expression = getExpression(faceletContext); if (expression != null) { setConverter(faceletContext, parent, nameValue, expression); } } else { setConverter(faceletContext, parent, nameValue, value.getValueExpression(faceletContext, Object.class)); } } private void setConverter( final FaceletContext faceletContext, final UIComponent parent, final String nameValue, final ValueExpression expression) { if (expression.isLiteralText()) { final Converter converter = faceletContext.getFacesContext().getApplication().createConverter(expression.getExpressionString()); ((ValueHolder) parent).setConverter(converter); } else { parent.setValueExpression(nameValue, expression); } } } ================================================ FILE: test-resources/src/main/resources/Attributes.java ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myfaces.tobago.component; /** * Name constants of the attributes of the Tobago components. 
*/
//from http://grepcode.com/file_/repo1.maven.org/maven2/org.apache.myfaces.tobago/tobago-core/2.0.8/org/apache/myfaces/tobago/component/Attributes.java/?v=source
public final class Attributes {

  public static final String ACCESS_KEY = "accessKey";
  public static final String ACTION = "action";
  public static final String ACTION_LISTENER = "actionListener";
  public static final String ALIGN = "align";
  public static final String ALT = "alt";
  public static final String APPLICATION_ICON = "applicationIcon";
  public static final String AUTO_RELOAD = "autoReload";
  public static final String BODY_CONTENT = "bodyContent";
  public static final String BORDER = "border";

  /** Used by a layout manager */
  public static final String BORDER_BOTTOM = "borderBottom";

  /** Used by a layout manager */
  public static final String BORDER_LEFT = "borderLeft";

  /** Used by a layout manager */
  public static final String BORDER_RIGHT = "borderRight";

  /** Used by a layout manager */
  public static final String BORDER_TOP = "borderTop";

  public static final String CHARSET = "charset";

  /** @deprecated */
  @Deprecated
  public static final String CELLSPACING = "cellspacing";

  public static final String CLIENT_PROPERTIES = "clientProperties";
  public static final String COLUMN_SPAN = "columnSpan";
  public static final String COLUMN_SPACING = "columnSpacing";
  public static final String COLUMNS = "columns";
  public static final String CONVERTER = "converter";
  public static final String CREATE_SPAN = "createSpan";
  public static final String CSS_CLASSES_BLOCKS = "cssClassesBlocks";

  /** @deprecated since 2.0.0 */
  @Deprecated
  public static final String DATE_INPUT_ID = "dateInputId";

  public static final String DATE_STYLE = "dateStyle";
  public static final String DEFAULT_COMMAND = "defaultCommand";
  public static final String DELAY = "delay";
  public static final String DIRECT_LINK_COUNT = "directLinkCount";
  public static final String DISABLED = "disabled";
  public static final String ENCTYPE = "enctype";
  public static final String ESCAPE = "escape";
  public static final String EXPANDED = "expanded";
  public static final String EXECUTE = "execute";
  public static final String EVENT = "event";
  public static final String FIELD_ID = "fieldId";
  public static final String FIRST = "first";
  public static final String FREQUENCY = "frequency";
  public static final String FOCUS = "focus";
  public static final String FOCUS_ID = "focusId";
  public static final String FORCE_VERTICAL_SCROLLBAR = "forceVerticalScrollbar";
  public static final String FORMAT_PATTERN = "formatPattern";
  public static final String FOR = "for";
  public static final String GLOBAL_ONLY = "globalOnly";
  public static final String HEIGHT = "height";
  public static final String HIDDEN = "hidden";
  public static final String HOVER = "hover";
  public static final String I18N = "i18n";
  public static final String ICON_SIZE = "iconSize";
  public static final String ID = "id";
  public static final String IMMEDIATE = "immediate";
  public static final String IMAGE = "image";
  public static final String INLINE = "inline";

  /** @deprecated */
  @Deprecated
  public static final String INNER_HEIGHT = "innerHeight";

  /** @deprecated */
  @Deprecated
  public static final String INNER_WIDTH = "innerWidth";

  public static final String ITEM_DESCRIPTION = "itemDescription";
  public static final String ITEM_DISABLED = "itemDisabled";
  public static final String ITEM_LABEL = "itemLabel";
  public static final String ITEM_IMAGE = "itemImage";
  public static final String ITEM_VALUE = "itemValue";
  public static final String JSF_RESOURCE = "jsfResource";
  public static final String LABEL = "label";
  public static final String LABEL_POSITION = "labelPosition";
  public static final String LABEL_WIDTH = "labelWidth";
  public static final String LAYOUT_HEIGHT = "layoutHeight";
  public static final String LAYOUT_ORDER = "layoutOrder";
  public static final String LAYOUT_WIDTH = "layoutWidth";
  public static final String LEFT = "left";
  public static final String LINK = "link";

  /** @deprecated */
  @Deprecated
  public static final String MARGIN = "margin";

  /** Used by a layout manager */
  public static final String MARGIN_BOTTOM = "marginBottom";

  /** Used by a layout manager */
  public static final String MARGIN_LEFT = "marginLeft";

  /** Used by a layout manager */
  public static final String MARGIN_RIGHT = "marginRight";

  /** Used by a layout manager */
  public static final String MARGIN_TOP = "marginTop";

  public static final String MARKED = "marked";
  public static final String MARKUP = "markup";
  public static final String MAX = "max";
  public static final String MAX_SEVERITY = "maxSeverity";
  public static final String MAX_NUMBER = "maxNumber";
  public static final String MAXIMUM_HEIGHT = "maximumHeight";
  public static final String MAXIMUM_WIDTH = "maximumWidth";
  public static final String METHOD = "method";
  public static final String MIN = "min";
  public static final String MIN_SEVERITY = "minSeverity";
  public static final String MINIMUM_HEIGHT = "minimumHeight";
  public static final String MINIMUM_WIDTH = "minimumWidth";
  public static final String MODAL = "modal";
  public static final String MODE = "mode";
  public static final String MUTABLE = "mutable";
  public static final String NAME = "name";
  public static final String NAVIGATE = "navigate";
  public static final String NUMBER_STYLE = "numberStyle";
  public static final String OMIT = "omit";

  /** @deprecated Since 2.0.0. This attribute work not with SCP */
  @Deprecated
  public static final String ONCHANGE = "onchange";

  /** @deprecated Since 2.0.0. This attribute work not with SCP */
  @Deprecated
  public static final String ONCLICK = "onclick";

  public static final String ORDER_BY = "orderBy";
  public static final String ORIENTATION = "orientation";

  /** Used by a layout manager */
  public static final String PADDING_BOTTOM = "paddingBottom";

  /** Used by a layout manager */
  public static final String PADDING_LEFT = "paddingLeft";

  /** Used by a layout manager */
  public static final String PADDING_RIGHT = "paddingRight";

  /** Used by a layout manager */
  public static final String PADDING_TOP = "paddingTop";

  /** @deprecated Since 2.0.6. No longer needed. */
  @Deprecated
  public static final String PAGE_MENU = "pageMenu";

  public static final String PASSWORD = "password";
  public static final String POPUP_CLOSE = "popupClose";
  public static final String POPUP_LIST = "popupList";
  public static final String POPUP_RESET = "popupReset";
  public static final String POPUP_CALENDAR_ID = "popupCalendarId";
  public static final String PREFERRED_HEIGHT = "preferredHeight";
  public static final String PREFERRED_WIDTH = "preferredWidth";
  public static final String PREFORMATED = "preformated";
  public static final String READONLY = "readonly";
  public static final String REFERENCE = "reference";
  public static final String RELATIVE = "relative";
  public static final String RENDERED = "rendered";
  public static final String RENDERED_PARTIALLY = "renderedPartially";
  public static final String RENDERER_TYPE = "rendererType";
  public static final String RENDER_AS = "renderAs";
  public static final String RENDER_RANGE = "renderRange";
  public static final String RENDER_RANGE_EXTERN = "renderRangeExtern";
  public static final String REQUIRED = "required";
  public static final String RESIZABLE = "resizable";
  public static final String RESOURCE = "resource";
  public static final String ROW_ID = "rowId";
  public static final String ROW_SPAN = "rowSpan";
  public static final String ROW_SPACING = "rowSpacing";
  public static final String ROWS = "rows";
  public static final String SCRIPT_FILES = "scriptFiles";
  public static final String SCROLLBAR_HEIGHT = "scrollbarHeight";
  public static final String SCROLLBARS = "scrollbars";

  // Attribute name could not be the same as the method name
  // this cause an infinite loop on attribute map
  public static final String SCROLL_POSITION = "attrScrollPosition";

  public static final String SELECTED_INDEX = "selectedIndex";
  public static final String SELECTED_LIST_STRING = "selectedListString";
  public static final String SORTABLE = "sortable";
  public static final String SELECTABLE = "selectable";
  public static final String SHOW_DIRECT_LINKS = "showDirectLinks";
  public static final String SHOW_HEADER = "showHeader";
  public static final String SHOW_JUNCTIONS = "showJunctions";
  public static final String SHOW_NAVIGATION_BAR = "showNavigationBar";
  public static final String SHOW_PAGE_RANGE = "showPageRange";
  public static final String SHOW_ROOT = "showRoot";
  public static final String SHOW_ROOT_JUNCTION = "showRootJunction";
  public static final String SHOW_ROW_RANGE = "showRowRange";
  public static final String SHOW_SUMMARY = "showSummary";
  public static final String SHOW_DETAIL = "showDetail";
  public static final String SPAN_X = "spanX";
  public static final String SPAN_Y = "spanY";
  public static final String SRC = "src";
  public static final String STATE = "state";
  public static final String STATE_PREVIEW = "statePreview";
  public static final String STYLE = "style";

  /** @deprecated */
  @Deprecated
  public static final String STYLE_CLASS = "styleClass";

  public static final String SUPPRESS_TOOLBAR_CONTAINER = "suppressToolbarContainer";
  public static final String SWITCH_TYPE = "switchType";
  public static final String TAB_INDEX = "tabIndex";
  public static final String TARGET = "target";
  public static final String TIME_STYLE = "timeStyle";
  public static final String TEXT_ALIGN = "textAlign";
  public static final String TIMEZONE = "timezone";
  public static final String TITLE = "title";
  public static final String TIP = "tip";
  public static final String TOP = "top";
  public static final String TRANSITION = "transition";
  public static final String TYPE = "type";
  public static final String VALUE = "value";
  public static final String VALUE_CHANGE_LISTENER = "valueChangeListener";
  public static final String VAR = "var";
  public static final String UNIT = "unit";
  public static final String UPDATE = "update";
  public static final String VALIDATOR = "validator";
  public static final String WIDTH = "width";
  public static final String WIDTH_LIST = "widthList";
  public static final String WIDTH_LIST_STRING = "widthListString";
  public static final String Z_INDEX = "zIndex";
}

================================================
FILE: test-resources/src/main/resources/Console.java
================================================
/*
 * Copyright 2018 Confluent Inc.
 *
 * Licensed under the Confluent Community License (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of the
 * License at
 *
 * http://www.confluent.io/confluent-community-license
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
*/ package io.confluent.ksql.cli.console; import static io.confluent.ksql.util.CmdLineUtil.splitByUnquotedWhitespace; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Streams; import io.confluent.ksql.cli.console.CliConfig.OnOff; import io.confluent.ksql.cli.console.KsqlTerminal.HistoryEntry; import io.confluent.ksql.cli.console.KsqlTerminal.StatusClosable; import io.confluent.ksql.cli.console.cmd.CliSpecificCommand; import io.confluent.ksql.cli.console.table.Table; import io.confluent.ksql.cli.console.table.Table.Builder; import io.confluent.ksql.cli.console.table.builder.CommandStatusTableBuilder; import io.confluent.ksql.cli.console.table.builder.ConnectorInfoTableBuilder; import io.confluent.ksql.cli.console.table.builder.ConnectorListTableBuilder; import io.confluent.ksql.cli.console.table.builder.ConnectorPluginsListTableBuilder; import io.confluent.ksql.cli.console.table.builder.DropConnectorTableBuilder; import io.confluent.ksql.cli.console.table.builder.ExecutionPlanTableBuilder; import io.confluent.ksql.cli.console.table.builder.FunctionNameListTableBuilder; import io.confluent.ksql.cli.console.table.builder.KafkaTopicsListTableBuilder; import io.confluent.ksql.cli.console.table.builder.ListVariablesTableBuilder; import io.confluent.ksql.cli.console.table.builder.PropertiesListTableBuilder; import io.confluent.ksql.cli.console.table.builder.QueriesTableBuilder; import io.confluent.ksql.cli.console.table.builder.StreamsListTableBuilder; import io.confluent.ksql.cli.console.table.builder.TableBuilder; import io.confluent.ksql.cli.console.table.builder.TablesListTableBuilder; import io.confluent.ksql.cli.console.table.builder.TerminateQueryTableBuilder; import io.confluent.ksql.cli.console.table.builder.TopicDescriptionTableBuilder; import 
io.confluent.ksql.cli.console.table.builder.TypeListTableBuilder; import io.confluent.ksql.cli.console.table.builder.WarningEntityTableBuilder; import io.confluent.ksql.metrics.TopicSensors.Stat; import io.confluent.ksql.model.WindowType; import io.confluent.ksql.query.QueryError; import io.confluent.ksql.rest.ApiJsonMapper; import io.confluent.ksql.rest.entity.ArgumentInfo; import io.confluent.ksql.rest.entity.AssertSchemaEntity; import io.confluent.ksql.rest.entity.AssertTopicEntity; import io.confluent.ksql.rest.entity.CommandStatusEntity; import io.confluent.ksql.rest.entity.ConnectorDescription; import io.confluent.ksql.rest.entity.ConnectorList; import io.confluent.ksql.rest.entity.ConnectorPluginsList; import io.confluent.ksql.rest.entity.CreateConnectorEntity; import io.confluent.ksql.rest.entity.DropConnectorEntity; import io.confluent.ksql.rest.entity.ExecutionPlan; import io.confluent.ksql.rest.entity.FieldInfo; import io.confluent.ksql.rest.entity.FieldInfo.FieldType; import io.confluent.ksql.rest.entity.FunctionDescriptionList; import io.confluent.ksql.rest.entity.FunctionInfo; import io.confluent.ksql.rest.entity.FunctionNameList; import io.confluent.ksql.rest.entity.KafkaTopicsList; import io.confluent.ksql.rest.entity.KafkaTopicsListExtended; import io.confluent.ksql.rest.entity.KsqlEntity; import io.confluent.ksql.rest.entity.KsqlErrorMessage; import io.confluent.ksql.rest.entity.KsqlStatementErrorMessage; import io.confluent.ksql.rest.entity.KsqlWarning; import io.confluent.ksql.rest.entity.PropertiesList; import io.confluent.ksql.rest.entity.Queries; import io.confluent.ksql.rest.entity.QueryDescription; import io.confluent.ksql.rest.entity.QueryDescriptionEntity; import io.confluent.ksql.rest.entity.QueryDescriptionList; import io.confluent.ksql.rest.entity.QueryHostStat; import io.confluent.ksql.rest.entity.QueryOffsetSummary; import io.confluent.ksql.rest.entity.QueryTopicOffsetSummary; import io.confluent.ksql.rest.entity.RunningQuery; import 
io.confluent.ksql.rest.entity.SourceDescription; import io.confluent.ksql.rest.entity.SourceDescriptionEntity; import io.confluent.ksql.rest.entity.SourceDescriptionList; import io.confluent.ksql.rest.entity.StreamedRow; import io.confluent.ksql.rest.entity.StreamedRow.DataRow; import io.confluent.ksql.rest.entity.StreamedRow.Header; import io.confluent.ksql.rest.entity.StreamsList; import io.confluent.ksql.rest.entity.TablesList; import io.confluent.ksql.rest.entity.TerminateQueryEntity; import io.confluent.ksql.rest.entity.TopicDescription; import io.confluent.ksql.rest.entity.TypeList; import io.confluent.ksql.rest.entity.VariablesList; import io.confluent.ksql.rest.entity.WarningEntity; import io.confluent.ksql.util.CmdLineUtil; import io.confluent.ksql.util.HandlerMaps; import io.confluent.ksql.util.HandlerMaps.ClassHandlerMap1; import io.confluent.ksql.util.HandlerMaps.Handler1; import io.confluent.ksql.util.KsqlException; import io.confluent.ksql.util.TabularRow; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.nio.charset.Charset; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Instant; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.StringTokenizer; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; import org.apache.kafka.common.config.ConfigException; import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo; import 
org.jline.terminal.Terminal.Signal; import org.jline.terminal.Terminal.SignalHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; // CHECKSTYLE_RULES.OFF: ClassDataAbstractionCoupling public class Console implements Closeable { // CHECKSTYLE_RULES.ON: ClassDataAbstractionCoupling private static final Logger log = LoggerFactory.getLogger(Console.class); private static final ObjectMapper OBJECT_MAPPER = ApiJsonMapper.INSTANCE.get(); private static final ClassHandlerMap1 PRINT_HANDLERS = HandlerMaps.forClass(KsqlEntity.class).withArgType(Console.class) .put(CommandStatusEntity.class, tablePrinter(CommandStatusEntity.class, CommandStatusTableBuilder::new)) .put(PropertiesList.class, tablePrinter(PropertiesList.class, PropertiesListTableBuilder::new)) .put(Queries.class, tablePrinter(Queries.class, QueriesTableBuilder::new)) .put(SourceDescriptionEntity.class, (console, entity) -> console.printSourceDescription(entity.getSourceDescription())) .put(SourceDescriptionList.class, Console::printSourceDescriptionList) .put(QueryDescriptionEntity.class, (console, entity) -> console.printQueryDescription(entity.getQueryDescription())) .put(QueryDescriptionList.class, Console::printQueryDescriptionList) .put(TopicDescription.class, tablePrinter(TopicDescription.class, TopicDescriptionTableBuilder::new)) .put(StreamsList.class, tablePrinter(StreamsList.class, StreamsListTableBuilder::new)) .put(TablesList.class, tablePrinter(TablesList.class, TablesListTableBuilder::new)) .put(KafkaTopicsList.class, tablePrinter(KafkaTopicsList.class, KafkaTopicsListTableBuilder.SimpleBuilder::new)) .put(KafkaTopicsListExtended.class, tablePrinter( KafkaTopicsListExtended.class, KafkaTopicsListTableBuilder.ExtendedBuilder::new)) .put(ExecutionPlan.class, tablePrinter(ExecutionPlan.class, ExecutionPlanTableBuilder::new)) .put(FunctionNameList.class, tablePrinter(FunctionNameList.class, FunctionNameListTableBuilder::new)) .put(FunctionDescriptionList.class, 
Console::printFunctionDescription) .put(CreateConnectorEntity.class, tablePrinter(CreateConnectorEntity.class, ConnectorInfoTableBuilder::new)) .put(DropConnectorEntity.class, tablePrinter(DropConnectorEntity.class, DropConnectorTableBuilder::new)) .put(ConnectorList.class, tablePrinter(ConnectorList.class, ConnectorListTableBuilder::new)) .put(ConnectorPluginsList.class, tablePrinter(ConnectorPluginsList.class, ConnectorPluginsListTableBuilder::new)) .put(ConnectorDescription.class, Console::printConnectorDescription) .put(TypeList.class, tablePrinter(TypeList.class, TypeListTableBuilder::new)) .put(WarningEntity.class, tablePrinter(WarningEntity.class, WarningEntityTableBuilder::new)) .put(VariablesList.class, tablePrinter(VariablesList.class, ListVariablesTableBuilder::new)) .put(TerminateQueryEntity.class, tablePrinter(TerminateQueryEntity.class, TerminateQueryTableBuilder::new)) .put(AssertTopicEntity.class, Console::printAssertTopic) .put(AssertSchemaEntity.class, Console::printAssertSchema) .build(); private static Handler1 tablePrinter( final Class entityType, final Supplier> tableBuilderType) { try { final TableBuilder tableBuilder = tableBuilderType.get(); return (console, type) -> { final Table table = tableBuilder.buildTable(entityType.cast(type)); table.print(console); }; } catch (final Exception e) { throw new IllegalStateException("Error instantiating tableBuilder: " + tableBuilderType); } } private final Map cliSpecificCommands; private final KsqlTerminal terminal; private final RowCaptor rowCaptor; private OutputFormat outputFormat; private Optional spoolFile = Optional.empty(); private CliConfig config; public interface RowCaptor { void addRow(DataRow row); void addRows(List> fields); } public static Console build(final OutputFormat outputFormat) { final AtomicReference consoleRef = new AtomicReference<>(); final Predicate isCliCommand = line -> { final Console theConsole = consoleRef.get(); return theConsole != null && 
theConsole.getCliCommand(line).isPresent(); }; final Path historyFilePath = Paths.get(System.getProperty( "history-file", System.getProperty("user.home") + "/.ksql-history" )).toAbsolutePath(); final KsqlTerminal terminal = new JLineTerminal(isCliCommand, historyFilePath); final Console console = new Console( outputFormat, terminal, new NoOpRowCaptor()); consoleRef.set(console); return console; } public Console( final OutputFormat outputFormat, final KsqlTerminal terminal, final RowCaptor rowCaptor ) { this.outputFormat = Objects.requireNonNull(outputFormat, "outputFormat"); this.terminal = Objects.requireNonNull(terminal, "terminal"); this.rowCaptor = Objects.requireNonNull(rowCaptor, "rowCaptor"); this.cliSpecificCommands = Maps.newLinkedHashMap(); this.config = new CliConfig(ImmutableMap.of()); } public PrintWriter writer() { return terminal.writer(); } public void flush() { terminal.flush(); } public void setSpool(final File file) { try { terminal.setSpool(new PrintWriter(file, Charset.defaultCharset().name())); spoolFile = Optional.of(file); terminal.writer().println("Session will be spooled to " + file.getAbsolutePath()); terminal.writer().println("Enter SPOOL OFF to disable"); } catch (final IOException e) { throw new KsqlException("Cannot SPOOL to file: " + file, e); } } public void unsetSpool() { terminal.unsetSpool(); spoolFile.ifPresent(f -> terminal.writer().println("Spool written to " + f.getAbsolutePath())); spoolFile = Optional.empty(); } public int getWidth() { return terminal.getWidth(); } public void clearScreen() { terminal.clearScreen(); } public StatusClosable setStatusMessage(final String message) { return terminal.setStatusMessage(message); } public void handle(final Signal signal, final SignalHandler signalHandler) { terminal.handle(signal, signalHandler); } public void setCliProperty(final String name, final Object value) { try { config = config.with(name, value); } catch (final ConfigException e) { 
terminal.writer().println(e.getMessage()); } } @Override public void close() { terminal.close(); } public void addResult(final List> rowValues) { rowCaptor.addRows(rowValues); } public Map getCliSpecificCommands() { return new HashMap<>(cliSpecificCommands); } public String nextNonCliCommand() { String line; do { line = terminal.readLine(); } while (maybeHandleCliSpecificCommands(line)); return line; } public List getHistory() { return Collections.unmodifiableList(terminal.getHistory()); } public void printErrorMessage(final KsqlErrorMessage errorMessage) { if (errorMessage instanceof KsqlStatementErrorMessage) { printKsqlEntityList(((KsqlStatementErrorMessage) errorMessage).getEntities()); } printError(errorMessage.getMessage(), errorMessage.toString()); } public void printError(final String shortMsg, final String fullMsg) { log.error(fullMsg); terminal.printError(shortMsg); } public void printStreamedRow(final StreamedRow row) { row.getErrorMessage().ifPresent(this::printErrorMessage); row.getFinalMessage().ifPresent(finalMsg -> writer().println(finalMsg)); row.getHeader().ifPresent(this::printRowHeader); if (row.getRow().isPresent()) { switch (outputFormat) { case JSON: printAsJson(row.getRow().get()); break; case TABULAR: printAsTable(row.getRow().get()); break; default: throw new RuntimeException(String.format( "Unexpected output format: '%s'", outputFormat.name() )); } } } public void printKsqlEntityList(final List entityList) { switch (outputFormat) { case JSON: printAsJson(entityList); break; case TABULAR: final boolean showStatements = entityList.size() > 1; for (final KsqlEntity ksqlEntity : entityList) { writer().println(); if (showStatements) { writer().println(ksqlEntity.getStatementText()); } printAsTable(ksqlEntity); } break; default: throw new RuntimeException(String.format( "Unexpected output format: '%s'", outputFormat.name() )); } } private void printRowHeader(final Header header) { switch (outputFormat) { case JSON: printAsJson(header); break; 
case TABULAR: writer().println( TabularRow.createHeader( getWidth(), header.getSchema().columns(), config.getString(CliConfig.WRAP_CONFIG).equalsIgnoreCase(OnOff.ON.toString()), config.getInt(CliConfig.COLUMN_WIDTH_CONFIG) ) ); break; default: throw new RuntimeException(String.format( "Unexpected output format: '%s'", outputFormat.name() )); } } public void registerCliSpecificCommand(final CliSpecificCommand cliSpecificCommand) { cliSpecificCommands.put(cliSpecificCommand.getName().toLowerCase(), cliSpecificCommand); } public void setOutputFormat(final String newFormat) { try { outputFormat = OutputFormat.get(newFormat); writer().printf("Output format set to %s%n", outputFormat.name()); } catch (final IllegalArgumentException exception) { writer().printf( "Invalid output format: '%s' (valid formats: %s)%n", newFormat, OutputFormat.VALID_FORMATS ); } } public OutputFormat getOutputFormat() { return outputFormat; } private Optional getCliCommand(final String line) { final List parts = splitByUnquotedWhitespace(StringUtils.stripEnd(line.trim(), ";")); if (parts.isEmpty()) { return Optional.empty(); } final String command = String.join(" ", parts); return cliSpecificCommands.values().stream() .filter(cliSpecificCommand -> cliSpecificCommand.matches(command)) .map(cliSpecificCommand -> CliCmdExecutor.of(cliSpecificCommand, parts)) .findFirst(); } private void printAsTable(final DataRow row) { rowCaptor.addRow(row); final boolean tombstone = row.getTombstone().orElse(false); final List columns = tombstone ? row.getColumns().stream() .map(val -> val == null ? 
"" : val) .collect(Collectors.toList()) : row.getColumns(); writer().println(TabularRow.createRow( getWidth(), columns, config.getString(CliConfig.WRAP_CONFIG).equalsIgnoreCase(OnOff.ON.toString()), config.getInt(CliConfig.COLUMN_WIDTH_CONFIG)) ); flush(); } private void printAsTable(final KsqlEntity entity) { final Handler1 handler = PRINT_HANDLERS.get(entity.getClass()); if (handler == null) { throw new RuntimeException(String.format( "Unexpected KsqlEntity class: '%s'", entity.getClass().getCanonicalName() )); } handler.handle(this, entity); printWarnings(entity); } private void printWarnings(final KsqlEntity entity) { for (final KsqlWarning warning : entity.getWarnings()) { writer().println("WARNING: " + warning.getMessage()); } } private static String formatFieldType( final FieldInfo field, final Optional windowType, final boolean isTable ) { final FieldType possibleFieldType = field.getType().orElse(null); if (possibleFieldType == FieldType.HEADER) { final String headerType = field.getHeaderKey() .map(k -> "(header('" + k + "'))") .orElse("(headers)"); return String.format("%-16s %s", field.getSchema().toTypeString(), headerType); } if (possibleFieldType == FieldType.KEY) { final String wt = windowType .map(v -> " (Window type: " + v + ")") .orElse(""); final String keyType = isTable ? "(primary key)" : "(key)"; return String.format("%-16s %s%s", field.getSchema().toTypeString(), keyType, wt); } return field.getSchema().toTypeString(); } private void printSchema( final Optional windowType, final List fields, final boolean isTable ) { final Table.Builder tableBuilder = new Table.Builder(); if (!fields.isEmpty()) { tableBuilder.withColumnHeaders("Field", "Type"); fields.forEach(f -> tableBuilder.withRow( f.getName(), formatFieldType(f, windowType, isTable) )); tableBuilder.build().print(this); } } private void printTopicInfo(final SourceDescription source) { final String timestamp = source.getTimestamp().isEmpty() ? 
"Not set - using " : source.getTimestamp(); writer().println(String.format("%-20s : %s", "Timestamp field", timestamp)); writer().println(String.format("%-20s : %s", "Key format", source.getKeyFormat())); writer().println(String.format("%-20s : %s", "Value format", source.getValueFormat())); if (!source.getTopic().isEmpty()) { String topicInformation = String.format("%-20s : %s", "Kafka topic", source.getTopic() ); // If Describe ACLs permissions aren't given for a topic, partitions and replica default to 0 // Details aren't printed out if the Describe fails. if (source.getPartitions() != 0) { topicInformation = topicInformation.concat(String.format( " (partitions: %d, replication: %d)", source.getPartitions(), source.getReplication() )); } writer().println(topicInformation); } } private void printSourceConstraints(final List sourceConstraints) { if (!sourceConstraints.isEmpty()) { writer().println(String.format( "%n%-20s%n%-20s", "Sources that have a DROP constraint on this source", "--------------------------------------------------" )); sourceConstraints.forEach(sourceName -> writer().println(sourceName)); } } private void printQueries( final List queries, final String type, final String operation ) { if (!queries.isEmpty()) { writer().println(String.format( "%n%-20s%n%-20s", "Queries that " + operation + " from this " + type, "-----------------------------------" )); for (final RunningQuery writeQuery : queries) { writer().println(writeQuery.getId() + " (" + writeQuery.getState().orElse("N/A") + ") : " + writeQuery.getQuerySingleLine()); } writer().println("\nFor query topology and execution plan please run: EXPLAIN "); } } private void printExecutionPlan(final QueryDescription queryDescription) { if (!queryDescription.getExecutionPlan().isEmpty()) { writer().println(String.format( "%n%-20s%n%-20s%n%s", "Execution plan", "--------------", queryDescription.getExecutionPlan() )); } } private void printTopology(final QueryDescription queryDescription) { if 
(!queryDescription.getTopology().isEmpty()) { writer().println(String.format( "%n%-20s%n%-20s%n%s", "Processing topology", "-------------------", queryDescription.getTopology() )); } } private void printOverriddenProperties(final QueryDescription queryDescription) { final Map overriddenProperties = queryDescription.getOverriddenProperties(); if (overriddenProperties.isEmpty()) { return; } final List> rows = overriddenProperties.entrySet().stream() .sorted(Entry.comparingByKey()) .map(prop -> Arrays.asList(prop.getKey(), Objects.toString(prop.getValue()))) .collect(Collectors.toList()); new Builder() .withColumnHeaders("Property", "Value") .withRows(rows) .withHeaderLine(String.format( "%n%-20s%n%-20s", "Overridden Properties", "---------------------")) .build() .print(this); } private void printQueryError(final QueryDescription query) { writer().println(); final DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd hh:mm:ss,SSS (z)"); for (final QueryError error : query.getQueryErrors()) { final Instant ts = Instant.ofEpochMilli(error.getTimestamp()); final String errorDate = ts.atZone(ZoneId.systemDefault()).format(dateFormatter); writer().println(String.format("%-20s : %s", "Error Date", errorDate)); writer().println(String.format("%-20s : %s", "Error Details", error.getErrorMessage())); writer().println(String.format("%-20s : %s", "Error Type", error.getType())); } } private void printStatistics(final SourceDescription source) { final List statistics = source.getClusterStatistics(); final List errors = source.getClusterErrorStats(); if (statistics.isEmpty() && errors.isEmpty()) { writer().println(String.format( "%n%-20s%n%s", "Local runtime statistics", "------------------------" )); writer().println(source.getStatistics()); writer().println(source.getErrorStats()); return; } final List headers = ImmutableList.of("Host", "Metric", "Value", "Last Message"); final Stream rows = Streams.concat(statistics.stream(), errors.stream()); 
writer().println(String.format( "%n%-20s%n%s", "Runtime statistics by host", "-------------------------" )); final Table statsTable = new Table.Builder() .withColumnHeaders(headers) .withRows(rows .sorted(Comparator .comparing(QueryHostStat::host) .thenComparing(Stat::name) ) .map((metric) -> { final String hostCell = metric.host().toString(); final String formattedValue = String.format("%10.0f", metric.getValue()); return ImmutableList.of(hostCell, metric.name(), formattedValue, metric.timestamp()); })) .build(); statsTable.print(this); } private void printSourceDescription(final SourceDescription source) { final boolean isTable = source.getType().equalsIgnoreCase("TABLE"); writer().println(String.format("%-20s : %s", "Name", source.getName())); if (!source.isExtended()) { printSchema(source.getWindowType(), source.getFields(), isTable); writer().println( "For runtime statistics and query details run: DESCRIBE EXTENDED;"); return; } writer().println(String.format("%-20s : %s", "Type", source.getType())); printTopicInfo(source); writer().println(String.format("%-20s : %s", "Statement", source.getStatement())); writer().println(""); printSchema(source.getWindowType(), source.getFields(), isTable); printSourceConstraints(source.getSourceConstraints()); printQueries(source.getReadQueries(), source.getType(), "read"); printQueries(source.getWriteQueries(), source.getType(), "write"); printStatistics(source); writer().println(String.format( "(%s)", "Statistics of the local KSQL server interaction with the Kafka topic " + source.getTopic() )); if (!source.getQueryOffsetSummaries().isEmpty()) { writer().println(); writer().println("Consumer Groups summary:"); for (QueryOffsetSummary entry : source.getQueryOffsetSummaries()) { writer().println(); writer().println(String.format("%-20s : %s", "Consumer Group", entry.getGroupId())); if (entry.getTopicSummaries().isEmpty()) { writer().println(""); } for (QueryTopicOffsetSummary topicSummary : entry.getTopicSummaries()) { 
// NOTE(review): tail of ksqlDB CLI Console.java. The statements immediately below are the
// remainder of a method whose header lies before this chunk (it prints a per-topic offset/lag
// table for a topic summary); reformatted without changing any code tokens.
writer().println();
writer().println(String.format("%-20s : %s", "Kafka topic", topicSummary.getKafkaTopic()));
// Max lag = largest (logEndOffset - consumerOffset) across all partitions; 0 when no offsets.
writer().println(String.format("%-20s : %s", "Max lag",
    topicSummary.getOffsets().stream()
        .mapToLong(s -> s.getLogEndOffset() - s.getConsumerOffset())
        .max()
        .orElse(0)));
writer().println("");
// One row per partition: start/end offsets, the consumer's offset, and the resulting lag.
final Table taskTable = new Table.Builder()
    .withColumnHeaders(
        ImmutableList.of("Partition", "Start Offset", "End Offset", "Offset", "Lag"))
    .withRows(topicSummary.getOffsets()
        .stream()
        .map(offset -> ImmutableList.of(
            String.valueOf(offset.getPartition()),
            String.valueOf(offset.getLogStartOffset()),
            String.valueOf(offset.getLogEndOffset()),
            String.valueOf(offset.getConsumerOffset()),
            String.valueOf(offset.getLogEndOffset() - offset.getConsumerOffset())
        )))
    .build();
taskTable.print(this);
// NOTE(review): closing braces for the partial method above; nesting depth inferred, not visible.
}
}
}
}

/**
 * Prints every source description in the list, separating entries with a blank line.
 */
private void printSourceDescriptionList(final SourceDescriptionList sourceDescriptionList) {
  sourceDescriptionList.getSourceDescriptions().forEach(
      sourceDescription -> {
        printSourceDescription(sourceDescription);
        writer().println();
      });
}

/**
 * Prints the names of the sources a query reads from, if any, followed by a hint
 * to run DESCRIBE for details. No output at all when the query has no sources.
 */
private void printQuerySources(final QueryDescription query) {
  if (!query.getSources().isEmpty()) {
    writer().println(String.format(
        "%n%-20s%n%-20s",
        "Sources that this query reads from: ",
        "-----------------------------------"
    ));
    for (final String sources : query.getSources()) {
      writer().println(sources);
    }
    writer().println("\nFor source description please run: DESCRIBE [EXTENDED] ");
  }
}

/**
 * Prints the names of the sinks a query writes to, if any, followed by a hint
 * to run DESCRIBE for details. No output at all when the query has no sinks.
 */
private void printQuerySinks(final QueryDescription query) {
  if (!query.getSinks().isEmpty()) {
    writer().println(String.format(
        "%n%-20s%n%-20s",
        "Sinks that this query writes to: ",
        "-----------------------------------"
    ));
    for (final String sinks : query.getSinks()) {
      writer().println(sinks);
    }
    writer().println("\nFor sink description please run: DESCRIBE [EXTENDED] ");
  }
}

/**
 * Prints a full description of a single query: id, type, SQL text (when present),
 * per-host status (when present), then schema, sources/sinks, execution plan,
 * topology, overridden properties, and any query error.
 */
private void printQueryDescription(final QueryDescription query) {
  writer().println(String.format("%-20s : %s", "ID", query.getId()));
  writer().println(String.format("%-20s : %s", "Query Type", query.getQueryType()));
  // SQL text may be empty for some query types; only print when non-empty.
  if (query.getStatementText().length() > 0) {
    writer().println(String.format("%-20s : %s", "SQL", query.getStatementText()));
  }
  if (!query.getKsqlHostQueryStatus().isEmpty()) {
    writer().println(String.format(
        "%-20s : %s", "Host Query Status", query.getKsqlHostQueryStatus()));
  }
  writer().println();
  printSchema(query.getWindowType(), query.getFields(), false);
  printQuerySources(query);
  printQuerySinks(query);
  printExecutionPlan(query);
  printTopology(query);
  printOverriddenProperties(query);
  printQueryError(query);
}

/**
 * Prints a Kafka Connect connector description: identity/state header, an optional
 * error trace, a per-task status table, the KSQL sources backed by the connector,
 * and its related topics. Sections are omitted when their data is empty.
 */
private void printConnectorDescription(final ConnectorDescription description) {
  final ConnectorStateInfo status = description.getStatus();
  writer().println(String.format("%-20s : %s", "Name", status.name()));
  writer().println(String.format("%-20s : %s", "Class", description.getConnectorClass()));
  writer().println(String.format("%-20s : %s", "Type", description.getStatus().type()));
  writer().println(String.format("%-20s : %s", "State", status.connector().state()));
  writer().println(String.format("%-20s : %s", "WorkerId", status.connector().workerId()));
  // trace() may be null; defaultIfNull avoids an NPE when checking for emptiness.
  if (!ObjectUtils.defaultIfNull(status.connector().trace(), "").isEmpty()) {
    writer().println(String.format("%-20s : %s", "Trace", status.connector().trace()));
  }
  if (!status.tasks().isEmpty()) {
    writer().println();
    final Table taskTable = new Table.Builder()
        .withColumnHeaders(ImmutableList.of("Task ID", "State", "Error Trace"))
        .withRows(status.tasks()
            .stream()
            .map(task -> ImmutableList.of(
                String.valueOf(task.id()),
                task.state(),
                // per-task trace may also be null; render as empty cell instead.
                ObjectUtils.defaultIfNull(task.trace(), ""))))
        .build();
    taskTable.print(this);
  }
  if (!description.getSources().isEmpty()) {
    writer().println();
    final Table sourceTable = new Table.Builder()
        .withColumnHeaders("KSQL Source Name", "Kafka Topic", "Type")
        .withRows(description.getSources()
            .stream()
            .map(source -> ImmutableList
                .of(source.getName(), source.getTopic(), source.getType())))
        .build();
    sourceTable.print(this);
  }
  if (!description.getTopics().isEmpty()) {
    writer().println();
    final Table topicTable = new Table.Builder()
        .withColumnHeaders("Related Topics")
        .withRows(description.getTopics().stream().map(ImmutableList::of))
        .build();
    topicTable.print(this);
  }
}

/**
 * Prints every query description in the list, separating entries with a blank line.
 */
private void printQueryDescriptionList(final QueryDescriptionList queryDescriptionList) {
  queryDescriptionList.getQueryDescriptions().forEach(
      queryDescription -> {
        printQueryDescription(queryDescription);
        writer().println();
      });
}

/**
 * Prints a UDF/UDAF function description: name, optional author/version, overview,
 * type, jar path, and then one "Variation" section per overload with its signature,
 * return type, description, and per-argument descriptions.
 */
private void printFunctionDescription(final FunctionDescriptionList describeFunction) {
  final String functionName = describeFunction.getName().toUpperCase();
  final String baseFormat = "%-12s: %s%n";
  final String subFormat = "\t%-12s: %s%n";
  writer().printf(baseFormat, "Name", functionName);
  // Author/version are optional metadata; skip the row when blank.
  if (!describeFunction.getAuthor().trim().isEmpty()) {
    writer().printf(baseFormat, "Author", describeFunction.getAuthor());
  }
  if (!describeFunction.getVersion().trim().isEmpty()) {
    writer().printf(baseFormat, "Version", describeFunction.getVersion());
  }
  printDescription(baseFormat, "Overview", describeFunction.getDescription());
  writer().printf(baseFormat, "Type", describeFunction.getType().name());
  writer().printf(baseFormat, "Jar", describeFunction.getPath());
  writer().printf(baseFormat, "Variations", "");
  // NOTE(review): raw type — the generic parameter (presumably Collection<FunctionInfo>)
  // appears to have been stripped during extraction; TODO restore against upstream.
  final Collection functions = describeFunction.getFunctions();
  functions.forEach(functionInfo -> {
        final String arguments = functionInfo.getArguments().stream()
            .map(Console::argToString)
            .collect(Collectors.joining(", "));
        writer().printf("%n\t%-12s: %s(%s)%n", "Variation", functionName, arguments);
        writer().printf(subFormat, "Returns", functionInfo.getReturnType());
        printDescription(subFormat, "Description", functionInfo.getDescription());
        functionInfo.getArguments()
            .forEach(a -> printDescription(subFormat, a.getName(), a.getDescription()));
      }
  );
}

/**
 * Prints the result of an ASSERT TOPIC statement, e.g. "Topic foo exists.".
 */
private void printAssertTopic(final AssertTopicEntity assertTopic) {
  final String existence = assertTopic.getExists() ? " exists" : " does not exist";
  writer().printf("Topic " + assertTopic.getTopicName() + existence + ".\n");
}

/**
 * Prints the result of an ASSERT SCHEMA statement, naming whichever of subject/id
 * was provided.
 *
 * @throws RuntimeException when the response carries neither a subject nor an id
 */
private void printAssertSchema(final AssertSchemaEntity assertSchema) {
  if (!assertSchema.getId().isPresent() && !assertSchema.getSubject().isPresent()) {
    throw new RuntimeException("No subject or id found in AssertSchema response.");
  }
  final String existence = assertSchema.getExists() ? " exists" : " does not exist";
  final String subject = assertSchema.getSubject().isPresent()
      ? " subject " + assertSchema.getSubject().get()
      : "";
  final String id = assertSchema.getId().isPresent()
      ? " id " + assertSchema.getId().get()
      : "";
  writer().printf("Schema with" + subject + id + existence + ".\n");
}

/**
 * Renders one function argument as "name type" (or just the type when unnamed),
 * with "[]" appended for variadic arguments.
 */
private static String argToString(final ArgumentInfo arg) {
  final String type = arg.getType() + (arg.getIsVariadic() ? "[]" : "");
  return arg.getName().isEmpty() ? type : (arg.getName() + " " + type);
}

/**
 * Prints a labelled description, word-wrapped to the terminal width (at least 80
 * columns) and with continuation lines indented to align under the text column.
 * Prints nothing when the description is blank.
 */
private void printDescription(final String format, final String name, final String description) {
  final String trimmed = description.trim();
  if (trimmed.isEmpty()) {
    return;
  }
  // Width of the rendered label prefix (format with an empty value, tabs expanded),
  // used both to size the wrap width and to build the continuation-line indent.
  final int labelLen = String.format(format.replace("%n", ""), name, "")
      .replace("\t", " ")
      .length();
  final int width = Math.max(getWidth(), 80) - labelLen;
  final String fixedWidth = splitLongLine(trimmed, width);
  final String indent = String.format("%-" + labelLen + "s", "");
  final String result = fixedWidth
      .replace(System.lineSeparator(), System.lineSeparator() + indent);
  writer().printf(format, name, result);
}

/**
 * Word-wraps {@code input} to lines of at most {@code maxLineLength} characters,
 * breaking at spaces and honouring explicit '\n' characters. A single word longer
 * than the limit is placed on its own (overlong) line rather than split.
 */
private static String splitLongLine(final String input, final int maxLineLength) {
  // returnDelims=true so spaces and newlines come back as their own tokens.
  final StringTokenizer spaceTok = new StringTokenizer(input, " \n", true);
  final StringBuilder output = new StringBuilder(input.length());
  int lineLen = 0;
  while (spaceTok.hasMoreTokens()) {
    final String word = spaceTok.nextToken();
    final boolean isNewLineChar = word.equals("\n");
    if (isNewLineChar || lineLen + word.length() > maxLineLength) {
      output.append(System.lineSeparator());
      lineLen = 0;
      if (isNewLineChar) {
        // Explicit newline: the break itself is the content; skip appending.
        continue;
      }
    }
    output.append(word);
    lineLen += word.length();
  }
  return output.toString();
}

/**
 * Pretty-prints {@code o} as JSON to the console and flushes.
 *
 * @throws RuntimeException wrapping any IOException from the writer
 */
private void printAsJson(final Object o) {
  try {
    OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValue(writer(), o);
    writer().println();
    flush();
  } catch (final IOException e) {
    throw new RuntimeException("Failed to write to console", e);
  }
}

/** A {@link RowCaptor} that discards all rows. */
static class NoOpRowCaptor implements RowCaptor {
  @Override
  public void addRow(final DataRow row) {
  }

  // NOTE(review): "List>" is malformed — the nested generic (presumably
  // List<List<String>>) appears stripped by extraction; TODO restore against upstream.
  @Override
  public void addRows(final List> fields) {
  }
}

/**
 * If {@code line} is a CLI-specific command (e.g. "help"), executes it against the
 * console writer and returns true; returns false for null input or ordinary KSQL.
 */
public boolean maybeHandleCliSpecificCommands(final String line) {
  if (line == null) {
    return false;
  }
  return getCliCommand(line)
      .map(cmd -> {
        cmd.execute(writer());
        flush();
        return true;
      })
      .orElse(false);
}

/**
 * Binds a {@link CliSpecificCommand} to the arguments parsed from an input line.
 * NOTE(review): the raw "List" types below (presumably List<String>) appear to have
 * lost their generic parameters during extraction; TODO restore against upstream.
 */
private static final class CliCmdExecutor {

  private final CliSpecificCommand cmd;
  private final List args;

  /**
   * Splits off the tokens that spell the command's (possibly multi-word) name and
   * treats the remainder of the line, with matched single quotes removed, as args.
   */
  private static CliCmdExecutor of(final CliSpecificCommand cmd, final List lineParts) {
    final String[] nameParts = cmd.getName().split("\\s+");
    final List argList = lineParts.subList(nameParts.length, lineParts.size()).stream()
        .map(CmdLineUtil::removeMatchedSingleQuotes)
        .collect(Collectors.toList());
    return new CliCmdExecutor(cmd, argList);
  }

  private CliCmdExecutor(final CliSpecificCommand cmd, final List args) {
    this.cmd = Objects.requireNonNull(cmd, "cmd");
    // Defensive immutable copy so the executor's argument list cannot change.
    this.args = ImmutableList.copyOf(Objects.requireNonNull(args, "args"));
  }

  public void execute(final PrintWriter terminal) {
    cmd.execute(args, terminal);
  }
}
}